Dataset schema:

| column | type | observed lengths / classes |
|---|---|---|
| commit | string | 40 chars |
| old_file | string | 4 to 118 chars |
| new_file | string | 4 to 118 chars |
| old_contents | string | 0 to 2.94k chars |
| new_contents | string | 1 to 4.43k chars |
| subject | string | 15 to 444 chars |
| message | string | 16 to 3.45k chars |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | 5 to 43.2k chars |
| prompt | string | 17 to 4.58k chars |
| response | string | 1 to 4.43k chars |
| prompt_tagged | string | 58 to 4.62k chars |
| response_tagged | string | 1 to 4.43k chars |
| text | string | 132 to 7.29k chars |
| text_tagged | string | 173 to 7.33k chars |
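For orientation, here is a minimal sketch of loading and inspecting a dataset with this schema using the `datasets` library. The dataset id is a placeholder, since this preview does not name the repository:

```python
from datasets import load_dataset

# Hypothetical repository id: the preview above does not name the dataset.
ds = load_dataset("someuser/python-commit-edits", split="train")

row = ds[0]
print(row["commit"])              # 40-char commit hash
print(row["subject"])             # one-line commit subject
print(row["new_file"])            # path of the file after the commit
print(row["new_contents"][:200])  # start of the post-commit file contents
```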
6182f07a82733c0c3d22aee40abbc392da67ce4d
|
spraakbanken/s5/spr_local/inword_backoff_node.py
|
spraakbanken/s5/spr_local/inword_backoff_node.py
|
#!/usr/bin/env python3
import argparse
import sys
def main(inf, outf, word_map):
iw_backoff_outgoing = []
iw_backoff_incoming = []
iw_ending_nodes = set()
max_node = 0
for line in inf:
parts = line.strip("\n").split("\t")
if len(parts) < 3:
print("\t".join(parts), file=outf)
continue
node_out = int(parts[0])
node_in = int(parts[1])
max_node = max(max_node, node_in, node_out)
if node_out == 0:
word = word_map.get(parts[2],parts[2])
if word.endswith('+'):
iw_ending_nodes.add(node_in)
if word.startswith('+'):
iw_backoff_outgoing.append(parts)
continue
if node_in == 0:
if node_out in iw_ending_nodes:
iw_backoff_incoming.append(parts)
continue
print("\t".join(parts), file=outf)
backoff_node = str(max_node + 1)
print("Backoff node: {}".format(backoff_node), file=sys.stderr)
print("Incoming arcs iw-backoff: {}".format(len(iw_backoff_incoming)), file=sys.stderr)
print("Outcoming arcs iw-backoff: {}".format(len(iw_backoff_outgoing)), file=sys.stderr)
for parts in iw_backoff_outgoing:
parts[0] = backoff_node
print("\t".join(parts), file=outf)
for parts in iw_backoff_incoming:
parts[1] = backoff_node
print("\t".join(parts), file=outf)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('infile', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout)
parser.add_argument('--symbols', type=argparse.FileType('r'), default=None)
args = parser.parse_args()
word_map = {}
if args.symbols is not None:
word_map = {l.split()[1]: l.split()[0] for l in args.symbols}
main(args.infile, args.outfile, word_map)
|
Add tool for creating a separate backoff node for inword positions
|
Add tool for creating a separate backoff node for inword positions
|
Python
|
apache-2.0
|
psmit/kaldi-recipes,psmit/kaldi-recipes,psmit/kaldi-recipes,phsmit/kaldi-recipes,phsmit/kaldi-recipes
|
|
|
<commit_before><commit_msg>Add tool for creating a separate backoff node for inword positions<commit_after>
|
|
|
|
|
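Judging from the preview rows, the derived columns (`prompt_tagged`, `text`, `text_tagged`) are plain concatenations of the base fields. A sketch of the apparent templates, inferred from the visible rows rather than from any official spec:

```python
def build_prompt_tagged(old_contents: str, message: str) -> str:
    # <commit_before> wraps the pre-commit file contents (empty for new files),
    # <commit_msg> wraps the commit message.
    return f"<commit_before>{old_contents}<commit_msg>{message}<commit_after>"

def build_text_tagged(old_contents: str, message: str, new_contents: str) -> str:
    # text_tagged appears to be the tagged prompt followed by the new file.
    return build_prompt_tagged(old_contents, message) + new_contents

def build_text(message: str, new_contents: str) -> str:
    # The untagged text column appears to fuse the message and the new file.
    return message + new_contents
```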
ee87aecbd08abaac4b5fa7b50ac120fdf17553c2
|
ci/new_tsqa/tests/test_hostdb.py
|
ci/new_tsqa/tests/test_hostdb.py
|
'''
Test hostdb
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import requests
import time
import helpers
import tsqa.test_cases
class TestHostDBFailedDNS(tsqa.test_cases.DynamicHTTPEndpointCase, helpers.EnvironmentCase):
'''
Tests for how hostdb handles when there is no reachable resolver
'''
@classmethod
def setUpEnv(cls, env):
cls.configs['remap.config'].add_line('map / http://some_nonexistant_domain:{0}/'.format(cls.http_endpoint.address[1]))
resolv_conf_path = os.path.join(env.layout.prefix, 'resolv.conf')
cls.configs['records.config']['CONFIG'].update({
'proxy.config.http.response_server_enabled': 2, # only add server headers when there weren't any
'proxy.config.hostdb.lookup_timeout': 1,
'proxy.config.dns.resolv_conf': resolv_conf_path,
})
with open(resolv_conf_path, 'w') as fh:
fh.write('nameserver 1.1.1.1\n') # some non-existant nameserver
def test_lookup_timeout(self):
start = time.time()
ret = requests.get(self.endpoint_url('/test'),
proxies=self.proxies,
)
self.assertGreater(time.time() - start, self.configs['records.config']['CONFIG']['proxy.config.hostdb.lookup_timeout'])
self.assertEqual(ret.status_code, 502)
self.assertIn('ATS', ret.headers['server'])
|
Add some initial teests for hostdb
|
Add some initial teests for hostdb
|
Python
|
apache-2.0
|
persiaAziz/trafficserver,PSUdaemon/trafficserver,rpufky/trafficserver,davidbz/trafficserver,SolidWallOfCode/trafficserver,rahmalik/trafficserver,rahmalik/trafficserver,vmamidi/trafficserver,chitianhao/trafficserver,reveller/trafficserver,duke8253/trafficserver,duke8253/trafficserver,chitianhao/trafficserver,pbchou/trafficserver,chitianhao/trafficserver,PSUdaemon/trafficserver,bryancall/trafficserver,clearswift/trafficserver,PSUdaemon/trafficserver,rpufky/trafficserver,dyrock/trafficserver,rahmalik/trafficserver,dyrock/trafficserver,rpufky/trafficserver,dyrock/trafficserver,duke8253/trafficserver,vmamidi/trafficserver,taoyunxing/trafficserver,taoyunxing/trafficserver,clearswift/trafficserver,reveller/trafficserver,pbchou/trafficserver,chenglongwei/trafficserver,reveller/trafficserver,taoyunxing/trafficserver,reveller/trafficserver,clearswift/trafficserver,reveller/trafficserver,taoyunxing/trafficserver,duke8253/trafficserver,rahmalik/trafficserver,rahmalik/trafficserver,davidbz/trafficserver,clearswift/trafficserver,davidbz/trafficserver,reveller/trafficserver,dyrock/trafficserver,chenglongwei/trafficserver,persiaAziz/trafficserver,persiaAziz/trafficserver,persiaAziz/trafficserver,pbchou/trafficserver,taoyunxing/trafficserver,rpufky/trafficserver,duke8253/trafficserver,rahmalik/trafficserver,rpufky/trafficserver,bryancall/trafficserver,chenglongwei/trafficserver,persiaAziz/trafficserver,PSUdaemon/trafficserver,SolidWallOfCode/trafficserver,clearswift/trafficserver,rahmalik/trafficserver,davidbz/trafficserver,clearswift/trafficserver,dyrock/trafficserver,chitianhao/trafficserver,taoyunxing/trafficserver,rpufky/trafficserver,pbchou/trafficserver,bryancall/trafficserver,vmamidi/trafficserver,rpufky/trafficserver,vmamidi/trafficserver,PSUdaemon/trafficserver,PSUdaemon/trafficserver,dyrock/trafficserver,reveller/trafficserver,vmamidi/trafficserver,chenglongwei/trafficserver,taoyunxing/trafficserver,SolidWallOfCode/trafficserver,rpufky/trafficserver,persiaAziz/trafficserver,persiaAziz/trafficserver,SolidWallOfCode/trafficserver,clearswift/trafficserver,clearswift/trafficserver,pbchou/trafficserver,chenglongwei/trafficserver,PSUdaemon/trafficserver,duke8253/trafficserver,SolidWallOfCode/trafficserver,dyrock/trafficserver,davidbz/trafficserver,clearswift/trafficserver,reveller/trafficserver,SolidWallOfCode/trafficserver,rahmalik/trafficserver,taoyunxing/trafficserver,rpufky/trafficserver,chenglongwei/trafficserver,chitianhao/trafficserver,persiaAziz/trafficserver,SolidWallOfCode/trafficserver,rahmalik/trafficserver,PSUdaemon/trafficserver,chitianhao/trafficserver,vmamidi/trafficserver,PSUdaemon/trafficserver,chitianhao/trafficserver,bryancall/trafficserver,davidbz/trafficserver,pbchou/trafficserver,chenglongwei/trafficserver,bryancall/trafficserver,bryancall/trafficserver,taoyunxing/trafficserver,duke8253/trafficserver,reveller/trafficserver,chenglongwei/trafficserver
|
|
|
<commit_before><commit_msg>Add some initial teests for hostdb<commit_after>
|
|
|
|
|
21f2571a0e5b7100db8c2ddf3c49d32992d20e3e
|
db_queries.py
|
db_queries.py
|
import os
import sqlite3
import pandas as pd
DB_PATH = os.path.dirname(__file__)+"/Tables/Pyranitar.db"
def query_level(group, experience):
cnx = sqlite3.connect(DB_PATH)
sql = "" \
"SELECT level " \
"FROM Experience " \
"WHERE exp_group = {} " \
"AND total_exp <= {} " \
"ORDER BY total_exp DESC LIMIT 1".format("'"+ group + "'", experience)
df = pd.read_sql_query(sql, cnx)
cnx.close()
df = df.values.flatten()
return df[0]
def query_nature(nature):
cnx = sqlite3.connect(DB_PATH)
sql = "SELECT hp, atk, def, spa, spd, spe FROM Natures WHERE nature={};".format(
"'"+ nature + "'")
df = pd.read_sql_query(sql, cnx)
cnx.close()
df = list(df.values.flatten())
return df
if __name__ == '__main__':
print(query_nature('adamant'))
print(query_level('slow', 50000))
|
Add query_level and query_nature functions
|
Add query_level and query_nature functions
|
Python
|
mit
|
Ditoeight/Pyranitar
|
|
|
<commit_before><commit_msg>Add query_level and query_nature functions<commit_after>
|
|
|
|
|
dfa99a462487fe728c29a8663a223c129ed17355
|
examples/translations/russian_test_1.py
|
examples/translations/russian_test_1.py
|
# Russian Language Test
from seleniumbase.translate.russian import ТестНаСелен # noqa
class МойТестовыйКласс(ТестНаСелен):
def test_пример_1(self):
self.открытый("https://ru.wikipedia.org/wiki/")
self.проверить_элемент('[title="Русский язык"]')
self.проверить_текст("Википедия", "h2.main-wikimedia-header")
self.обновить_текст("#searchInput", "МГУ")
self.нажмите("#searchButton")
self.проверить_текст("университет", "#firstHeading")
self.проверить_элемент('img[alt="МГУ, вид с воздуха.jpg"]')
self.обновить_текст("#searchInput", "приключения Шурика")
self.нажмите("#searchButton")
self.проверить_текст("Операция «Ы» и другие приключения Шурика")
self.проверить_элемент('img[alt="Постер фильма"]')
self.назад()
self.проверить_правду("университет" in self.получить_текущий_URL())
self.вперед()
self.проверить_правду("Шурика" in self.получить_текущий_URL())
|
Add the Russian example test
|
Add the Russian example test
|
Python
|
mit
|
mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase
|
|
|
<commit_before><commit_msg>Add the Russian example test<commit_after>
|
|
|
|
|
c7d32d2ed2483ce34c7a901c84d128c50f03c472
|
golddust/__init__.py
|
golddust/__init__.py
|
# Copyright 2015-2016 John "LuaMilkshake" Marion
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GoldDust package management library.
GoldDust manages modded installations of the game Minecraft. This
package implements a package manager for mod files (including
Minecraft Forge and texture packs).
"""
import os
import platform
def default_home_dir():
"""Get the default home directory path on this platform.
This is an absolute form of "~/.golddust" on all platforms except
for Windows, where it is "%APPDATA%/.golddust".
In the strange case where APPDATA isn't set on Windows, will default
to the non-Windows behavior.
Returns str, the absolute path of "~/.golddust" (non-Windows),
"%APPDATA/.golddust" (Windows).
"""
path = "~/.golddust"
if platform.system() == "Windows" and "APPDATA" in os.environ:
path = os.path.join(os.environ["APPDATA"], ".golddust")
return os.path.abspath(os.path.expanduser(path))
def write_default_config(path):
"""Write out a default GoldDust configuration.
"""
config = open(path, "w+")
# TODO Actually serialize a configuration
config.write("{}")
config.close()
def install_home_dir(path):
"""Install the GoldDust home directory.
The home directory is used for global configuration, local package caches,
and instance information.
Takes:
path (str): Path to install the home directory to. Should not exist.
Raises:
FileExistsError: The path supplied already exists.
"""
if os.path.isdir(path):
raise FileExistsError("Installation target directory shouldn't exist.")
gdhome = os.path.abspath(os.path.expanduser(path))
# Create directories and config file
os.mkdir(gdhome, mode=0o755)
os.mkdir(os.path.join(gdhome, "pkgcache"))
os.mkdir(os.path.join(gdhome, "instances"))
write_default_config(os.path.join(gdhome, "config.json"))
class Package:
"""A package managed by GoldDust"""
def __init__(self):
self.name = ""
self.version = ""
@property
def tarball(self):
return "{}-{}.tar.bz2".format(self.name, self.version)
@property
def sig_file(self):
return "{}.sig".format(self.tarball)
|
Add current state of golddust package
|
Add current state of golddust package
|
Python
|
apache-2.0
|
Packeteers/GoldDust
|
|
|
<commit_before><commit_msg>Add current state of golddust package<commit_after>
|
|
|
|
|
0ad93dfef152aae325b99274ac6a6789b1d46a32
|
wk2/loading_catalogues.py
|
wk2/loading_catalogues.py
|
import numpy as np
def hms2dec(h, m, s):
return 15*(h + m/60 + s/3600)
def dms2dec(d, m, s):
if (d >= 0):
return d + m/60 + s/3600
else:
return d - m/60 - s/3600
def import_bss():
file = 'bss.dat'
lines = np.loadtxt(file, usecols=range(1, 7))
count=1
result = [ ]
for line in lines:
result.append((count, hms2dec(line[0], line[1], line[2]), dms2dec(line[3], line[4], line[5])))
count += 1
return result
def import_super():
file = 'super.csv'
lines = np.loadtxt(file, delimiter=',', skiprows=1,usecols=[0,1])
result = []
count = 1
for line in lines:
result.append((count, line[0], line[1]))
count += 1
return result
# You can use this to test your function.
# Any code inside this `if` statement will be ignored by the automarker.
if __name__ == '__main__':
# Output of the import_bss and import_super functions
bss_cat = import_bss()
super_cat = import_super()
print(bss_cat)
print(super_cat)
|
Load data from AT20G BSS and SUperCOSMOS catalogues
|
Load data from AT20G BSS and SUperCOSMOS catalogues
|
Python
|
mit
|
lokijota/datadrivenastronomymooc
|
|
|
<commit_before><commit_msg>Load data from AT20G BSS and SUperCOSMOS catalogues<commit_after>
|
|
|
|
|
5b5027a6e84324bc09ad3ba29d84e54c51d33b45
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
def pytest_addoption(parser):
parser.addoption("--runslow", action="store_true",
help="run slow tests")
def pytest_runtest_setup(item):
if 'slow' in item.keywords and not item.config.getoption("--runslow"):
pytest.skip("need --runslow option to run")
|
Add pytest config file for slow tests.
|
Add pytest config file for slow tests.
Added a pytest config file in the tests folder, so that I could define a
--runslow option to some tests. This way, I don't have to run them all
the time if not necessary.
|
Python
|
bsd-3-clause
|
achabotl/pambox
|
|
|
<commit_before><commit_msg>Add pytest config file for slow tests.
Added a pytest config file in the tests folder, so that I could define a
--runslow option to some tests. This way, I don't have to run them all
the time if not necessary.<commit_after>
|
|
|
|
|
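As a usage note for the pytest conftest row above: with that hook in place, a test opts into the slow group through the standard pytest marker, roughly like this (a sketch, not part of the dataset):

```python
import pytest

@pytest.mark.slow
def test_expensive_path():
    # Skipped unless the suite is run with --runslow.
    assert sum(range(10_000_000)) > 0
```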
d4aab57bf6173334fa2dbd82eef74b786473ef3d
|
djangosaml2/templatetags/idplist.py
|
djangosaml2/templatetags/idplist.py
|
# Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
self.conf = config_settings_loader()
def render(self, context):
context[self.variable_name] = self.conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
# Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
def render(self, context):
conf = config_settings_loader()
context[self.variable_name] = conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also this code is more reentrant
|
Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also this code is more reentrant
|
Python
|
apache-2.0
|
WebSpider/djangosaml2,writepython/djangosaml2,BetterWorks/djangosaml2,damienmarie-alation/djangosaml2,BetterWorks/djangosaml2,MiguelSR/djangosaml2,bernii/djangosaml2,damienmarie-alation/djangosaml2,knaperek/djangosaml2,WebSpider/djangosaml2,writepython/djangosaml2,MiguelSR/djangosaml2,knaperek/djangosaml2,azavea/djangosaml2
|
|
# Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
def render(self, context):
conf = config_settings_loader()
context[self.variable_name] = conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
<commit_before># Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
self.conf = config_settings_loader()
def render(self, context):
context[self.variable_name] = self.conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
<commit_msg>Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also this code is more reentrant<commit_after>
|
# Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
def render(self, context):
conf = config_settings_loader()
context[self.variable_name] = conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
# Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
self.conf = config_settings_loader()
def render(self, context):
context[self.variable_name] = self.conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also this code is more reentrant# Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
def render(self, context):
conf = config_settings_loader()
context[self.variable_name] = conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
<commit_before># Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
self.conf = config_settings_loader()
def render(self, context):
context[self.variable_name] = self.conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
<commit_msg>Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also this code is more reentrant<commit_after># Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
def render(self, context):
conf = config_settings_loader()
context[self.variable_name] = conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
|
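The change recorded above is an instance of a general Django pattern: defer configuration loading from template-node construction, which runs at import/compile time, to render, which runs per request. A minimal sketch of the pattern, assuming only a generic loader callable (the names below are illustrative, not djangosaml2's actual API):

import django  # only to signal this is Django-flavored; the sketch is plain Python

class LazyConfigNode:
    def __init__(self, loader):
        # Store the callable only; invoking it here could run before the
        # application's settings are fully configured.
        self._loader = loader

    def render(self, context):
        # Load on demand, once per render. Nothing is cached on the
        # instance, so concurrent renders share no mutable state; that is
        # what the commit message means by the code being "more reentrant".
        conf = self._loader()
        return str(conf)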
9e8b32928c068237e34a7c319564333bbed59cb7
|
pox/messenger/test_client.py
|
pox/messenger/test_client.py
|
#!/usr/bin/env python
# Copyright 2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
This is NOT a POX component. It's a little tool to test out the messenger.
"""
import socket
import threading
import json
class JSONDestreamer (object):
import json
decoder = json.JSONDecoder()
def __init__ (self, callback = None):
self.data = ''
self.callback = callback if callback else self.rx
def push (self, data):
if len(self.data) == 0:
data = data.lstrip()
self.data += data
try:
while len(self.data) > 0:
r,off = self.decoder.raw_decode(self.data)
self.data = self.data[off:].lstrip()
self.callback(r)
except ValueError:
pass
def rx (self, data):
import json
print "Recv:", json.dumps(data, indent=4)
jd = JSONDestreamer()
done = False
def reader (socket):
global done
while True:
d = socket.recv(1024)
if d == "":
done = True
break
jd.push(d)
cur_chan = None
def channel (ch):
global cur_chan
cur_chan = ch
import readline
def main (addr = "127.0.0.1", port = 7790):
print "Connecting to %s:%i" % (addr,port)
port = int(port)
sock = socket.create_connection((addr, port))
t = threading.Thread(target=reader, args=(sock,))
t.daemon = True
t.start()
while not done:
try:
#print ">",
m = raw_input()
if len(m) == 0: continue
m = eval(m)
if not isinstance(m, dict):
continue
if cur_chan is not None and 'CHANNEL' not in m:
m['CHANNEL'] = cur_chan
m = json.dumps(m)
sock.send(m)
except EOFError:
break
except KeyboardInterrupt:
break
except:
import traceback
traceback.print_exc()
if __name__ == "__main__":
import sys
main(*sys.argv[1:])
|
Add a small test client program
|
messenger: Add a small test client program
|
Python
|
apache-2.0
|
waltznetworks/pox,waltznetworks/pox,PrincetonUniversity/pox,andiwundsam/_of_normalize,VamsikrishnaNallabothu/pox,jacobq/csci5221-viro-project,chenyuntc/pox,diogommartins/pox,carlye566/IoT-POX,pthien92/sdn,adusia/pox,pthien92/sdn,pthien92/sdn,carlye566/IoT-POX,pthien92/sdn,waltznetworks/pox,kulawczukmarcin/mypox,PrincetonUniversity/pox,MurphyMc/pox,diogommartins/pox,carlye566/IoT-POX,waltznetworks/pox,VamsikrishnaNallabothu/pox,kavitshah8/SDNDeveloper,denovogroup/pox,andiwundsam/_of_normalize,xAKLx/pox,VamsikrishnaNallabothu/pox,kulawczukmarcin/mypox,adusia/pox,andiwundsam/_of_normalize,noxrepo/pox,kpengboy/pox-exercise,andiwundsam/_of_normalize,diogommartins/pox,noxrepo/pox,kpengboy/pox-exercise,carlye566/IoT-POX,VamsikrishnaNallabothu/pox,PrincetonUniversity/pox,adusia/pox,xAKLx/pox,kulawczukmarcin/mypox,VamsikrishnaNallabothu/pox,MurphyMc/pox,chenyuntc/pox,jacobq/csci5221-viro-project,PrincetonUniversity/pox,xAKLx/pox,denovogroup/pox,kavitshah8/SDNDeveloper,kulawczukmarcin/mypox,chenyuntc/pox,diogommartins/pox,kpengboy/pox-exercise,pthien92/sdn,xAKLx/pox,MurphyMc/pox,chenyuntc/pox,xAKLx/pox,kavitshah8/SDNDeveloper,jacobq/csci5221-viro-project,carlye566/IoT-POX,jacobq/csci5221-viro-project,kpengboy/pox-exercise,noxrepo/pox,PrincetonUniversity/pox,adusia/pox,denovogroup/pox,MurphyMc/pox,adusia/pox,denovogroup/pox,noxrepo/pox,jacobq/csci5221-viro-project,chenyuntc/pox,diogommartins/pox,waltznetworks/pox,MurphyMc/pox,kavitshah8/SDNDeveloper,kpengboy/pox-exercise,kulawczukmarcin/mypox,denovogroup/pox
|
messenger: Add a small test client program
|
#!/usr/bin/env python
# Copyright 2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
This is NOT a POX component. It's a little tool to test out the messenger.
"""
import socket
import threading
import json
class JSONDestreamer (object):
import json
decoder = json.JSONDecoder()
def __init__ (self, callback = None):
self.data = ''
self.callback = callback if callback else self.rx
def push (self, data):
if len(self.data) == 0:
data = data.lstrip()
self.data += data
try:
while len(self.data) > 0:
r,off = self.decoder.raw_decode(self.data)
self.data = self.data[off:].lstrip()
self.callback(r)
except ValueError:
pass
def rx (self, data):
import json
print "Recv:", json.dumps(data, indent=4)
jd = JSONDestreamer()
done = False
def reader (socket):
global done
while True:
d = socket.recv(1024)
if d == "":
done = True
break
jd.push(d)
cur_chan = None
def channel (ch):
global cur_chan
cur_chan = ch
import readline
def main (addr = "127.0.0.1", port = 7790):
print "Connecting to %s:%i" % (addr,port)
port = int(port)
sock = socket.create_connection((addr, port))
t = threading.Thread(target=reader, args=(sock,))
t.daemon = True
t.start()
while not done:
try:
#print ">",
m = raw_input()
if len(m) == 0: continue
m = eval(m)
if not isinstance(m, dict):
continue
if cur_chan is not None and 'CHANNEL' not in m:
m['CHANNEL'] = cur_chan
m = json.dumps(m)
sock.send(m)
except EOFError:
break
except KeyboardInterrupt:
break
except:
import traceback
traceback.print_exc()
if __name__ == "__main__":
import sys
main(*sys.argv[1:])
|
<commit_before><commit_msg>messenger: Add a small test client program<commit_after>
|
#!/usr/bin/env python
# Copyright 2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
This is NOT a POX component. It's a little tool to test out the messenger.
"""
import socket
import threading
import json
class JSONDestreamer (object):
import json
decoder = json.JSONDecoder()
def __init__ (self, callback = None):
self.data = ''
self.callback = callback if callback else self.rx
def push (self, data):
if len(self.data) == 0:
data = data.lstrip()
self.data += data
try:
while len(self.data) > 0:
r,off = self.decoder.raw_decode(self.data)
self.data = self.data[off:].lstrip()
self.callback(r)
except ValueError:
pass
def rx (self, data):
import json
print "Recv:", json.dumps(data, indent=4)
jd = JSONDestreamer()
done = False
def reader (socket):
global done
while True:
d = socket.recv(1024)
if d == "":
done = True
break
jd.push(d)
cur_chan = None
def channel (ch):
global cur_chan
cur_chan = ch
import readline
def main (addr = "127.0.0.1", port = 7790):
print "Connecting to %s:%i" % (addr,port)
port = int(port)
sock = socket.create_connection((addr, port))
t = threading.Thread(target=reader, args=(sock,))
t.daemon = True
t.start()
while not done:
try:
#print ">",
m = raw_input()
if len(m) == 0: continue
m = eval(m)
if not isinstance(m, dict):
continue
if cur_chan is not None and 'CHANNEL' not in m:
m['CHANNEL'] = cur_chan
m = json.dumps(m)
sock.send(m)
except EOFError:
break
except KeyboardInterrupt:
break
except:
import traceback
traceback.print_exc()
if __name__ == "__main__":
import sys
main(*sys.argv[1:])
|
messenger: Add a small test client program#!/usr/bin/env python
# Copyright 2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
This is NOT a POX component. It's a little tool to test out the messenger.
"""
import socket
import threading
import json
class JSONDestreamer (object):
import json
decoder = json.JSONDecoder()
def __init__ (self, callback = None):
self.data = ''
self.callback = callback if callback else self.rx
def push (self, data):
if len(self.data) == 0:
data = data.lstrip()
self.data += data
try:
while len(self.data) > 0:
r,off = self.decoder.raw_decode(self.data)
self.data = self.data[off:].lstrip()
self.callback(r)
except ValueError:
pass
def rx (self, data):
import json
print "Recv:", json.dumps(data, indent=4)
jd = JSONDestreamer()
done = False
def reader (socket):
global done
while True:
d = socket.recv(1024)
if d == "":
done = True
break
jd.push(d)
cur_chan = None
def channel (ch):
global cur_chan
cur_chan = ch
import readline
def main (addr = "127.0.0.1", port = 7790):
print "Connecting to %s:%i" % (addr,port)
port = int(port)
sock = socket.create_connection((addr, port))
t = threading.Thread(target=reader, args=(sock,))
t.daemon = True
t.start()
while not done:
try:
#print ">",
m = raw_input()
if len(m) == 0: continue
m = eval(m)
if not isinstance(m, dict):
continue
if cur_chan is not None and 'CHANNEL' not in m:
m['CHANNEL'] = cur_chan
m = json.dumps(m)
sock.send(m)
except EOFError:
break
except KeyboardInterrupt:
break
except:
import traceback
traceback.print_exc()
if __name__ == "__main__":
import sys
main(*sys.argv[1:])
|
<commit_before><commit_msg>messenger: Add a small test client program<commit_after>#!/usr/bin/env python
# Copyright 2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
This is NOT a POX component. It's a little tool to test out the messenger.
"""
import socket
import threading
import json
class JSONDestreamer (object):
import json
decoder = json.JSONDecoder()
def __init__ (self, callback = None):
self.data = ''
self.callback = callback if callback else self.rx
def push (self, data):
if len(self.data) == 0:
data = data.lstrip()
self.data += data
try:
while len(self.data) > 0:
r,off = self.decoder.raw_decode(self.data)
self.data = self.data[off:].lstrip()
self.callback(r)
except ValueError:
pass
def rx (self, data):
import json
print "Recv:", json.dumps(data, indent=4)
jd = JSONDestreamer()
done = False
def reader (socket):
global done
while True:
d = socket.recv(1024)
if d == "":
done = True
break
jd.push(d)
cur_chan = None
def channel (ch):
global cur_chan
cur_chan = ch
import readline
def main (addr = "127.0.0.1", port = 7790):
print "Connecting to %s:%i" % (addr,port)
port = int(port)
sock = socket.create_connection((addr, port))
t = threading.Thread(target=reader, args=(sock,))
t.daemon = True
t.start()
while not done:
try:
#print ">",
m = raw_input()
if len(m) == 0: continue
m = eval(m)
if not isinstance(m, dict):
continue
if cur_chan is not None and 'CHANNEL' not in m:
m['CHANNEL'] = cur_chan
m = json.dumps(m)
sock.send(m)
except EOFError:
break
except KeyboardInterrupt:
break
except:
import traceback
traceback.print_exc()
if __name__ == "__main__":
import sys
main(*sys.argv[1:])
|
|
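The most reusable piece of the client above is the JSONDestreamer idiom: buffer raw socket text and peel complete JSON documents off the front of the buffer with JSONDecoder.raw_decode, which returns the decoded object plus the index where it ended. A Python 3 sketch of the same idea (the original file is Python 2; this is illustrative, not a drop-in replacement):

import json

class JSONDestreamer:
    decoder = json.JSONDecoder()

    def __init__(self, callback):
        self.buf = ''
        self.callback = callback

    def push(self, data):
        # Accumulate incoming text; raw_decode rejects leading whitespace,
        # so strip it before attempting a parse.
        self.buf = (self.buf + data).lstrip()
        while self.buf:
            try:
                obj, end = self.decoder.raw_decode(self.buf)
            except ValueError:
                return  # incomplete document; wait for more data
            self.buf = self.buf[end:].lstrip()
            self.callback(obj)

Feeding it '{"a": 1}{"b"' delivers {"a": 1} to the callback and keeps the unfinished '{"b"' buffered until the rest of the document arrives.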
1210f01eddf1979f9a8a4c9eed429d43e3fd7d53
|
i3pystatus/online.py
|
i3pystatus/online.py
|
from i3pystatus import IntervalModule
from i3pystatus.core.util import internet
class Online(IntervalModule):
"""Show internet connection status."""
settings = (
("color", "Text color when online"),
('color_offline', 'Text color when offline'),
('format_online', 'Status text when online'),
('format_offline', 'Status text when offline'),
)
color = '#ffffff'
color_offline = '#ff0000'
format_online = 'online'
format_offline = 'offline'
interval = 10
def run(self):
if internet():
self.output = {
"color": self.color,
"full_text": self.format_online,
}
else:
self.output = {
"color": self.color_offline,
"full_text": self.format_offline,
}
|
Add simple module to show internet connection
|
Add simple module to show internet connection
|
Python
|
mit
|
schroeji/i3pystatus,richese/i3pystatus,yang-ling/i3pystatus,fmarchenko/i3pystatus,facetoe/i3pystatus,ncoop/i3pystatus,juliushaertl/i3pystatus,Arvedui/i3pystatus,juliushaertl/i3pystatus,m45t3r/i3pystatus,facetoe/i3pystatus,enkore/i3pystatus,eBrnd/i3pystatus,enkore/i3pystatus,drwahl/i3pystatus,teto/i3pystatus,asmikhailov/i3pystatus,ncoop/i3pystatus,fmarchenko/i3pystatus,teto/i3pystatus,drwahl/i3pystatus,yang-ling/i3pystatus,schroeji/i3pystatus,eBrnd/i3pystatus,asmikhailov/i3pystatus,Arvedui/i3pystatus,richese/i3pystatus,m45t3r/i3pystatus
|
Add simple module to show internet connection
|
from i3pystatus import IntervalModule
from i3pystatus.core.util import internet
class Online(IntervalModule):
"""Show internet connection status."""
settings = (
("color", "Text color when online"),
('color_offline', 'Text color when offline'),
('format_online', 'Status text when online'),
('format_offline', 'Status text when offline'),
)
color = '#ffffff'
color_offline = '#ff0000'
format_online = 'online'
format_offline = 'offline'
interval = 10
def run(self):
if internet():
self.output = {
"color": self.color,
"full_text": self.format_online,
}
else:
self.output = {
"color": self.color_offline,
"full_text": self.format_offline,
}
|
<commit_before><commit_msg>Add simple module to show internet connection<commit_after>
|
from i3pystatus import IntervalModule
from i3pystatus.core.util import internet
class Online(IntervalModule):
"""Show internet connection status."""
settings = (
("color", "Text color when online"),
('color_offline', 'Text color when offline'),
('format_online', 'Status text when online'),
('format_offline', 'Status text when offline'),
)
color = '#ffffff'
color_offline = '#ff0000'
format_online = 'online'
format_offline = 'offline'
interval = 10
def run(self):
if internet():
self.output = {
"color": self.color,
"full_text": self.format_online,
}
else:
self.output = {
"color": self.color_offline,
"full_text": self.format_offline,
}
|
Add simple module to show internet connectionfrom i3pystatus import IntervalModule
from i3pystatus.core.util import internet
class Online(IntervalModule):
"""Show internet connection status."""
settings = (
("color", "Text color when online"),
('color_offline', 'Text color when offline'),
('format_online', 'Status text when online'),
('format_offline', 'Status text when offline'),
)
color = '#ffffff'
color_offline = '#ff0000'
format_online = 'online'
format_offline = 'offline'
interval = 10
def run(self):
if internet():
self.output = {
"color": self.color,
"full_text": self.format_online,
}
else:
self.output = {
"color": self.color_offline,
"full_text": self.format_offline,
}
|
<commit_before><commit_msg>Add simple module to show internet connection<commit_after>from i3pystatus import IntervalModule
from i3pystatus.core.util import internet
class Online(IntervalModule):
"""Show internet connection status."""
settings = (
("color", "Text color when online"),
('color_offline', 'Text color when offline'),
('format_online', 'Status text when online'),
('format_offline', 'Status text when offline'),
)
color = '#ffffff'
color_offline = '#ff0000'
format_online = 'online'
format_offline = 'offline'
interval = 10
def run(self):
if internet():
self.output = {
"color": self.color,
"full_text": self.format_online,
}
else:
self.output = {
"color": self.color_offline,
"full_text": self.format_offline,
}
|
|
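The module above delegates the actual check to i3pystatus.core.util.internet, which is not shown here. Purely as an illustration (an assumption; the real helper may be implemented differently), such a probe can be as simple as attempting a TCP connection to a well-known address:

import socket

def internet(host='8.8.8.8', port=53, timeout=3.0):
    # Hypothetical stand-in for i3pystatus.core.util.internet: report
    # whether a TCP connection to a public DNS server can be opened.
    try:
        socket.create_connection((host, port), timeout=timeout).close()
        return True
    except OSError:
        return False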
a6f1d1e7ca63a4d204aade3b0f67b53c1e867609
|
scripts/feature/stage4_24h_heavy.py
|
scripts/feature/stage4_24h_heavy.py
|
"""
Create a plot of today's total precipitation from the Stage4 estimates
"""
import pygrib
import mx.DateTime
import iemplot
import numpy
import os, sys
def doday():
"""
Create a plot of precipitation stage4 estimates for some day
"""
sts = mx.DateTime.DateTime(2010,4,1,12)
ets = mx.DateTime.DateTime(2010,9,22,12)
interval = mx.DateTime.RelativeDateTime(days=1)
now = sts
total = None
while now < ets:
fp = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.24h.grib" % (
now.strftime("%Y/%m/%d"),
now.strftime("%Y%m%d%H") )
if os.path.isfile(fp):
lts = now
grbs = pygrib.open(fp)
if total is None:
g = grbs[1]
total = numpy.where(g["values"] > 25.4, 1., 0.)
lats, lons = g.latlons()
else:
total += numpy.where(grbs[1]["values"] > 25.4, 1., 0.)
grbs.close()
now += interval
# Now we dance
cfg = {
'wkColorMap': 'BlAqGrYeOrRe',
'nglSpreadColorStart': -1,
'nglSpreadColorEnd' : 2,
'_MaskZero' : True,
'_midwest' : True,
'lbTitleString' : "[days]",
'_valid' : 'Number of days between 1 Apr and %s' % (
now.strftime("%d %B %Y"),),
'_title' : "NCEP StageIV 24 Hour Rainfall Over 1 inch",
}
# Midwest
tmpfp = iemplot.simple_grid_fill(lons, lats, total, cfg)
iemplot.makefeature(tmpfp)
if __name__ == "__main__":
doday()
|
Add script to run on IEM to compute days of 1+ inch rainfall since 1 Apr
|
Add script to run on IEM to compute days of 1+ inch rainfall since 1 Apr
|
Python
|
mit
|
akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem
|
Add script to run on IEM to compute days of 1+ inch rainfall since 1 Apr
|
"""
Create a plot of today's total precipitation from the Stage4 estimates
"""
import pygrib
import mx.DateTime
import iemplot
import numpy
import os, sys
def doday():
"""
Create a plot of precipitation stage4 estimates for some day
"""
sts = mx.DateTime.DateTime(2010,4,1,12)
ets = mx.DateTime.DateTime(2010,9,22,12)
interval = mx.DateTime.RelativeDateTime(days=1)
now = sts
total = None
while now < ets:
fp = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.24h.grib" % (
now.strftime("%Y/%m/%d"),
now.strftime("%Y%m%d%H") )
if os.path.isfile(fp):
lts = now
grbs = pygrib.open(fp)
if total is None:
g = grbs[1]
total = numpy.where(g["values"] > 25.4, 1., 0.)
lats, lons = g.latlons()
else:
total += numpy.where(grbs[1]["values"] > 25.4, 1., 0.)
grbs.close()
now += interval
# Now we dance
cfg = {
'wkColorMap': 'BlAqGrYeOrRe',
'nglSpreadColorStart': -1,
'nglSpreadColorEnd' : 2,
'_MaskZero' : True,
'_midwest' : True,
'lbTitleString' : "[days]",
'_valid' : 'Number of days between 1 Apr and %s' % (
now.strftime("%d %B %Y"),),
'_title' : "NCEP StageIV 24 Hour Rainfall Over 1 inch",
}
# Midwest
tmpfp = iemplot.simple_grid_fill(lons, lats, total, cfg)
iemplot.makefeature(tmpfp)
if __name__ == "__main__":
doday()
|
<commit_before><commit_msg>Add script to run on IEM to compute days of 1+ inch rainfall since 1 Apr<commit_after>
|
"""
Create a plot of today's total precipitation from the Stage4 estimates
"""
import pygrib
import mx.DateTime
import iemplot
import numpy
import os, sys
def doday():
"""
Create a plot of precipitation stage4 estimates for some day
"""
sts = mx.DateTime.DateTime(2010,4,1,12)
ets = mx.DateTime.DateTime(2010,9,22,12)
interval = mx.DateTime.RelativeDateTime(days=1)
now = sts
total = None
while now < ets:
fp = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.24h.grib" % (
now.strftime("%Y/%m/%d"),
now.strftime("%Y%m%d%H") )
if os.path.isfile(fp):
lts = now
grbs = pygrib.open(fp)
if total is None:
g = grbs[1]
total = numpy.where(g["values"] > 25.4, 1., 0.)
lats, lons = g.latlons()
else:
total += numpy.where(grbs[1]["values"] > 25.4, 1., 0.)
grbs.close()
now += interval
# Now we dance
cfg = {
'wkColorMap': 'BlAqGrYeOrRe',
'nglSpreadColorStart': -1,
'nglSpreadColorEnd' : 2,
'_MaskZero' : True,
'_midwest' : True,
'lbTitleString' : "[days]",
'_valid' : 'Number of days between 1 Apr and %s' % (
now.strftime("%d %B %Y"),),
'_title' : "NCEP StageIV 24 Hour Rainfall Over 1 inch",
}
# Midwest
tmpfp = iemplot.simple_grid_fill(lons, lats, total, cfg)
iemplot.makefeature(tmpfp)
if __name__ == "__main__":
doday()
|
Add script to run on IEM to compute days of 1+ inch rainfall since 1 Apr"""
Create a plot of today's total precipitation from the Stage4 estimates
"""
import pygrib
import mx.DateTime
import iemplot
import numpy
import os, sys
def doday():
"""
Create a plot of precipitation stage4 estimates for some day
"""
sts = mx.DateTime.DateTime(2010,4,1,12)
ets = mx.DateTime.DateTime(2010,9,22,12)
interval = mx.DateTime.RelativeDateTime(days=1)
now = sts
total = None
while now < ets:
fp = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.24h.grib" % (
now.strftime("%Y/%m/%d"),
now.strftime("%Y%m%d%H") )
if os.path.isfile(fp):
lts = now
grbs = pygrib.open(fp)
if total is None:
g = grbs[1]
total = numpy.where(g["values"] > 25.4, 1., 0.)
lats, lons = g.latlons()
else:
total += numpy.where(grbs[1]["values"] > 25.4, 1., 0.)
grbs.close()
now += interval
# Now we dance
cfg = {
'wkColorMap': 'BlAqGrYeOrRe',
'nglSpreadColorStart': -1,
'nglSpreadColorEnd' : 2,
'_MaskZero' : True,
'_midwest' : True,
'lbTitleString' : "[days]",
'_valid' : 'Number of days between 1 Apr and %s' % (
now.strftime("%d %B %Y"),),
'_title' : "NCEP StageIV 24 Hour Rainfall Over 1 inch",
}
# Midwest
tmpfp = iemplot.simple_grid_fill(lons, lats, total, cfg)
iemplot.makefeature(tmpfp)
if __name__ == "__main__":
doday()
|
<commit_before><commit_msg>Add script to run on IEM to compute days of 1+ inch rainfall since 1 Apr<commit_after>"""
Create a plot of today's total precipitation from the Stage4 estimates
"""
import pygrib
import mx.DateTime
import iemplot
import numpy
import os, sys
def doday():
"""
Create a plot of precipitation stage4 estimates for some day
"""
sts = mx.DateTime.DateTime(2010,4,1,12)
ets = mx.DateTime.DateTime(2010,9,22,12)
interval = mx.DateTime.RelativeDateTime(days=1)
now = sts
total = None
while now < ets:
fp = "/mesonet/ARCHIVE/data/%s/stage4/ST4.%s.24h.grib" % (
now.strftime("%Y/%m/%d"),
now.strftime("%Y%m%d%H") )
if os.path.isfile(fp):
lts = now
grbs = pygrib.open(fp)
if total is None:
g = grbs[1]
total = numpy.where(g["values"] > 25.4, 1., 0.)
lats, lons = g.latlons()
else:
total += numpy.where(grbs[1]["values"] > 25.4, 1., 0.)
grbs.close()
now += interval
# Now we dance
cfg = {
'wkColorMap': 'BlAqGrYeOrRe',
'nglSpreadColorStart': -1,
'nglSpreadColorEnd' : 2,
'_MaskZero' : True,
'_midwest' : True,
'lbTitleString' : "[days]",
'_valid' : 'Number of days between 1 Apr and %s' % (
now.strftime("%d %B %Y"),),
'_title' : "NCEP StageIV 24 Hour Rainfall Over 1 inch",
}
# Midwest
tmpfp = iemplot.simple_grid_fill(lons, lats, total, cfg)
iemplot.makefeature(tmpfp)
if __name__ == "__main__":
doday()
|
|
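The core of the script above is an exceedance count: for every daily grid, each cell whose 24-hour total tops 25.4 mm (one inch) contributes a 1 to a running total, via numpy.where. A self-contained sketch of that accumulation, with stand-in random arrays in place of the GRIB fields:

import numpy as np

threshold = 25.4  # mm, i.e. one inch
daily_grids = [np.random.rand(4, 4) * 50 for _ in range(10)]  # stand-ins

total = np.zeros_like(daily_grids[0])
for grid in daily_grids:
    # Add 1 where the threshold is exceeded, 0 elsewhere.
    total += np.where(grid > threshold, 1.0, 0.0)
# total[i, j] now holds the number of "days" on which cell (i, j)
# exceeded the threshold.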
c20e83fc946c8898d40b0210f71286575891ae94
|
debug_toolbar/panels/__init__.py
|
debug_toolbar/panels/__init__.py
|
from django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
self.toolbar.stats[self.slug].update(stats)
def get_stats(self):
return self.toolbar.stats[self.slug]
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
|
from django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
panel_stats = self.toolbar.stats.get(self.slug)
if panel_stats:
panel_stats.update(stats)
else:
self.toolbar.stats[self.slug] = stats
def get_stats(self):
return self.toolbar.stats.get(self.slug, {})
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
|
Correct some issues with the stats methods
|
Correct some issues with the stats methods
|
Python
|
bsd-3-clause
|
megcunningham/django-debug-toolbar,spookylukey/django-debug-toolbar,megcunningham/django-debug-toolbar,tim-schilling/django-debug-toolbar,calvinpy/django-debug-toolbar,barseghyanartur/django-debug-toolbar,Endika/django-debug-toolbar,spookylukey/django-debug-toolbar,megcunningham/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,guilhermetavares/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,ivelum/django-debug-toolbar,calvinpy/django-debug-toolbar,guilhermetavares/django-debug-toolbar,barseghyanartur/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,peap/django-debug-toolbar,guilhermetavares/django-debug-toolbar,tim-schilling/django-debug-toolbar,stored/django-debug-toolbar,stored/django-debug-toolbar,sidja/django-debug-toolbar,spookylukey/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,peap/django-debug-toolbar,jazzband/django-debug-toolbar,barseghyanartur/django-debug-toolbar,ivelum/django-debug-toolbar,seperman/django-debug-toolbar,peap/django-debug-toolbar,seperman/django-debug-toolbar,Endika/django-debug-toolbar,calvinpy/django-debug-toolbar,pevzi/django-debug-toolbar,tim-schilling/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,ivelum/django-debug-toolbar,jazzband/django-debug-toolbar,pevzi/django-debug-toolbar,pevzi/django-debug-toolbar,sidja/django-debug-toolbar,seperman/django-debug-toolbar,sidja/django-debug-toolbar,jazzband/django-debug-toolbar,stored/django-debug-toolbar,Endika/django-debug-toolbar
|
from django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
self.toolbar.stats[self.slug].update(stats)
def get_stats(self):
return self.toolbar.stats[self.slug]
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
Correct some issues with the stats methods
|
from django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
panel_stats = self.toolbar.stats.get(self.slug)
if panel_stats:
panel_stats.update(stats)
else:
self.toolbar.stats[self.slug] = stats
def get_stats(self):
return self.toolbar.stats.get(self.slug, {})
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
|
<commit_before>from django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
self.toolbar.stats[self.slug].update(stats)
def get_stats(self):
return self.toolbar.stats[self.slug]
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
<commit_msg>Correct some issues with the stats methods<commit_after>
|
from django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
panel_stats = self.toolbar.stats.get(self.slug)
if panel_stats:
panel_stats.update(stats)
else:
self.toolbar.stats[self.slug] = stats
def get_stats(self):
return self.toolbar.stats.get(self.slug, {})
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
|
from django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
self.toolbar.stats[self.slug].update(stats)
def get_stats(self):
return self.toolbar.stats[self.slug]
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
Correct some issues with the stats methodsfrom django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
panel_stats = self.toolbar.stats.get(self.slug)
if panel_stats:
panel_stats.update(stats)
else:
self.toolbar.stats[self.slug] = stats
def get_stats(self):
return self.toolbar.stats.get(self.slug, {})
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
|
<commit_before>from django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
self.toolbar.stats[self.slug].update(stats)
def get_stats(self):
return self.toolbar.stats[self.slug]
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
<commit_msg>Correct some issues with the stats methods<commit_after>from django.template.defaultfilters import slugify
from debug_toolbar.middleware import DebugToolbarMiddleware
class DebugPanel(object):
"""
Base class for debug panels.
"""
# name = Base
has_content = False # If content returns something, set to true in subclass
# We'll maintain a local context instance so we can expose our template
# context variables to panels which need them:
context = {}
# Panel methods
def __init__(self, context={}):
self.context.update(context)
self.toolbar = DebugToolbarMiddleware.get_current()
self.slug = slugify(self.name)
def dom_id(self):
return 'djDebug%sPanel' % (self.name.replace(' ', ''))
def nav_title(self):
"""Title showing in toolbar"""
raise NotImplementedError
def nav_subtitle(self):
"""Subtitle showing until title in toolbar"""
return ''
def title(self):
"""Title showing in panel"""
raise NotImplementedError
def url(self):
raise NotImplementedError
def content(self):
raise NotImplementedError
def record_stats(self, stats):
panel_stats = self.toolbar.stats.get(self.slug)
if panel_stats:
panel_stats.update(stats)
else:
self.toolbar.stats[self.slug] = stats
def get_stats(self):
return self.toolbar.stats.get(self.slug, {})
# Standard middleware methods
def process_request(self, request):
pass
def process_view(self, request, view_func, view_args, view_kwargs):
pass
def process_response(self, request, response):
pass
|
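The fix above replaces direct indexing, self.toolbar.stats[self.slug].update(stats), which raises KeyError the first time a panel records anything, with a get-then-update-or-set. Stripped of the panel machinery, the pattern is (a sketch, not the toolbar's code):

def record_stats(stats_store, slug, stats):
    # Update in place when an entry already exists; otherwise store the
    # new stats dict as the initial entry for this slug.
    existing = stats_store.get(slug)
    if existing:
        existing.update(stats)
    else:
        stats_store[slug] = stats

A near-equivalent one-liner is stats_store.setdefault(slug, {}).update(stats), though that stores a fresh dict rather than the caller's stats object, so the commit's explicit branch is the more faithful reading of the intent.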
e6318e03697034ba40ebd33666c1cf3d2f9a7bc5
|
02_task/sample_tests_v2.py
|
02_task/sample_tests_v2.py
|
import unittest
import solution as s
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = s.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = s.create_variable('x')
y = s.create_variable('y')
added_expression = s.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = s.create_variable('y')
twelve = s.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(s.create_variable('x').evaluate(x=42), 42)
self.assertEqual(s.create_constant(5).evaluate(), 5)
def test_create_constant(self):
const = s.create_constant(4.5)
self.assertEqual(str(const), '4.5')
self.assertEqual(const.evaluate(), 4.5)
def test_expressions(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four/2 + 3.5*(y-2)
d = (3+1j) + y
z = 15 - 2/four * (12 / f)
self.assertEqual(d.evaluate(y=2), 5+1j)
self.assertEqual(f.evaluate(y=2), 2)
self.assertEqual(f.evaluate(y=0), -5)
self.assertEqual(z.evaluate(y=2, x=3), 12.0)
def test_expression_variable_names(self):
x = s.create_variable('x')
y = s.create_variable('y')
z = s.create_variable('z')
f = (z - 2 * (x + 1j)) + 3.5*(y-2)
variables = f.variable_names
expected = ['x', 'y', 'z']
for v in variables:
self.assertTrue(v in expected)
for v in expected:
self.assertTrue(v in variables)
def test_str_method_of_classes(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four + 3.5*(y-2)
self.assertEqual(str(four), '4')
self.assertEqual(str(y), 'y')
self.assertEqual(str(f), '(4 + (3.5 * (y - 2)))')
if __name__ == '__main__':
unittest.main()
|
Add 02-task sample test v2.
|
Add 02-task sample test v2.
|
Python
|
mit
|
pepincho/Python-Course-FMI
|
Add 02-task sample test v2.
|
import unittest
import solution as s
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = s.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = s.create_variable('x')
y = s.create_variable('y')
added_expression = s.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = s.create_variable('y')
twelve = s.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(s.create_variable('x').evaluate(x=42), 42)
self.assertEqual(s.create_constant(5).evaluate(), 5)
def test_create_constant(self):
const = s.create_constant(4.5)
self.assertEqual(str(const), '4.5')
self.assertEqual(const.evaluate(), 4.5)
def test_expressions(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four/2 + 3.5*(y-2)
d = (3+1j) + y
z = 15 - 2/four * (12 / f)
self.assertEqual(d.evaluate(y=2), 5+1j)
self.assertEqual(f.evaluate(y=2), 2)
self.assertEqual(f.evaluate(y=0), -5)
self.assertEqual(z.evaluate(y=2, x=3), 12.0)
def test_expression_variable_names(self):
x = s.create_variable('x')
y = s.create_variable('y')
z = s.create_variable('z')
f = (z - 2 * (x + 1j)) + 3.5*(y-2)
variables = f.variable_names
expected = ['x', 'y', 'z']
for v in variables:
self.assertTrue(v in expected)
for v in expected:
self.assertTrue(v in variables)
def test_str_method_of_classes(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four + 3.5*(y-2)
self.assertEqual(str(four), '4')
self.assertEqual(str(y), 'y')
self.assertEqual(str(f), '(4 + (3.5 * (y - 2)))')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add 02-task sample test v2.<commit_after>
|
import unittest
import solution as s
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = s.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = s.create_variable('x')
y = s.create_variable('y')
added_expression = s.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = s.create_variable('y')
twelve = s.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(s.create_variable('x').evaluate(x=42), 42)
self.assertEqual(s.create_constant(5).evaluate(), 5)
def test_create_constant(self):
const = s.create_constant(4.5)
self.assertEqual(str(const), '4.5')
self.assertEqual(const.evaluate(), 4.5)
def test_expressions(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four/2 + 3.5*(y-2)
d = (3+1j) + y
z = 15 - 2/four * (12 / f)
self.assertEqual(d.evaluate(y=2), 5+1j)
self.assertEqual(f.evaluate(y=2), 2)
self.assertEqual(f.evaluate(y=0), -5)
self.assertEqual(z.evaluate(y=2, x=3), 12.0)
def test_expression_variable_names(self):
x = s.create_variable('x')
y = s.create_variable('y')
z = s.create_variable('z')
f = (z - 2 * (x + 1j)) + 3.5*(y-2)
variables = f.variable_names
expected = ['x', 'y', 'z']
for v in variables:
self.assertTrue(v in expected)
for v in expected:
self.assertTrue(v in variables)
def test_str_method_of_classes(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four + 3.5*(y-2)
self.assertEqual(str(four), '4')
self.assertEqual(str(y), 'y')
self.assertEqual(str(f), '(4 + (3.5 * (y - 2)))')
if __name__ == '__main__':
unittest.main()
|
Add 02-task sample test v2.import unittest
import solution as s
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = s.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = s.create_variable('x')
y = s.create_variable('y')
added_expression = s.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = s.create_variable('y')
twelve = s.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(s.create_variable('x').evaluate(x=42), 42)
self.assertEqual(s.create_constant(5).evaluate(), 5)
def test_create_constant(self):
const = s.create_constant(4.5)
self.assertEqual(str(const), '4.5')
self.assertEqual(const.evaluate(), 4.5)
def test_expressions(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four/2 + 3.5*(y-2)
d = (3+1j) + y
z = 15 - 2/four * (12 / f)
self.assertEqual(d.evaluate(y=2), 5+1j)
self.assertEqual(f.evaluate(y=2), 2)
self.assertEqual(f.evaluate(y=0), -5)
self.assertEqual(z.evaluate(y=2, x=3), 12.0)
def test_expression_variable_names(self):
x = s.create_variable('x')
y = s.create_variable('y')
z = s.create_variable('z')
f = (z - 2 * (x + 1j)) + 3.5*(y-2)
variables = f.variable_names
expected = ['x', 'y', 'z']
for v in variables:
self.assertTrue(v in expected)
for v in expected:
self.assertTrue(v in variables)
def test_str_method_of_classes(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four + 3.5*(y-2)
self.assertEqual(str(four), '4')
self.assertEqual(str(y), 'y')
self.assertEqual(str(f), '(4 + (3.5 * (y - 2)))')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add 02-task sample test v2.<commit_after>import unittest
import solution as s
class SampleTest(unittest.TestCase):
def test_five_plus_three(self):
plus = s.create_operator('+', lambda lhs, rhs: lhs + rhs)
x = s.create_variable('x')
y = s.create_variable('y')
added_expression = s.create_expression((x, plus, y))
self.assertEqual(added_expression.evaluate(x=5, y=3), 8)
def test_operators(self):
y = s.create_variable('y')
twelve = s.create_constant(12)
expression = y + twelve
self.assertEqual(expression.evaluate(y=3), 15)
def test_constant_evaluation(self):
self.assertEqual(s.create_variable('x').evaluate(x=42), 42)
self.assertEqual(s.create_constant(5).evaluate(), 5)
def test_create_constant(self):
const = s.create_constant(4.5)
self.assertEqual(str(const), '4.5')
self.assertEqual(const.evaluate(), 4.5)
def test_expressions(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four/2 + 3.5*(y-2)
d = (3+1j) + y
z = 15 - 2/four * (12 / f)
self.assertEqual(d.evaluate(y=2), 5+1j)
self.assertEqual(f.evaluate(y=2), 2)
self.assertEqual(f.evaluate(y=0), -5)
self.assertEqual(z.evaluate(y=2, x=3), 12.0)
def test_expression_variable_names(self):
x = s.create_variable('x')
y = s.create_variable('y')
z = s.create_variable('z')
f = (z - 2 * (x + 1j)) + 3.5*(y-2)
variables = f.variable_names
expected = ['x', 'y', 'z']
for v in variables:
self.assertTrue(v in expected)
for v in expected:
self.assertTrue(v in variables)
def test_str_method_of_classes(self):
four = s.create_constant(4)
y = s.create_variable('y')
f = four + 3.5*(y-2)
self.assertEqual(str(four), '4')
self.assertEqual(str(y), 'y')
self.assertEqual(str(f), '(4 + (3.5 * (y - 2)))')
if __name__ == '__main__':
unittest.main()
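A note on the API these sample tests assume: the solution module itself is not part of this record. A minimal sketch (my own, one operator only, class names hypothetical) of the shape an implementation could take:

class _Node(object):
    # Build a tree node for `self + other`, wrapping raw numbers as constants.
    def __add__(self, other):
        other = other if isinstance(other, _Node) else _Const(other)
        return _BinOp(self, '+', other, lambda a, b: a + b)

class _Const(_Node):
    def __init__(self, value):
        self.value = value
    def evaluate(self, **env):
        return self.value
    def __str__(self):
        return str(self.value)

class _Var(_Node):
    def __init__(self, name):
        self.name = name
    def evaluate(self, **env):
        return env[self.name]
    def __str__(self):
        return self.name

class _BinOp(_Node):
    def __init__(self, lhs, symbol, rhs, func):
        self.lhs, self.symbol, self.rhs, self.func = lhs, symbol, rhs, func
    def evaluate(self, **env):
        return self.func(self.lhs.evaluate(**env), self.rhs.evaluate(**env))
    def __str__(self):
        return '({0} {1} {2})'.format(self.lhs, self.symbol, self.rhs)

def create_constant(value):
    return _Const(value)

def create_variable(name):
    return _Var(name)

# e.g. str(create_variable('y') + create_constant(12)) == '(y + 12)'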
|
|
ce5a657c4a8205879de8d46fe1c4ea2966808879
|
tests/test_api.py
|
tests/test_api.py
|
import os
import sys
import json
import responses
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of api.py
if os.path.join(MS_WD, 'utils') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'utils'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
import multiscanner
import api
HTTP_OK = 200
HTTP_CREATED = 201
class TestURLCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
def test_index(self):
expected_response = {'Message': 'True'}
resp = self.app.get('/')
self.assertEqual(resp.status_code, HTTP_OK)
self.assertEqual(json.loads(resp.data), expected_response)
|
Add first unit test for api
|
Add first unit test for api
|
Python
|
mpl-2.0
|
jmlong1027/multiscanner,jmlong1027/multiscanner,MITRECND/multiscanner,MITRECND/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,awest1339/multiscanner,mitre/multiscanner,awest1339/multiscanner,awest1339/multiscanner,mitre/multiscanner
|
Add first unit test for api
|
import os
import sys
import json
import responses
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of api.py
if os.path.join(MS_WD, 'utils') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'utils'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
import multiscanner
import api
HTTP_OK = 200
HTTP_CREATED = 201
class TestURLCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
def test_index(self):
expected_response = {'Message': 'True'}
resp = self.app.get('/')
self.assertEqual(resp.status_code, HTTP_OK)
self.assertEqual(json.loads(resp.data), expected_response)
|
<commit_before><commit_msg>Add first unit test for api<commit_after>
|
import os
import sys
import json
import responses
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of api.py
if os.path.join(MS_WD, 'utils') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'utils'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
import multiscanner
import api
HTTP_OK = 200
HTTP_CREATED = 201
class TestURLCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
def test_index(self):
expected_response = {'Message': 'True'}
resp = self.app.get('/')
self.assertEqual(resp.status_code, HTTP_OK)
self.assertEqual(json.loads(resp.data), expected_response)
|
Add first unit test for apiimport os
import sys
import json
import responses
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of api.py
if os.path.join(MS_WD, 'utils') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'utils'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
import multiscanner
import api
HTTP_OK = 200
HTTP_CREATED = 201
class TestURLCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
def test_index(self):
expected_response = {'Message': 'True'}
resp = self.app.get('/')
self.assertEqual(resp.status_code, HTTP_OK)
self.assertEqual(json.loads(resp.data), expected_response)
|
<commit_before><commit_msg>Add first unit test for api<commit_after>import os
import sys
import json
import responses
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of api.py
if os.path.join(MS_WD, 'utils') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'utils'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
import multiscanner
import api
HTTP_OK = 200
HTTP_CREATED = 201
class TestURLCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
def test_index(self):
expected_response = {'Message': 'True'}
resp = self.app.get('/')
self.assertEqual(resp.status_code, HTTP_OK)
self.assertEqual(json.loads(resp.data), expected_response)
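Follow-on tests would reuse the same test client fixture. A hedged sketch of what a POST test added to TestURLCase could look like (the /tasks route and payload are illustrative only, not part of api.py):

    def test_create_task(self):
        payload = json.dumps({'filename': 'sample.bin'})
        resp = self.app.post('/tasks', data=payload,
                             content_type='application/json')
        self.assertEqual(resp.status_code, HTTP_CREATED)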
|
|
77274f00cf1f4c3b99289fa0c21461e207cdfcb8
|
tests/test_zip.py
|
tests/test_zip.py
|
# -*- coding:utf-8 -*-
import os
import glob
import zipfile
from sigal.gallery import Gallery
from sigal.settings import read_settings
CURRENT_DIR = os.path.dirname(__file__)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
SAMPLE_SOURCE = os.path.join(SAMPLE_DIR, 'pictures', 'dir1')
def make_gallery(**kwargs):
default_conf = os.path.join(SAMPLE_DIR, 'sigal.conf.py')
settings = read_settings(default_conf)
settings['source'] = SAMPLE_SOURCE
settings.update(kwargs)
return Gallery(settings)
def test_zipped_correctly(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery='archive.zip')
gallery.build()
zipped1 = glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert len(zipped1) == 1
assert os.path.basename(zipped1[0]) == 'archive.zip'
zip_file = zipfile.ZipFile(zipped1[0], 'r')
expected = ('11.jpg', 'archlinux-kiss-1024x640.png')
for filename in zip_file.namelist():
assert filename in expected
zip_file.close()
zipped2 = glob.glob(os.path.join(outpath, 'test2', '*.zip'))
assert len(zipped2) == 1
assert os.path.basename(zipped2[0]) == 'archive.zip'
def test_no_archive(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery=False)
gallery.build()
assert not glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert not glob.glob(os.path.join(outpath, 'test2', '*.zip'))
|
Add test cases for zipping galleries
|
Add test cases for zipping galleries
|
Python
|
mit
|
Ferada/sigal,cbosdo/sigal,xouillet/sigal,elaOnMars/sigal,kontza/sigal,jdn06/sigal,xouillet/sigal,saimn/sigal,cbosdo/sigal,kontza/sigal,jdn06/sigal,jasuarez/sigal,franek/sigal,Ferada/sigal,jdn06/sigal,saimn/sigal,muggenhor/sigal,elaOnMars/sigal,t-animal/sigal,franek/sigal,kontza/sigal,t-animal/sigal,jasuarez/sigal,Ferada/sigal,muggenhor/sigal,t-animal/sigal,jasuarez/sigal,xouillet/sigal,saimn/sigal,cbosdo/sigal
|
Add test cases for zipping galleries
|
# -*- coding:utf-8 -*-
import os
import glob
import zipfile
from sigal.gallery import Gallery
from sigal.settings import read_settings
CURRENT_DIR = os.path.dirname(__file__)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
SAMPLE_SOURCE = os.path.join(SAMPLE_DIR, 'pictures', 'dir1')
def make_gallery(**kwargs):
default_conf = os.path.join(SAMPLE_DIR, 'sigal.conf.py')
settings = read_settings(default_conf)
settings['source'] = SAMPLE_SOURCE
settings.update(kwargs)
return Gallery(settings)
def test_zipped_correctly(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery='archive.zip')
gallery.build()
zipped1 = glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert len(zipped1) == 1
assert os.path.basename(zipped1[0]) == 'archive.zip'
zip_file = zipfile.ZipFile(zipped1[0], 'r')
expected = ('11.jpg', 'archlinux-kiss-1024x640.png')
for filename in zip_file.namelist():
assert filename in expected
zip_file.close()
zipped2 = glob.glob(os.path.join(outpath, 'test2', '*.zip'))
assert len(zipped2) == 1
assert os.path.basename(zipped2[0]) == 'archive.zip'
def test_no_archive(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery=False)
gallery.build()
assert not glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert not glob.glob(os.path.join(outpath, 'test2', '*.zip'))
|
<commit_before><commit_msg>Add test cases for zipping galleries<commit_after>
|
# -*- coding:utf-8 -*-
import os
import glob
import zipfile
from sigal.gallery import Gallery
from sigal.settings import read_settings
CURRENT_DIR = os.path.dirname(__file__)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
SAMPLE_SOURCE = os.path.join(SAMPLE_DIR, 'pictures', 'dir1')
def make_gallery(**kwargs):
default_conf = os.path.join(SAMPLE_DIR, 'sigal.conf.py')
settings = read_settings(default_conf)
settings['source'] = SAMPLE_SOURCE
settings.update(kwargs)
return Gallery(settings)
def test_zipped_correctly(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery='archive.zip')
gallery.build()
zipped1 = glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert len(zipped1) == 1
assert os.path.basename(zipped1[0]) == 'archive.zip'
zip_file = zipfile.ZipFile(zipped1[0], 'r')
expected = ('11.jpg', 'archlinux-kiss-1024x640.png')
for filename in zip_file.namelist():
assert filename in expected
zip_file.close()
zipped2 = glob.glob(os.path.join(outpath, 'test2', '*.zip'))
assert len(zipped2) == 1
assert os.path.basename(zipped2[0]) == 'archive.zip'
def test_no_archive(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery=False)
gallery.build()
assert not glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert not glob.glob(os.path.join(outpath, 'test2', '*.zip'))
|
Add test cases for zipping galleries# -*- coding:utf-8 -*-
import os
import glob
import zipfile
from sigal.gallery import Gallery
from sigal.settings import read_settings
CURRENT_DIR = os.path.dirname(__file__)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
SAMPLE_SOURCE = os.path.join(SAMPLE_DIR, 'pictures', 'dir1')
def make_gallery(**kwargs):
default_conf = os.path.join(SAMPLE_DIR, 'sigal.conf.py')
settings = read_settings(default_conf)
settings['source'] = SAMPLE_SOURCE
settings.update(kwargs)
return Gallery(settings)
def test_zipped_correctly(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery='archive.zip')
gallery.build()
zipped1 = glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert len(zipped1) == 1
assert os.path.basename(zipped1[0]) == 'archive.zip'
zip_file = zipfile.ZipFile(zipped1[0], 'r')
expected = ('11.jpg', 'archlinux-kiss-1024x640.png')
for filename in zip_file.namelist():
assert filename in expected
zip_file.close()
zipped2 = glob.glob(os.path.join(outpath, 'test2', '*.zip'))
assert len(zipped2) == 1
assert os.path.basename(zipped2[0]) == 'archive.zip'
def test_no_archive(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery=False)
gallery.build()
assert not glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert not glob.glob(os.path.join(outpath, 'test2', '*.zip'))
|
<commit_before><commit_msg>Add test cases for zipping galleries<commit_after># -*- coding:utf-8 -*-
import os
import glob
import zipfile
from sigal.gallery import Gallery
from sigal.settings import read_settings
CURRENT_DIR = os.path.dirname(__file__)
SAMPLE_DIR = os.path.join(CURRENT_DIR, 'sample')
SAMPLE_SOURCE = os.path.join(SAMPLE_DIR, 'pictures', 'dir1')
def make_gallery(**kwargs):
default_conf = os.path.join(SAMPLE_DIR, 'sigal.conf.py')
settings = read_settings(default_conf)
settings['source'] = SAMPLE_SOURCE
settings.update(kwargs)
return Gallery(settings)
def test_zipped_correctly(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery='archive.zip')
gallery.build()
zipped1 = glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert len(zipped1) == 1
assert os.path.basename(zipped1[0]) == 'archive.zip'
zip_file = zipfile.ZipFile(zipped1[0], 'r')
expected = ('11.jpg', 'archlinux-kiss-1024x640.png')
for filename in zip_file.namelist():
assert filename in expected
zip_file.close()
zipped2 = glob.glob(os.path.join(outpath, 'test2', '*.zip'))
assert len(zipped2) == 1
assert os.path.basename(zipped2[0]) == 'archive.zip'
def test_no_archive(tmpdir):
outpath = str(tmpdir)
gallery = make_gallery(destination=outpath,
zip_gallery=False)
gallery.build()
assert not glob.glob(os.path.join(outpath, 'test1', '*.zip'))
assert not glob.glob(os.path.join(outpath, 'test2', '*.zip'))
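One design note on the assertions above: the namelist loop only proves the archive contains no unexpected files, not that every expected file is present. A stricter variant (a sketch reusing the locals of test_zipped_correctly) would compare the full sets:

zip_file = zipfile.ZipFile(zipped1[0], 'r')
assert sorted(zip_file.namelist()) == sorted(expected)
zip_file.close()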
|
|
c3c5128726e86436a7c7413c60c578410750d2c1
|
oscar/apps/customer/auth_backends.py
|
oscar/apps/customer/auth_backends.py
|
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
|
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# Check if we're dealing with an email address
if '@' not in email:
return None
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
|
Fix bug with updated auth backend
|
Fix bug with updated auth backend
Now it checks to see if an email is being submitted.
|
Python
|
bsd-3-clause
|
Jannes123/django-oscar,amirrpp/django-oscar,Bogh/django-oscar,michaelkuty/django-oscar,jinnykoo/christmas,pasqualguerrero/django-oscar,rocopartners/django-oscar,pdonadeo/django-oscar,eddiep1101/django-oscar,adamend/django-oscar,jinnykoo/wuyisj,jmt4/django-oscar,jlmadurga/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj,django-oscar/django-oscar,jinnykoo/wuyisj.com,michaelkuty/django-oscar,solarissmoke/django-oscar,josesanch/django-oscar,pdonadeo/django-oscar,ahmetdaglarbas/e-commerce,jinnykoo/wuyisj,ademuk/django-oscar,ahmetdaglarbas/e-commerce,nfletton/django-oscar,jinnykoo/christmas,bschuon/django-oscar,bschuon/django-oscar,jlmadurga/django-oscar,vovanbo/django-oscar,dongguangming/django-oscar,makielab/django-oscar,sonofatailor/django-oscar,taedori81/django-oscar,thechampanurag/django-oscar,WadeYuChen/django-oscar,binarydud/django-oscar,okfish/django-oscar,jlmadurga/django-oscar,anentropic/django-oscar,vovanbo/django-oscar,marcoantoniooliveira/labweb,django-oscar/django-oscar,lijoantony/django-oscar,pasqualguerrero/django-oscar,nfletton/django-oscar,kapari/django-oscar,jlmadurga/django-oscar,jmt4/django-oscar,bnprk/django-oscar,anentropic/django-oscar,saadatqadri/django-oscar,monikasulik/django-oscar,QLGu/django-oscar,Jannes123/django-oscar,itbabu/django-oscar,solarissmoke/django-oscar,WillisXChen/django-oscar,WillisXChen/django-oscar,DrOctogon/unwash_ecom,pasqualguerrero/django-oscar,sonofatailor/django-oscar,sasha0/django-oscar,manevant/django-oscar,nickpack/django-oscar,WadeYuChen/django-oscar,anentropic/django-oscar,Idematica/django-oscar,MatthewWilkes/django-oscar,QLGu/django-oscar,okfish/django-oscar,manevant/django-oscar,amirrpp/django-oscar,pasqualguerrero/django-oscar,faratro/django-oscar,sasha0/django-oscar,john-parton/django-oscar,sasha0/django-oscar,eddiep1101/django-oscar,QLGu/django-oscar,taedori81/django-oscar,taedori81/django-oscar,spartonia/django-oscar,jinnykoo/wuyisj.com,monikasulik/django-oscar,manevant/django-oscar,pdonadeo/django-oscar,dongguangming/django-oscar,ka7eh/django-oscar,Jannes123/django-oscar,bnprk/django-oscar,marcoantoniooliveira/labweb,binarydud/django-oscar,spartonia/django-oscar,machtfit/django-oscar,mexeniz/django-oscar,machtfit/django-oscar,nickpack/django-oscar,jmt4/django-oscar,kapt/django-oscar,adamend/django-oscar,ka7eh/django-oscar,john-parton/django-oscar,django-oscar/django-oscar,DrOctogon/unwash_ecom,spartonia/django-oscar,kapt/django-oscar,pdonadeo/django-oscar,marcoantoniooliveira/labweb,lijoantony/django-oscar,bnprk/django-oscar,solarissmoke/django-oscar,amirrpp/django-oscar,Idematica/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,itbabu/django-oscar,binarydud/django-oscar,kapt/django-oscar,ka7eh/django-oscar,vovanbo/django-oscar,jinnykoo/wuyisj.com,saadatqadri/django-oscar,sonofatailor/django-oscar,nickpack/django-oscar,DrOctogon/unwash_ecom,faratro/django-oscar,kapari/django-oscar,adamend/django-oscar,elliotthill/django-oscar,saadatqadri/django-oscar,rocopartners/django-oscar,josesanch/django-oscar,john-parton/django-oscar,elliotthill/django-oscar,marcoantoniooliveira/labweb,ademuk/django-oscar,machtfit/django-oscar,WillisXChen/django-oscar,mexeniz/django-oscar,Bogh/django-oscar,jinnykoo/wuyisj,lijoantony/django-oscar,saadatqadri/django-oscar,okfish/django-oscar,binarydud/django-oscar,itbabu/django-oscar,QLGu/django-oscar,taedori81/django-oscar,jinnykoo/christmas,monikasulik/django-oscar,mexeniz/django-oscar,Idematica/django-oscar,sonofatailor/django-oscar,makielab/django-oscar,eddiep1101/django-oscar,Bogh/django-oscar,ahmet
daglarbas/e-commerce,josesanch/django-oscar,mexeniz/django-oscar,MatthewWilkes/django-oscar,michaelkuty/django-oscar,dongguangming/django-oscar,Jannes123/django-oscar,spartonia/django-oscar,rocopartners/django-oscar,elliotthill/django-oscar,faratro/django-oscar,makielab/django-oscar,kapari/django-oscar,vovanbo/django-oscar,WadeYuChen/django-oscar,lijoantony/django-oscar,WillisXChen/django-oscar,bnprk/django-oscar,john-parton/django-oscar,MatthewWilkes/django-oscar,anentropic/django-oscar,ademuk/django-oscar,MatthewWilkes/django-oscar,jinnykoo/wuyisj.com,ka7eh/django-oscar,Bogh/django-oscar,thechampanurag/django-oscar,ademuk/django-oscar,manevant/django-oscar,faratro/django-oscar,WillisXChen/django-oscar,nickpack/django-oscar,makielab/django-oscar,WadeYuChen/django-oscar,nfletton/django-oscar,dongguangming/django-oscar,kapari/django-oscar,django-oscar/django-oscar,jmt4/django-oscar,bschuon/django-oscar,rocopartners/django-oscar,okfish/django-oscar,bschuon/django-oscar,michaelkuty/django-oscar,ahmetdaglarbas/e-commerce,monikasulik/django-oscar,nfletton/django-oscar,adamend/django-oscar,thechampanurag/django-oscar,eddiep1101/django-oscar,thechampanurag/django-oscar,WillisXChen/django-oscar,amirrpp/django-oscar
|
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
Fix bug with updated auth backend
Now it checks to see if an email is being submitted.
|
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# Check if we're dealing with an email address
if '@' not in email:
return None
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
|
<commit_before>from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
<commit_msg>Fix bug with updated auth backend
Now it checks to see if an email is being submitted.<commit_after>
|
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# Check if we're dealing with an email address
if '@' not in email:
return None
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
|
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
Fix bug with updated auth backend
Now it checks to see if an email is being submitted.from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# Check if we're dealing with an email address
if '@' not in email:
return None
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
|
<commit_before>from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
<commit_msg>Fix bug with updated auth backend
Now it checks to see if an email is being submitted.<commit_after>from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class Emailbackend(ModelBackend):
def authenticate(self, email=None, password=None, *args, **kwargs):
if email is None:
if not 'username' in kwargs or kwargs['username'] is None:
return None
email = kwargs['username']
# Check if we're dealing with an email address
if '@' not in email:
return None
# We lowercase the host part as this is what Django does when saving a
# user
local, host = email.split('@')
clean_email = local + '@' + host.lower()
try:
user = User.objects.get(email=clean_email)
except User.DoesNotExist:
return None
if user.check_password(password):
return user
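The fix is easiest to see in isolation. A standalone sketch of the normalisation (my own illustration, not part of the backend):

def _clean(email):
    # Reject plain usernames before unpacking local/host parts.
    if '@' not in email:
        return None
    local, host = email.split('@')
    return local + '@' + host.lower()

assert _clean('Alice@Example.COM') == 'Alice@example.com'
assert _clean('alice') is None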
|
63d2d3cd16cd4f7f2bedb748f84fefa4e6c32661
|
ET_Utils/WATERS_Utils/event_indexing_client.py
|
ET_Utils/WATERS_Utils/event_indexing_client.py
|
"""Wrapper with convenience functions for EPA WATERS Event Indexing Service."""
import json
try:
from urllib2 import urlopen # Python 2
except:
from urllib.request import urlopen # Python 3
_base_url = 'http://ofmpub.epa.gov/waters10/EventIndexing.Service?'
def trace_downstream(start_latitude, start_longitude, max_length_km):
"""Performs Downstream Mainline Indexing."""
# Query EPA
pBatchResolution = '3' # 3 = medium-res (NHDPlus), 2 = hi-res
pFlowDistance = max_length_km
pInputGeometry = 'POINT({0}%20{1})'.format(start_longitude, start_latitude)
url = (_base_url + 'pBatchResolution=' + pBatchResolution +
'&pIndexingType=downstream_main' + '&optOut2D=TRUE' +
'&pFlowDistance=' + str(pFlowDistance) +
'&pInputGeometry=' + pInputGeometry)
response = urlopen(url)
data = response.read()
json_response = json.loads(data)
# todo: handle errors or no results found
# Build GeoJSON from the response
features = []
for line in json_response['output']['line_events']:
feature = {'type': 'Feature',
'geometry': line['shape'],
'properties': {
'reachcode': line['reachcode'],
'fmeasure': line['fmeasure'],
'tmeasure': line['tmeasure']}}
features.append(feature)
geojson = {'type': 'FeatureCollection',
'features': features}
return geojson
if __name__ == '__main__':
from pprint import pprint
lon, lat = (-97.782752, 30.397861) # Bull Creek in zip 78759
length_km = 5
result = trace_downstream(lat, lon, length_km)
print('Line event count: {}'.format(len(result['features'])))
with open('bull_creek.json', 'w') as f:
f.write(json.dumps(result))
|
Add event indexing service client
|
Add event indexing service client
Add script that serves as client to EPA Event Indexing Service. The
script has a convenience function for querying EPA with minimal
arguments and returns GeoJSON.
|
Python
|
mit
|
Harefoot/TurboQUALTX
|
Add event indexing service client
Add script that serves as client to EPA Event Indexing Service. The
script has a convenience function for querying EPA with minimal
arguments and returns GeoJSON.
|
"""Wrapper with convenience functions for EPA WATERS Event Indexing Service."""
import json
try:
from urllib2 import urlopen # Python 2
except:
from urllib.request import urlopen # Python 3
_base_url = 'http://ofmpub.epa.gov/waters10/EventIndexing.Service?'
def trace_downstream(start_latitude, start_longitude, max_length_km):
"""Performs Downstream Mainline Indexing."""
# Query EPA
pBatchResolution = '3' # 3 = medium-res (NHDPlus), 2 = hi-res
pFlowDistance = max_length_km
pInputGeometry = 'POINT({0}%20{1})'.format(start_longitude, start_latitude)
url = (_base_url + 'pBatchResolution=' + pBatchResolution +
'&pIndexingType=downstream_main' + '&optOut2D=TRUE' +
'&pFlowDistance=' + str(pFlowDistance) +
'&pInputGeometry=' + pInputGeometry)
response = urlopen(url)
data = response.read()
json_response = json.loads(data)
# todo: handle errors or no results found
# Build GeoJSON from the response
features = []
for line in json_response['output']['line_events']:
feature = {'type': 'Feature',
'geometry': line['shape'],
'properties': {
'reachcode': line['reachcode'],
'fmeasure': line['fmeasure'],
'tmeasure': line['tmeasure']}}
features.append(feature)
geojson = {'type': 'FeatureCollection',
'features': features}
return geojson
if __name__ == '__main__':
from pprint import pprint
lon, lat = (-97.782752, 30.397861) # Bull Creek in zip 78759
length_km = 5
result = trace_downstream(lat, lon, length_km)
print('Line event count: {}'.format(len(result['features'])))
with open('bull_creek.json', 'w') as f:
f.write(json.dumps(result))
|
<commit_before><commit_msg>Add event indexing service client
Add script that serves as client to EPA Event Indexing Service. The
script has a convenience function for querying EPA with minimal
arguments and returns GeoJSON.<commit_after>
|
"""Wrapper with convenience functions for EPA WATERS Event Indexing Service."""
import json
try:
from urllib2 import urlopen # Python 2
except:
from urllib.request import urlopen # Python 3
_base_url = 'http://ofmpub.epa.gov/waters10/EventIndexing.Service?'
def trace_downstream(start_latitude, start_longitude, max_length_km):
"""Performs Downstream Mainline Indexing."""
# Query EPA
pBatchResolution = '3' # 3 = medium-res (NHDPlus), 2 = hi-res
pFlowDistance = max_length_km
pInputGeometry = 'POINT({0}%20{1})'.format(start_longitude, start_latitude)
url = (_base_url + 'pBatchResolution=' + pBatchResolution +
'&pIndexingType=downstream_main' + '&optOut2D=TRUE' +
'&pFlowDistance=' + str(pFlowDistance) +
'&pInputGeometry=' + pInputGeometry)
response = urlopen(url)
data = response.read()
json_response = json.loads(data)
# todo: handle errors or no results found
# Build GeoJSON from the response
features = []
for line in json_response['output']['line_events']:
feature = {'type': 'Feature',
'geometry': line['shape'],
'properties': {
'reachcode': line['reachcode'],
'fmeasure': line['fmeasure'],
'tmeasure': line['tmeasure']}}
features.append(feature)
geojson = {'type': 'FeatureCollection',
'features': features}
return geojson
if __name__ == '__main__':
from pprint import pprint
lon, lat = (-97.782752, 30.397861) # Bull Creek in zip 78759
length_km = 5
result = trace_downstream(lat, lon, length_km)
print('Line event count: {}'.format(len(result['features'])))
with open('bull_creek.json', 'w') as f:
f.write(json.dumps(result))
|
Add event indexing service client
Add script that serves as client to EPA Event Indexing Service. The
script has a convenience function for querying EPA with minimal
arguments and returns GeoJSON."""Wrapper with convenience functions for EPA WATERS Event Indexing Service."""
import json
try:
from urllib2 import urlopen # Python 2
except:
from urllib.request import urlopen # Python 3
_base_url = 'http://ofmpub.epa.gov/waters10/EventIndexing.Service?'
def trace_downstream(start_latitude, start_longitude, max_length_km):
"""Performs Downstream Mainline Indexing."""
# Query EPA
pBatchResolution = '3' # 3 = medium-res (NHDPlus), 2 = hi-res
pFlowDistance = max_length_km
pInputGeometry = 'POINT({0}%20{1})'.format(start_longitude, start_latitude)
url = (_base_url + 'pBatchResolution=' + pBatchResolution +
'&pIndexingType=downstream_main' + '&optOut2D=TRUE' +
'&pFlowDistance=' + str(pFlowDistance) +
'&pInputGeometry=' + pInputGeometry)
response = urlopen(url)
data = response.read()
json_response = json.loads(data)
# todo: handle errors or no results found
# Build GeoJSON from the response
features = []
for line in json_response['output']['line_events']:
feature = {'type': 'Feature',
'geometry': line['shape'],
'properties': {
'reachcode': line['reachcode'],
'fmeasure': line['fmeasure'],
'tmeasure': line['tmeasure']}}
features.append(feature)
geojson = {'type': 'FeatureCollection',
'features': features}
return geojson
if __name__ == '__main__':
from pprint import pprint
lon, lat = (-97.782752, 30.397861) # Bull Creek in zip 78759
length_km = 5
result = trace_downstream(lat, lon, length_km)
print('Line event count: {}'.format(len(result['features'])))
with open('bull_creek.json', 'w') as f:
f.write(json.dumps(result))
|
<commit_before><commit_msg>Add event indexing service client
Add script that serves as client to EPA Event Indexing Service. The
script has a convenience function for querying EPA with minimal
arguments and returns GeoJSON.<commit_after>"""Wrapper with convenience functions for EPA WATERS Event Indexing Service."""
import json
try:
from urllib2 import urlopen # Python 2
except:
from urllib.request import urlopen # Python 3
_base_url = 'http://ofmpub.epa.gov/waters10/EventIndexing.Service?'
def trace_downstream(start_latitude, start_longitude, max_length_km):
"""Performs Downstream Mainline Indexing."""
# Query EPA
pBatchResolution = '3' # 3 = medium-res (NHDPlus), 2 = hi-res
pFlowDistance = max_length_km
pInputGeometry = 'POINT({0}%20{1})'.format(start_longitude, start_latitude)
url = (_base_url + 'pBatchResolution=' + pBatchResolution +
'&pIndexingType=downstream_main' + '&optOut2D=TRUE' +
'&pFlowDistance=' + str(pFlowDistance) +
'&pInputGeometry=' + pInputGeometry)
response = urlopen(url)
data = response.read()
json_response = json.loads(data)
# todo: handle errors or no results found
# Build GeoJSON from the response
features = []
for line in json_response['output']['line_events']:
feature = {'type': 'Feature',
'geometry': line['shape'],
'properties': {
'reachcode': line['reachcode'],
'fmeasure': line['fmeasure'],
'tmeasure': line['tmeasure']}}
features.append(feature)
geojson = {'type': 'FeatureCollection',
'features': features}
return geojson
if __name__ == '__main__':
from pprint import pprint
lon, lat = (-97.782752, 30.397861) # Bull Creek in zip 78759
length_km = 5
result = trace_downstream(lat, lon, length_km)
print('Line event count: {}'.format(len(result['features'])))
with open('bull_creek.json', 'w') as f:
f.write(json.dumps(result))
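The hand-concatenated query string above is easy to get wrong. An equivalent construction with urlencode (a sketch using the same parameters and the module's _base_url constant, not a change to the service call itself):

try:
    from urllib import urlencode  # Python 2
except ImportError:
    from urllib.parse import urlencode  # Python 3

params = {'pBatchResolution': '3',
          'pIndexingType': 'downstream_main',
          'optOut2D': 'TRUE',
          'pFlowDistance': 5,
          'pInputGeometry': 'POINT(-97.782752 30.397861)'}
url = _base_url + urlencode(params)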
|
|
1dc07f52ea90a50e40d550193f8ac19b5086f208
|
angr/procedures/java_util/random.py
|
angr/procedures/java_util/random.py
|
from ..java import JavaSimProcedure
from angr.engines.soot.values.thisref import SimSootValue_ThisRef
from angr.engines.soot.values.instancefieldref import SimSootValue_InstanceFieldRef
import logging
import claripy
class NextInt(JavaSimProcedure):
__provides__ = (
("java.util.Random", "nextInt(int)"),
)
def run(self, obj, bound):
rand = self.state.solver.BVS('rand', 32)
self.state.solver.add(rand.UGE(0))
self.state.solver.add(rand.ULT(bound))
return rand
|
Add sim procedure for Random.nextInt
|
Add sim procedure for Random.nextInt
|
Python
|
bsd-2-clause
|
iamahuman/angr,iamahuman/angr,angr/angr,angr/angr,iamahuman/angr,schieb/angr,angr/angr,schieb/angr,schieb/angr
|
Add sim procedure for Random.nextInt
|
from ..java import JavaSimProcedure
from angr.engines.soot.values.thisref import SimSootValue_ThisRef
from angr.engines.soot.values.instancefieldref import SimSootValue_InstanceFieldRef
import logging
import claripy
class NextInt(JavaSimProcedure):
__provides__ = (
("java.util.Random", "nextInt(int)"),
)
def run(self, obj, bound):
rand = self.state.solver.BVS('rand', 32)
self.state.solver.add(rand.UGE(0))
self.state.solver.add(rand.ULT(bound))
return rand
|
<commit_before><commit_msg>Add sim procedure for Random.nextInt<commit_after>
|
from ..java import JavaSimProcedure
from angr.engines.soot.values.thisref import SimSootValue_ThisRef
from angr.engines.soot.values.instancefieldref import SimSootValue_InstanceFieldRef
import logging
import claripy
class NextInt(JavaSimProcedure):
__provides__ = (
("java.util.Random", "nextInt(int)"),
)
def run(self, obj, bound):
rand = self.state.solver.BVS('rand', 32)
self.state.solver.add(rand.UGE(0))
self.state.solver.add(rand.ULT(bound))
return rand
|
Add sim procedure for Random.nextIntfrom ..java import JavaSimProcedure
from angr.engines.soot.values.thisref import SimSootValue_ThisRef
from angr.engines.soot.values.instancefieldref import SimSootValue_InstanceFieldRef
import logging
import claripy
class NextInt(JavaSimProcedure):
__provides__ = (
("java.util.Random", "nextInt(int)"),
)
def run(self, obj, bound):
rand = self.state.solver.BVS('rand', 32)
self.state.solver.add(rand.UGE(0))
self.state.solver.add(rand.ULT(bound))
return rand
|
<commit_before><commit_msg>Add sim procedure for Random.nextInt<commit_after>from ..java import JavaSimProcedure
from angr.engines.soot.values.thisref import SimSootValue_ThisRef
from angr.engines.soot.values.instancefieldref import SimSootValue_InstanceFieldRef
import logging
import claripy
class NextInt(JavaSimProcedure):
__provides__ = (
("java.util.Random", "nextInt(int)"),
)
def run(self, obj, bound):
rand = self.state.solver.BVS('rand', 32)
self.state.solver.add(rand.UGE(0))
self.state.solver.add(rand.ULT(bound))
return rand
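The constraint pattern can be sanity-checked outside angr with claripy alone (a sketch; the bound of 10 is arbitrary):

import claripy

solver = claripy.Solver()
rand = claripy.BVS('rand', 32)
solver.add(claripy.UGE(rand, 0))   # always true for unsigned values, kept for symmetry
solver.add(claripy.ULT(rand, 10))
# eval asks for up to 20 concrete solutions; all must fall in [0, 10)
assert all(0 <= v < 10 for v in solver.eval(rand, 20))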
|
|
8d1b233c336298f2929a176d0bc6c7279dc07853
|
Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Util/PeerSelector.py
|
Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Util/PeerSelector.py
|
import sets
import Axon
class PeerSelector(Axon.Component.component):
Inboxes = {"inbox" : "",
"control" : "",
"addPeer" : "",
"peerSet" : ""
}
def __init__(self, localport, localaddress=None):
super(PeerSelector, self).__init__()
self.peers = sets.Set()
self.connectedTo = sets.Set()
if localaddress:
self.local = (localaddress, localport)
else:
self.local = localport
def main(self):
while 1:
if self.dataReady("peerSet"):
data = self.recv("peerSet")
self.peers.update(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if self.dataReady("addPeer"):
data = self.recv("addPeer")
self.peers.add(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if not self.anyReady():
self.pause()
yield 1
def sendConnectMessages(self):
for peer in self.peers.difference(self.connectedTo):
print "Sending connect message - %s:%s" % peer
self.send((peer[0], peer[1], ("Connect", self.local)), "outbox")
|
Add a simple peer selector which automatically connects to any new jam instance it hears about
|
Add a simple peer selector which automatically connects to any new jam instance it hears about
|
Python
|
apache-2.0
|
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
|
Add a simple peer selector which automatically connects to any new jam instance it hears about
|
import sets
import Axon
class PeerSelector(Axon.Component.component):
Inboxes = {"inbox" : "",
"control" : "",
"addPeer" : "",
"peerSet" : ""
}
def __init__(self, localport, localaddress=None):
super(PeerSelector, self).__init__()
self.peers = sets.Set()
self.connectedTo = sets.Set()
if localaddress:
self.local = (localaddress, localport)
else:
self.local = localport
def main(self):
while 1:
if self.dataReady("peerSet"):
data = self.recv("peerSet")
self.peers.update(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if self.dataReady("addPeer"):
data = self.recv("addPeer")
self.peers.add(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if not self.anyReady():
self.pause()
yield 1
def sendConnectMessages(self):
for peer in self.peers.difference(self.connectedTo):
print "Sending connect message - %s:%s" % peer
self.send((peer[0], peer[1], ("Connect", self.local)), "outbox")
|
<commit_before><commit_msg>Add a simple peer selector which automatically connects to any new jam instance it hears about<commit_after>
|
import sets
import Axon
class PeerSelector(Axon.Component.component):
Inboxes = {"inbox" : "",
"control" : "",
"addPeer" : "",
"peerSet" : ""
}
def __init__(self, localport, localaddress=None):
super(PeerSelector, self).__init__()
self.peers = sets.Set()
self.connectedTo = sets.Set()
if localaddress:
self.local = (localaddress, localport)
else:
self.local = localport
def main(self):
while 1:
if self.dataReady("peerSet"):
data = self.recv("peerSet")
self.peers.update(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if self.dataReady("addPeer"):
data = self.recv("addPeer")
self.peers.add(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if not self.anyReady():
self.pause()
yield 1
def sendConnectMessages(self):
for peer in self.peers.difference(self.connectedTo):
print "Sending connect message - %s:%s" % peer
self.send((peer[0], peer[1], ("Connect", self.local)), "outbox")
|
Add a simple peer selector which automatically connects to any new jam instance it hears aboutimport sets
import Axon
class PeerSelector(Axon.Component.component):
Inboxes = {"inbox" : "",
"control" : "",
"addPeer" : "",
"peerSet" : ""
}
def __init__(self, localport, localaddress=None):
super(PeerSelector, self).__init__()
self.peers = sets.Set()
self.connectedTo = sets.Set()
if localaddress:
self.local = (localaddress, localport)
else:
self.local = localport
def main(self):
while 1:
if self.dataReady("peerSet"):
data = self.recv("peerSet")
self.peers.update(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if self.dataReady("addPeer"):
data = self.recv("addPeer")
self.peers.add(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if not self.anyReady():
self.pause()
yield 1
def sendConnectMessages(self):
for peer in self.peers.difference(self.connectedTo):
print "Sending connect message - %s:%s" % peer
self.send((peer[0], peer[1], ("Connect", self.local)), "outbox")
|
<commit_before><commit_msg>Add a simple peer selector which automatically connects to any new jam instance it hears about<commit_after>import sets
import Axon
class PeerSelector(Axon.Component.component):
Inboxes = {"inbox" : "",
"control" : "",
"addPeer" : "",
"peerSet" : ""
}
def __init__(self, localport, localaddress=None):
super(PeerSelector, self).__init__()
self.peers = sets.Set()
self.connectedTo = sets.Set()
if localaddress:
self.local = (localaddress, localport)
else:
self.local = localport
def main(self):
while 1:
if self.dataReady("peerSet"):
data = self.recv("peerSet")
self.peers.update(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if self.dataReady("addPeer"):
data = self.recv("addPeer")
self.peers.add(data)
self.sendConnectMessages()
self.connectedTo.update(self.peers)
if not self.anyReady():
self.pause()
yield 1
def sendConnectMessages(self):
for peer in self.peers.difference(self.connectedTo):
print "Sending connect message - %s:%s" % peer
self.send((peer[0], peer[1], ("Connect", self.local)), "outbox")
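The connect bookkeeping reduces to a set difference; in isolation (using builtin sets rather than the deprecated sets module, addresses illustrative):

peers = set([('10.0.0.1', 2000), ('10.0.0.2', 2000)])
connected_to = set([('10.0.0.1', 2000)])
pending = peers.difference(connected_to)  # only peers not yet contacted
assert pending == set([('10.0.0.2', 2000)])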
|
|
bf15370e98a015fd0a989f7df2be6ba0383fbd78
|
submissions/McLean/myLogic.py
|
submissions/McLean/myLogic.py
|
cats = {
'kb': '''
Name(Felis, Catus)
Name2(Catus, Cat)
Name(Felis, Chaus)
Name2(Chaus, JungleCat)
Name(Panthera, Tigris)
Name2(Tigris, Tiger)
Name(Panthera, Onca)
Name2(Onca, Jaguar)
Name(Panthera, Pardus)
Name2(Pardus, Leopared)
Name(Panthera, Leo)
Name2(Leo, Lion)
Name(Felinae, Lynx)
Name2(Lynx, Lynx)
Name(Acinoyx, Jubatus)
Name2(Jubatus, Cheetah)
Classification(Felidae, Felis)
Classification(Felidae, Panthera)
Classification(Felidae, Felinae)
Classification(Felidae, Acinoyx)
Family(Felidae)
Name(w, x) & Name(x, y) ==> Related(x, y)
Name(w, x) & Name2(x, y) ==> Called(y)
Name(w, x) & Classification(y, w) ==> SameFamily1(y)
Related(w, x) & Related(w, y) ==> SameFamily(x, y)
Name(w, x) & Family(m) ==> TheGenus(w)
Name(g, h) & Family(t) ==> TheSpecies(h)
Name(t, e) & Name(a, s) & SameFamily1(w) ==> TheFamily(w)
''',
'queries':'''
Name(u, i)
Called(c)
TheGenus(j)
TheSpecies(z)
TheFamily(t)
''',
# 'limit': 1,
}
Examples = {
'cats': cats,
}
|
Bring my GitHub up to date with the latest CSP.
|
Bring my GitHub up to date with the latest CSP.
|
Python
|
mit
|
JoeLaMartina/AlphametricProject,NolanBecker/aima-python,AmberJBlue/aima-python,JamesDickenson/aima-python,AmberJBlue/aima-python,SimeonFritz/aima-python,jottenlips/aima-python,chandlercr/aima-python,WmHHooper/aima-python,AmberJBlue/aima-python,abbeymiles/aima-python,WhittKinley/aima-python,WmHHooper/aima-python,grantvk/aima-python,SeanCameronConklin/aima-python,JoeLaMartina/aima-python,WhittKinley/aima-python,zayneanderson/aima-python,SeanCameronConklin/aima-python,armadill-odyssey/aima-python,austinban/aima-python,phaller0513/aima-python,willhess/aima-python,JoeLaMartina/aima-python,armadill-odyssey/aima-python,abbeymiles/aima-python,JoeLaMartina/AlphametricProject,NolanBecker/aima-python,JoeLaMartina/aima-python,JamesDickenson/aima-python,SimeonFritz/aima-python,armadill-odyssey/aima-python,AWPorter/aima-python,grantvk/aima-python,chandlercr/aima-python,WhittKinley/aima-python,zayneanderson/aima-python,NolanBecker/aima-python,grantvk/aima-python,WmHHooper/aima-python,willhess/aima-python,phaller0513/aima-python,austinban/aima-python,Fruit-Snacks/aima-python,phaller0513/aima-python,JamesDickenson/aima-python,SimeonFritz/aima-python,SeanCameronConklin/aima-python,willhess/aima-python,abbeymiles/aima-python,jottenlips/aima-python,WmHHooper/aima-python,Fruit-Snacks/aima-python,JoeLaMartina/AlphametricProject,zayneanderson/aima-python,AWPorter/aima-python,chandlercr/aima-python,jottenlips/aima-python,Fruit-Snacks/aima-python,austinban/aima-python,AWPorter/aima-python
|
Bring my GitHub up to date with the latest CSP.
|
cats = {
'kb': '''
Name(Felis, Catus)
Name2(Catus, Cat)
Name(Felis, Chaus)
Name2(Chaus, JungleCat)
Name(Panthera, Tigris)
Name2(Tigris, Tiger)
Name(Panthera, Onca)
Name2(Onca, Jaguar)
Name(Panthera, Pardus)
Name2(Pardus, Leopared)
Name(Panthera, Leo)
Name2(Leo, Lion)
Name(Felinae, Lynx)
Name2(Lynx, Lynx)
Name(Acinoyx, Jubatus)
Name2(Jubatus, Cheetah)
Classification(Felidae, Felis)
Classification(Felidae, Panthera)
Classification(Felidae, Felinae)
Classification(Felidae, Acinoyx)
Family(Felidae)
Name(w, x) & Name(x, y) ==> Related(x, y)
Name(w, x) & Name2(x, y) ==> Called(y)
Name(w, x) & Classification(y, w) ==> SameFamily1(y)
Related(w, x) & Related(w, y) ==> SameFamily(x, y)
Name(w, x) & Family(m) ==> TheGenus(w)
Name(g, h) & Family(t) ==> TheSpecies(h)
Name(t, e) & Name(a, s) & SameFamily1(w) ==> TheFamily(w)
''',
'queries':'''
Name(u, i)
Called(c)
TheGenus(j)
TheSpecies(z)
TheFamily(t)
''',
# 'limit': 1,
}
Examples = {
'cats': cats,
}
|
<commit_before><commit_msg>Bring my GitHub up to date with the latest CSP.<commit_after>
|
cats = {
'kb': '''
Name(Felis, Catus)
Name2(Catus, Cat)
Name(Felis, Chaus)
Name2(Chaus, JungleCat)
Name(Panthera, Tigris)
Name2(Tigris, Tiger)
Name(Panthera, Onca)
Name2(Onca, Jaguar)
Name(Panthera, Pardus)
Name2(Pardus, Leopared)
Name(Panthera, Leo)
Name2(Leo, Lion)
Name(Felinae, Lynx)
Name2(Lynx, Lynx)
Name(Acinoyx, Jubatus)
Name2(Jubatus, Cheetah)
Classification(Felidae, Felis)
Classification(Felidae, Panthera)
Classification(Felidae, Felinae)
Classification(Felidae, Acinoyx)
Family(Felidae)
Name(w, x) & Name(x, y) ==> Related(x, y)
Name(w, x) & Name2(x, y) ==> Called(y)
Name(w, x) & Classification(y, w) ==> SameFamily1(y)
Related(w, x) & Related(w, y) ==> SameFamily(x, y)
Name(w, x) & Family(m) ==> TheGenus(w)
Name(g, h) & Family(t) ==> TheSpecies(h)
Name(t, e) & Name(a, s) & SameFamily1(w) ==> TheFamily(w)
''',
'queries':'''
Name(u, i)
Called(c)
TheGenus(j)
TheSpecies(z)
TheFamily(t)
''',
# 'limit': 1,
}
Examples = {
'cats': cats,
}
|
Bring my GitHub up to date with the latest CSP.cats = {
'kb': '''
Name(Felis, Catus)
Name2(Catus, Cat)
Name(Felis, Chaus)
Name2(Chaus, JungleCat)
Name(Panthera, Tigris)
Name2(Tigris, Tiger)
Name(Panthera, Onca)
Name2(Onca, Jaguar)
Name(Panthera, Pardus)
Name2(Pardus, Leopared)
Name(Panthera, Leo)
Name2(Leo, Lion)
Name(Felinae, Lynx)
Name2(Lynx, Lynx)
Name(Acinoyx, Jubatus)
Name2(Jubatus, Cheetah)
Classification(Felidae, Felis)
Classification(Felidae, Panthera)
Classification(Felidae, Felinae)
Classification(Felidae, Acinoyx)
Family(Felidae)
Name(w, x) & Name(x, y) ==> Related(x, y)
Name(w, x) & Name2(x, y) ==> Called(y)
Name(w, x) & Classification(y, w) ==> SameFamily1(y)
Related(w, x) & Related(w, y) ==> SameFamily(x, y)
Name(w, x) & Family(m) ==> TheGenus(w)
Name(g, h) & Family(t) ==> TheSpecies(h)
Name(t, e) & Name(a, s) & SameFamily1(w) ==> TheFamily(w)
''',
'queries':'''
Name(u, i)
Called(c)
TheGenus(j)
TheSpecies(z)
TheFamily(t)
''',
# 'limit': 1,
}
Examples = {
'cats': cats,
}
|
<commit_before><commit_msg>Bring my GitHub up to date with the latest CSP.<commit_after>cats = {
'kb': '''
Name(Felis, Catus)
Name2(Catus, Cat)
Name(Felis, Chaus)
Name2(Chaus, JungleCat)
Name(Panthera, Tigris)
Name2(Tigris, Tiger)
Name(Panthera, Onca)
Name2(Onca, Jaguar)
Name(Panthera, Pardus)
Name2(Pardus, Leopared)
Name(Panthera, Leo)
Name2(Leo, Lion)
Name(Felinae, Lynx)
Name2(Lynx, Lynx)
Name(Acinoyx, Jubatus)
Name2(Jubatus, Cheetah)
Classification(Felidae, Felis)
Classification(Felidae, Panthera)
Classification(Felidae, Felinae)
Classification(Felidae, Acinoyx)
Family(Felidae)
Name(w, x) & Name(x, y) ==> Related(x, y)
Name(w, x) & Name2(x, y) ==> Called(y)
Name(w, x) & Classification(y, w) ==> SameFamily1(y)
Related(w, x) & Related(w, y) ==> SameFamily(x, y)
Name(w, x) & Family(m) ==> TheGenus(w)
Name(g, h) & Family(t) ==> TheSpecies(h)
Name(t, e) & Name(a, s) & SameFamily1(w) ==> TheFamily(w)
''',
'queries':'''
Name(u, i)
Called(c)
TheGenus(j)
TheSpecies(z)
TheFamily(t)
''',
# 'limit': 1,
}
Examples = {
'cats': cats,
}
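How a harness might consume such a kb/queries pair with aima-python's logic module (an assumption on my part — FolKB, fol_bc_ask and expr as found in aima-python's logic.py and utils.py; this record does not show the actual grader):

from logic import FolKB, fol_bc_ask
from utils import expr

# Parse the knowledge base, skipping blank lines
clauses = [expr(line.strip()) for line in cats['kb'].splitlines() if line.strip()]
kb = FolKB(clauses)
for line in cats['queries'].splitlines():
    query = line.strip()
    if query:
        # Backward chaining yields one substitution dict per proof found
        for substitution in fol_bc_ask(kb, expr(query)):
            print(substitution)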
|
|
b27ae2b78d5d21e4a1f5e9fbeba5433e62cd4057
|
source/create_prophage_tbl.py
|
source/create_prophage_tbl.py
|
"""
Create a prophage.tbl file from a phispy directory that does not contain one.
"""
import os
import sys
import argparse
from evaluation import make_prophage_tbl
def make_new_prophage_tbl(phispydir):
"""
Make a new prophage table
:param phispydir: the directory to read the input and create the output
:return: nothing
"""
make_prophage_tbl(os.path.join(phispydir, 'prophage_tbl.txt'), os.path.join(phispydir, 'prophage.tbl'))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="create a prophage.tbl file for a PhiSpy directory")
parser.add_argument('-d', help='phispy directory')
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
make_new_prophage_tbl(args.d)
|
Make a new prophage table
|
Make a new prophage table
|
Python
|
mit
|
linsalrob/PhiSpy,linsalrob/PhiSpy,linsalrob/PhiSpy,linsalrob/PhiSpy
|
Make a new prophage table
|
"""
Create a prophage.tbl file from a phispy directory that does not contain one.
"""
import os
import sys
import argparse
from evaluation import make_prophage_tbl
def make_new_prophage_tbl(phispydir):
"""
Make a new prophage table
:param phispydir: the directory to read the input and create the output
:return: nothing
"""
make_prophage_tbl(os.path.join(phispydir, 'prophage_tbl.txt'), os.path.join(phispydir, 'prophage.tbl'))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="create a prophage.tbl file for a PhiSpy directory")
parser.add_argument('-d', help='phispy directory')
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
make_new_prophage_tbl(args.d)
|
<commit_before><commit_msg>Make a new prophage table<commit_after>
|
"""
Create a prophage.tbl file from a phispy directory that does not contain one.
"""
import os
import sys
import argparse
from evaluation import make_prophage_tbl
def make_new_prophage_tbl(phispydir):
"""
Make a new prophage table
:param phispydir: the directory to read the input and create the output
:return: nothing
"""
make_prophage_tbl(os.path.join(phispydir, 'prophage_tbl.txt'), os.path.join(phispydir, 'prophage.tbl'))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="create a prophage.tbl file for a PhiSpy directory")
parser.add_argument('-d', help='phispy directory')
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
make_new_prophage_tbl(args.d)
|
Make a new prophage table"""
Create a prophage.tbl file from a phispy directory that does not contain one.
"""
import os
import sys
import argparse
from evaluation import make_prophage_tbl
def make_new_prophage_tbl(phispydir):
"""
Make a new prophage table
:param phispydir: the directory to read the input and create the output
:return: nothing
"""
make_prophage_tbl(os.path.join(phispydir, 'prophage_tbl.txt'), os.path.join(phispydir, 'prophage.tbl'))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="create a prophage.tbl file for a PhiSpy directory")
parser.add_argument('-d', help='phispy directory')
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
make_new_prophage_tbl(args.d)
|
<commit_before><commit_msg>Make a new prophage table<commit_after>"""
Create a prophage.tbl file from a phispy directory that does not contain one.
"""
import os
import sys
import argparse
from evaluation import make_prophage_tbl
def make_new_prophage_tbl(phispydir):
"""
Make a new prophage table
:param phispydir: the directory to read the input and create the output
:return: nothing
"""
make_prophage_tbl(os.path.join(phispydir, 'prophage_tbl.txt'), os.path.join(phispydir, 'prophage.tbl'))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="create a prophage.tbl file for a PhiSpy directory")
parser.add_argument('-d', help='phispy directory')
parser.add_argument('-v', help='verbose output', action="store_true")
args = parser.parse_args()
make_new_prophage_tbl(args.d)
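For reference, a hypothetical invocation of the finished script (the directory name is illustrative):

# python source/create_prophage_tbl.py -d phispy_output_dir -v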
|
|
60a27341bb469943271047781a4be8a38b39825a
|
sara_flexbe_states/src/sara_flexbe_states/WonderlandGetPersonById.py
|
sara_flexbe_states/src/sara_flexbe_states/WonderlandGetPersonById.py
|
#!/usr/bin/env python
# encoding=utf8
import json
import requests
from flexbe_core import EventState
from rospy import logerr, loginfo
from sara_msgs.msg import Entity
"""
Created on 15/05/2018
@author: Lucas Maurice
"""
class WonderlandGetPersonById(EventState):
'''
Find a person by ID.
># id int Wonderland id of the person to find
#> entity sara_msgs/Entity
<= done return when one entity exists
<= none return when no entity exists
<= error return when error reading data
'''
def __init__(self):
# See example_state.py for basic explanations.
super(WonderlandGetPersonById, self).__init__(outcomes=['done', 'none', 'error'],
input_keys=['id'], output_keys=['entity'])
def execute(self, userdata):
# Generate URL to contact
url = "http://wonderland:8000/api/people/?peopleId=" + str(userdata.id)
# try the request
try:
response = requests.get(url)
except requests.exceptions.RequestException as e:
logerr(e)
return 'error'
# parse parameter json data
data = json.loads(response.content)
loginfo(data)
if 'peopleId' in data:
userdata.entity = self.generate_entity(data)
return 'done'
else:
return 'none'
@staticmethod
def generate_entity(data):
entity = Entity()
entity.wonderlandId = data['peopleId']
entity.face.id = data['peopleRecognitionId']
if 'peopleColor' in data and data['peopleColor'] is not None:
entity.color = data['peopleColor'].encode('ascii', 'ignore')
if 'peopleName' in data:
entity.aliases.append(data['peopleName'].encode('ascii', 'ignore'))
if 'peoplePose' in data and data['peoplePose'] is not None:
entity.pose = data['peoplePose'].encode('ascii', 'ignore')
entity.poseProbability = data['peopleGenderAccuracy']
if 'peopleGender' in data and data['peopleGender'] is not None:
entity.face.gender = data['peopleGender'].encode('ascii', 'ignore')
entity.face.genderProbability = data['peopleGenderAccuracy']
entity.isOperator = data['peopleIsOperator']
loginfo(entity)
return entity
|
Create the state for getting a person from the database.
|
Create the state for getting a person from the database.
|
Python
|
bsd-3-clause
|
WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors
|
Create the state for getting a person from the database.
|
#!/usr/bin/env python
# encoding=utf8
import json
import requests
from flexbe_core import EventState
from rospy import logerr, loginfo
from sara_msgs.msg import Entity
"""
Created on 15/05/2018
@author: Lucas Maurice
"""
class WonderlandGetPersonById(EventState):
'''
Find a person by ID.
># id int Wonderland id of the person to find
#> entity sara_msgs/Entity
<= done return when one entity exists
<= none return when no entity exists
<= error return when error reading data
'''
def __init__(self):
# See example_state.py for basic explanations.
super(WonderlandGetPersonById, self).__init__(outcomes=['done', 'none', 'error'],
input_keys=['id'], output_keys=['entity'])
def execute(self, userdata):
# Generate URL to contact
url = "http://wonderland:8000/api/people/?peopleId=" + str(userdata.id)
# try the request
try:
response = requests.get(url)
except requests.exceptions.RequestException as e:
logerr(e)
return 'error'
# parse parameter json data
data = json.loads(response.content)
loginfo(data)
if 'peopleId' in data:
userdata.entity = self.generate_entity(data)
return 'done'
else:
return 'none'
@staticmethod
def generate_entity(data):
entity = Entity()
entity.wonderlandId = data['peopleId']
entity.face.id = data['peopleRecognitionId']
if 'peopleColor' in data and data['peopleColor'] is not None:
entity.color = data['peopleColor'].encode('ascii', 'ignore')
if 'peopleName' in data:
entity.aliases.append(data['peopleName'].encode('ascii', 'ignore'))
if 'peoplePose' in data and data['peoplePose'] is not None:
entity.pose = data['peoplePose'].encode('ascii', 'ignore')
entity.poseProbability = data['peopleGenderAccuracy']
if 'peopleGender' in data and data['peopleGender'] is not None:
entity.face.gender = data['peopleGender'].encode('ascii', 'ignore')
entity.face.genderProbability = data['peopleGenderAccuracy']
entity.isOperator = data['peopleIsOperator']
loginfo(entity)
return entity
|
<commit_before><commit_msg>Create the state for getting a person from the database.<commit_after>
|
#!/usr/bin/env python
# encoding=utf8
import json
import requests
from flexbe_core import EventState
from rospy import logerr, loginfo
from sara_msgs.msg import Entity
"""
Created on 15/05/2018
@author: Lucas Maurice
"""
class WonderlandGetPersonById(EventState):
'''
Find a person by ID.
># id int Recognition name of the object
#> entity sara_msgs/Entity
<= done return when one entity exists
<= none return when no entity exists
<= error return when an error occurs reading data
'''
def __init__(self):
# See example_state.py for basic explanations.
super(WonderlandGetPersonById, self).__init__(outcomes=['done', 'none', 'error'],
input_keys=['id'], output_keys=['entity'])
def execute(self, userdata):
# Generate URL to contact
url = "http://wonderland:8000/api/people/?peopleId=" + str(userdata.id)
# try the request
try:
response = requests.get(url)
except requests.exceptions.RequestException as e:
logerr(e)
return 'error'
# parse the returned JSON data
data = json.loads(response.content)
loginfo(data)
if 'peopleId' in data:
userdata.entity = self.generate_entity(data)
return 'done'
else:
return 'none'
@staticmethod
def generate_entity(data):
entity = Entity()
entity.wonderlandId = data['peopleId']
entity.face.id = data['peopleRecognitionId']
if 'peopleColor' in data and data['peopleColor'] is not None:
entity.color = data['peopleColor'].encode('ascii', 'ignore')
if 'peopleName' in data:
entity.aliases.append(data['peopleName'].encode('ascii', 'ignore'))
if 'peoplePose' in data and data['peoplePose'] is not None:
entity.pose = data['peoplePose'].encode('ascii', 'ignore')
entity.poseProbability = data['peopleGenderAccuracy']
if 'peopleGender' in data and data['peopleGender'] is not None:
entity.face.gender = data['peopleGender'].encode('ascii', 'ignore')
entity.face.genderProbability = data['peopleGenderAccuracy']
entity.isOperator = data['peopleIsOperator']
loginfo(entity)
return entity
|
Create the state for getting a person from the database.#!/usr/bin/env python
# encoding=utf8
import json
import requests
from flexbe_core import EventState
from rospy import logerr, loginfo
from sara_msgs.msg import Entity
"""
Created on 15/05/2018
@author: Lucas Maurice
"""
class WonderlandGetPersonById(EventState):
'''
Find a person by ID.
># id int Recognition name of the object
#> entity sara_msgs/Entity
<= done return when one entity exists
<= none return when no entity exists
<= error return when an error occurs reading data
'''
def __init__(self):
# See example_state.py for basic explanations.
super(WonderlandGetPersonById, self).__init__(outcomes=['done', 'none', 'error'],
input_keys=['id'], output_keys=['entity'])
def execute(self, userdata):
# Generate URL to contact
url = "http://wonderland:8000/api/people/?peopleId=" + str(userdata.id)
# try the request
try:
response = requests.get(url)
except requests.exceptions.RequestException as e:
logerr(e)
return 'error'
# parse the returned JSON data
data = json.loads(response.content)
loginfo(data)
if 'peopleId' in data:
userdata.entity = self.generate_entity(data)
return 'done'
else:
return 'none'
@staticmethod
def generate_entity(data):
entity = Entity()
entity.wonderlandId = data['peopleId']
entity.face.id = data['peopleRecognitionId']
if 'peopleColor' in data and data['peopleColor'] is not None:
entity.color = data['peopleColor'].encode('ascii', 'ignore')
if 'peopleName' in data:
entity.aliases.append(data['peopleName'].encode('ascii', 'ignore'))
if 'peoplePose' in data and data['peoplePose'] is not None:
entity.pose = data['peoplePose'].encode('ascii', 'ignore')
entity.poseProbability = data['peopleGenderAccuracy']
if 'peopleGender' in data and data['peopleGender'] is not None:
entity.face.gender = data['peopleGender'].encode('ascii', 'ignore')
entity.face.genderProbability = data['peopleGenderAccuracy']
entity.isOperator = data['peopleIsOperator']
loginfo(entity)
return entity
|
<commit_before><commit_msg>Create the state for getting a person from the database.<commit_after>#!/usr/bin/env python
# encoding=utf8
import json
import requests
from flexbe_core import EventState
from rospy import logerr, loginfo
from sara_msgs.msg import Entity
"""
Created on 15/05/2018
@author: Lucas Maurice
"""
class WonderlandGetPersonById(EventState):
'''
Find a person by ID.
># id int Recognition name of the object
#> entity sara_msgs/Entity
<= done return when one entity exists
<= none return when no entity exists
<= error return when an error occurs reading data
'''
def __init__(self):
# See example_state.py for basic explanations.
super(WonderlandGetPersonById, self).__init__(outcomes=['done', 'none', 'error'],
input_keys=['id'], output_keys=['entity'])
def execute(self, userdata):
# Generate URL to contact
url = "http://wonderland:8000/api/people/?peopleId=" + str(userdata.id)
# try the request
try:
response = requests.get(url)
except requests.exceptions.RequestException as e:
logerr(e)
return 'error'
# parse the returned JSON data
data = json.loads(response.content)
loginfo(data)
if 'peopleId' in data:
userdata.entity = self.generate_entity(data)
return 'done'
else:
return 'none'
@staticmethod
def generate_entity(data):
entity = Entity()
entity.wonderlandId = data['peopleId']
entity.face.id = data['peopleRecognitionId']
if 'peopleColor' in data and data['peopleColor'] is not None:
entity.color = data['peopleColor'].encode('ascii', 'ignore')
if 'peopleName' in data:
entity.aliases.append(data['peopleName'].encode('ascii', 'ignore'))
if 'peoplePose' in data and data['peoplePose'] is not None:
entity.pose = data['peoplePose'].encode('ascii', 'ignore')
entity.poseProbability = data['peopleGenderAccuracy']
if 'peopleGender' in data and data['peopleGender'] is not None:
entity.face.gender = data['peopleGender'].encode('ascii', 'ignore')
entity.face.genderProbability = data['peopleGenderAccuracy']
entity.isOperator = data['peopleIsOperator']
loginfo(entity)
return entity
|
|
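The state above follows a common defensive-fetch pattern: wrap the HTTP call in a try/except around requests, then branch on whether the expected key is present in the decoded payload. A minimal standalone sketch of the same idea follows; the helper name, the params/timeout arguments, and the None fallback are illustrative additions, not part of the record above.

import requests

def fetch_person(person_id, base_url="http://wonderland:8000/api/people/"):
    """Return the decoded record on success, or None on any failure."""
    try:
        # A timeout keeps a stalled service from hanging the caller.
        response = requests.get(base_url, params={"peopleId": person_id}, timeout=5)
        response.raise_for_status()
    except requests.exceptions.RequestException:
        return None
    data = response.json()
    # Mirror the state's check: a payload without the key means "none".
    return data if "peopleId" in data else None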
99d6a8d2c1c59dcac90ddd9b3958ed5d74d66a2a
|
tests/integration/test_sns.py
|
tests/integration/test_sns.py
|
import os
from asyncaws import SNS
from tornado.testing import AsyncTestCase, gen_test
from random import randint
aws_key_id = os.environ['AWS_ACCESS_KEY_ID']
aws_key_secret = os.environ['AWS_SECRET_ACCESS_KEY']
aws_region = os.environ['AWS_REGION']
class TestSNS(AsyncTestCase):
@classmethod
def setUpClass(cls):
cls.sns = SNS(aws_key_id, aws_key_secret, aws_region, async=False)
cls.topic_name = "test-topic-%s" % randint(1000, 9999)
cls.topic_arn = cls.sns.create_topic(cls.topic_name)
@classmethod
def tearDownClass(cls):
cls.sns.delete_topic(cls.topic_arn)
@gen_test
def test_topic_actions(self):
self.assertTrue(self.topic_arn.startswith('arn:'))
|
Add integration test for sns
|
Add integration test for sns
|
Python
|
mit
|
MA3STR0/AsyncAWS
|
Add integration test for sns
|
import os
from asyncaws import SNS
from tornado.testing import AsyncTestCase, gen_test
from random import randint
aws_key_id = os.environ['AWS_ACCESS_KEY_ID']
aws_key_secret = os.environ['AWS_SECRET_ACCESS_KEY']
aws_region = os.environ['AWS_REGION']
class TestSNS(AsyncTestCase):
@classmethod
def setUpClass(cls):
cls.sns = SNS(aws_key_id, aws_key_secret, aws_region, async=False)
cls.topic_name = "test-topic-%s" % randint(1000, 9999)
cls.topic_arn = cls.sns.create_topic(cls.topic_name)
@classmethod
def tearDownClass(cls):
cls.sns.delete_topic(cls.topic_arn)
@gen_test
def test_topic_actions(self):
self.assertTrue(self.topic_arn.startswith('arn:'))
|
<commit_before><commit_msg>Add integration test for sns<commit_after>
|
import os
from asyncaws import SNS
from tornado.testing import AsyncTestCase, gen_test
from random import randint
aws_key_id = os.environ['AWS_ACCESS_KEY_ID']
aws_key_secret = os.environ['AWS_SECRET_ACCESS_KEY']
aws_region = os.environ['AWS_REGION']
class TestSNS(AsyncTestCase):
@classmethod
def setUpClass(cls):
cls.sns = SNS(aws_key_id, aws_key_secret, aws_region, async=False)
cls.topic_name = "test-topic-%s" % randint(1000, 9999)
cls.topic_arn = cls.sns.create_topic(cls.topic_name)
@classmethod
def tearDownClass(cls):
cls.sns.delete_topic(cls.topic_arn)
@gen_test
def test_topic_actions(self):
self.assertTrue(self.topic_arn.startswith('arn:'))
|
Add integration test for snsimport os
from asyncaws import SNS
from tornado.testing import AsyncTestCase, gen_test
from random import randint
aws_key_id = os.environ['AWS_ACCESS_KEY_ID']
aws_key_secret = os.environ['AWS_SECRET_ACCESS_KEY']
aws_region = os.environ['AWS_REGION']
class TestSNS(AsyncTestCase):
@classmethod
def setUpClass(cls):
cls.sns = SNS(aws_key_id, aws_key_secret, aws_region, async=False)
cls.topic_name = "test-topic-%s" % randint(1000, 9999)
cls.topic_arn = cls.sns.create_topic(cls.topic_name)
@classmethod
def tearDownClass(cls):
cls.sns.delete_topic(cls.topic_arn)
@gen_test
def test_topic_actions(self):
self.assertTrue(self.topic_arn.startswith('arn:'))
|
<commit_before><commit_msg>Add integration test for sns<commit_after>import os
from asyncaws import SNS
from tornado.testing import AsyncTestCase, gen_test
from random import randint
aws_key_id = os.environ['AWS_ACCESS_KEY_ID']
aws_key_secret = os.environ['AWS_SECRET_ACCESS_KEY']
aws_region = os.environ['AWS_REGION']
class TestSNS(AsyncTestCase):
@classmethod
def setUpClass(cls):
cls.sns = SNS(aws_key_id, aws_key_secret, aws_region, async=False)
cls.topic_name = "test-topic-%s" % randint(1000, 9999)
cls.topic_arn = cls.sns.create_topic(cls.topic_name)
@classmethod
def tearDownClass(cls):
cls.sns.delete_topic(cls.topic_arn)
@gen_test
def test_topic_actions(self):
self.assertTrue(self.topic_arn.startswith('arn:'))
|
|
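The test above creates one SNS topic per class in setUpClass and removes it in tearDownClass, so every test method shares a single expensive external resource, and the random suffix keeps parallel runs from colliding on topic names. A stdlib-only sketch of that lifecycle; the dict is a stand-in for the real AWS client, which this sketch deliberately does not touch.

import unittest
from random import randint

class SharedFixtureTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # Created once for the whole class, like the SNS topic above.
        cls.topic_name = "test-topic-%s" % randint(1000, 9999)
        cls.topic = {"name": cls.topic_name}  # stand-in for create_topic()

    @classmethod
    def tearDownClass(cls):
        cls.topic = None  # stand-in for delete_topic()

    def test_topic_was_created(self):
        self.assertTrue(self.topic["name"].startswith("test-topic-"))

if __name__ == "__main__":
    unittest.main()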
45685a739efdd215623ca1364f979ed3b5444367
|
tests/test_deploy.py
|
tests/test_deploy.py
|
#!/usr/bin/env python
# coding=utf-8
import unittest
from shub import deploy
from click.testing import CliRunner
from mock import Mock
class DeployTest(unittest.TestCase):
def setUp(self):
self.runner = CliRunner()
def test_fails_when_deploy_is_invoked_outside_of_a_scrapy_project(self):
# given there's no scrapy.cfg file in the current folder
with self.runner.isolated_filesystem():
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(1, result.exit_code)
def test_parses_project_cfg_and_uploads_egg(self):
# given
valid_scrapy_cfg = """
[deploy]
username = API_KEY
project = -1
[settings]
default = project.settings
"""
with self.runner.isolated_filesystem():
with open('scrapy.cfg', 'w') as f:
f.write(valid_scrapy_cfg)
deploy.make_deploy_request = Mock()
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(0, result.exit_code)
if __name__ == '__main__':
unittest.main()
|
Add simple tests for shub deploy
|
Add simple tests for shub deploy
|
Python
|
bsd-3-clause
|
scrapinghub/shub
|
Add simple tests for shub deploy
|
#!/usr/bin/env python
# coding=utf-8
import unittest
from shub import deploy
from click.testing import CliRunner
from mock import Mock
class DeployTest(unittest.TestCase):
def setUp(self):
self.runner = CliRunner()
def test_fails_when_deploy_is_invoked_outside_of_a_scrapy_project(self):
# given there's no scrapy.cfg file in the current folder
with self.runner.isolated_filesystem():
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(1, result.exit_code)
def test_parses_project_cfg_and_uploads_egg(self):
# given
valid_scrapy_cfg = """
[deploy]
username = API_KEY
project = -1
[settings]
default = project.settings
"""
with self.runner.isolated_filesystem():
with open('scrapy.cfg', 'w') as f:
f.write(valid_scrapy_cfg)
deploy.make_deploy_request = Mock()
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(0, result.exit_code)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add simple tests for shub deploy<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
import unittest
from shub import deploy
from click.testing import CliRunner
from mock import Mock
class DeployTest(unittest.TestCase):
def setUp(self):
self.runner = CliRunner()
def test_fails_when_deploy_is_invoked_outside_of_a_scrapy_project(self):
# given there's no scrapy.cfg file in the current folder
with self.runner.isolated_filesystem():
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(1, result.exit_code)
def test_parses_project_cfg_and_uploads_egg(self):
# given
valid_scrapy_cfg = """
[deploy]
username = API_KEY
project = -1
[settings]
default = project.settings
"""
with self.runner.isolated_filesystem():
with open('scrapy.cfg', 'w') as f:
f.write(valid_scrapy_cfg)
deploy.make_deploy_request = Mock()
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(0, result.exit_code)
if __name__ == '__main__':
unittest.main()
|
Add simple tests for shub deploy#!/usr/bin/env python
# coding=utf-8
import unittest
from shub import deploy
from click.testing import CliRunner
from mock import Mock
class DeployTest(unittest.TestCase):
def setUp(self):
self.runner = CliRunner()
def test_fails_when_deploy_is_invoked_outside_of_a_scrapy_project(self):
# given there's no scrapy.cfg file in the current folder
with self.runner.isolated_filesystem():
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(1, result.exit_code)
def test_parses_project_cfg_and_uploads_egg(self):
# given
valid_scrapy_cfg = """
[deploy]
username = API_KEY
project = -1
[settings]
default = project.settings
"""
with self.runner.isolated_filesystem():
with open('scrapy.cfg', 'w') as f:
f.write(valid_scrapy_cfg)
deploy.make_deploy_request = Mock()
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(0, result.exit_code)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add simple tests for shub deploy<commit_after>#!/usr/bin/env python
# coding=utf-8
import unittest
from shub import deploy
from click.testing import CliRunner
from mock import Mock
class DeployTest(unittest.TestCase):
def setUp(self):
self.runner = CliRunner()
def test_fails_when_deploy_is_invoked_outside_of_a_scrapy_project(self):
# given there's no scrapy.cfg file in the current folder
with self.runner.isolated_filesystem():
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(1, result.exit_code)
def test_parses_project_cfg_and_uploads_egg(self):
# given
valid_scrapy_cfg = """
[deploy]
username = API_KEY
project = -1
[settings]
default = project.settings
"""
with self.runner.isolated_filesystem():
with open('scrapy.cfg', 'w') as f:
f.write(valid_scrapy_cfg)
deploy.make_deploy_request = Mock()
# when
result = self.runner.invoke(deploy.cli)
# then
self.assertEqual(0, result.exit_code)
if __name__ == '__main__':
unittest.main()
|
|
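Both tests above lean on click's CliRunner and its isolated_filesystem() context manager, which runs the command inside an empty temporary directory so the presence or absence of scrapy.cfg is fully controlled. A self-contained sketch of the technique with a toy command; the command body only mimics shub's config check and is not its actual logic.

import os
import click
from click.testing import CliRunner

@click.command()
def cli():
    # Toy stand-in: fail unless a config file exists in the cwd.
    if not os.path.exists("scrapy.cfg"):
        raise SystemExit(1)
    click.echo("ok")

def test_fails_without_config():
    runner = CliRunner()
    with runner.isolated_filesystem():  # empty temp dir, cleaned up on exit
        assert runner.invoke(cli).exit_code == 1

def test_passes_with_config():
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open("scrapy.cfg", "w") as f:
            f.write("[deploy]\n")
        assert runner.invoke(cli).exit_code == 0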
b90ef0acfec29aaf317dadce7122e97c8cc103c4
|
tests/test_orbits.py
|
tests/test_orbits.py
|
##########################TESTS ON MULTIPLE ORBITS#############################
import numpy
from galpy import potential
# Test that integrating Orbits agrees with integrating multiple Orbit instances
def test_integration_1d():
from galpy.orbit import Orbit, Orbits
times= numpy.linspace(0.,10.,1001)
orbits_list= [Orbit([1.,0.1]),Orbit([0.1,1.]),Orbit([-0.2,0.3])]
orbits= Orbits(orbits_list)
# Integrate as Orbits
orbits.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Integrate as multiple Orbits
for o in orbits_list:
o.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Compare
for ii in range(len(orbits)):
assert numpy.amax(numpy.fabs(orbits_list[ii].x(times)-orbits.x(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
assert numpy.amax(numpy.fabs(orbits_list[ii].vx(times)-orbits.vx(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
return None
|
Test that 1D orbit integration agrees between Orbit and Orbits
|
Test that 1D orbit integration agrees between Orbit and Orbits
|
Python
|
bsd-3-clause
|
jobovy/galpy,jobovy/galpy,jobovy/galpy,jobovy/galpy
|
Test that 1D orbit integration agrees between Orbit and Orbits
|
##########################TESTS ON MULTIPLE ORBITS#############################
import numpy
from galpy import potential
# Test that integrating Orbits agrees with integrating multiple Orbit instances
def test_integration_1d():
from galpy.orbit import Orbit, Orbits
times= numpy.linspace(0.,10.,1001)
orbits_list= [Orbit([1.,0.1]),Orbit([0.1,1.]),Orbit([-0.2,0.3])]
orbits= Orbits(orbits_list)
# Integrate as Orbits
orbits.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Integrate as multiple Orbits
for o in orbits_list:
o.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Compare
for ii in range(len(orbits)):
assert numpy.amax(numpy.fabs(orbits_list[ii].x(times)-orbits.x(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
assert numpy.amax(numpy.fabs(orbits_list[ii].vx(times)-orbits.vx(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
return None
|
<commit_before><commit_msg>Test that 1D orbit integration agrees between Orbit and Orbits<commit_after>
|
##########################TESTS ON MULTIPLE ORBITS#############################
import numpy
from galpy import potential
# Test that integrating Orbits agrees with integrating multiple Orbit instances
def test_integration_1d():
from galpy.orbit import Orbit, Orbits
times= numpy.linspace(0.,10.,1001)
orbits_list= [Orbit([1.,0.1]),Orbit([0.1,1.]),Orbit([-0.2,0.3])]
orbits= Orbits(orbits_list)
# Integrate as Orbits
orbits.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Integrate as multiple Orbits
for o in orbits_list:
o.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Compare
for ii in range(len(orbits)):
assert numpy.amax(numpy.fabs(orbits_list[ii].x(times)-orbits.x(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
assert numpy.amax(numpy.fabs(orbits_list[ii].vx(times)-orbits.vx(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
return None
|
Test that 1D orbit integration agrees between Orbit and Orbits##########################TESTS ON MULTIPLE ORBITS#############################
import numpy
from galpy import potential
# Test that integrating Orbits agrees with integrating multiple Orbit instances
def test_integration_1d():
from galpy.orbit import Orbit, Orbits
times= numpy.linspace(0.,10.,1001)
orbits_list= [Orbit([1.,0.1]),Orbit([0.1,1.]),Orbit([-0.2,0.3])]
orbits= Orbits(orbits_list)
# Integrate as Orbits
orbits.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Integrate as multiple Orbits
for o in orbits_list:
o.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Compare
for ii in range(len(orbits)):
assert numpy.amax(numpy.fabs(orbits_list[ii].x(times)-orbits.x(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
assert numpy.amax(numpy.fabs(orbits_list[ii].vx(times)-orbits.vx(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
return None
|
<commit_before><commit_msg>Test that 1D orbit integration agrees between Orbit and Orbits<commit_after>##########################TESTS ON MULTIPLE ORBITS#############################
import numpy
from galpy import potential
# Test that integrating Orbits agrees with integrating multiple Orbit instances
def test_integration_1d():
from galpy.orbit import Orbit, Orbits
times= numpy.linspace(0.,10.,1001)
orbits_list= [Orbit([1.,0.1]),Orbit([0.1,1.]),Orbit([-0.2,0.3])]
orbits= Orbits(orbits_list)
# Integrate as Orbits
orbits.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Integrate as multiple Orbits
for o in orbits_list:
o.integrate(times,
potential.toVerticalPotential(potential.MWPotential2014,1.))
# Compare
for ii in range(len(orbits)):
assert numpy.amax(numpy.fabs(orbits_list[ii].x(times)-orbits.x(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
assert numpy.amax(numpy.fabs(orbits_list[ii].vx(times)-orbits.vx(times)[ii])) < 1e-10, 'Integration of multiple orbits as Orbits does not agree with integrating multiple orbits'
return None
|
|
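The test above validates a batched integrator against per-orbit integration by bounding the maximum absolute difference between the two results. The same comparison pattern, reduced to a runnable numpy sketch with a harmonic-oscillator stand-in for the dynamics (galpy itself is not used here):

import numpy

times = numpy.linspace(0.0, 10.0, 1001)
amplitudes = [1.0, 0.5, -0.2]

# Batched evaluation produces one (n_orbits, n_times) array in a single shot...
batched = numpy.array([a * numpy.cos(times) for a in amplitudes])

# ...which must agree element-wise with each trajectory computed on its own.
for ii, a in enumerate(amplitudes):
    single = a * numpy.cos(times)
    assert numpy.amax(numpy.fabs(single - batched[ii])) < 1e-10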
876a182de54b2f1fc54cc051646fc04cd400a81c
|
migrations/versions/3be107806e62_make_testartifact_cascade_on_test_delete.py
|
migrations/versions/3be107806e62_make_testartifact_cascade_on_test_delete.py
|
"""Make testartifact cascade on test delete
Revision ID: 3be107806e62
Revises: 3bf1066f4935
Create Date: 2016-07-06 18:42:33.893405
"""
# revision identifiers, used by Alembic.
revision = '3be107806e62'
down_revision = '3bf1066f4935'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'], ondelete='CASCADE')
def downgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'])
|
Add migration to make testartifact actually CASCADE on delete.
|
Add migration to make testartifact actually CASCADE on delete.
Summary:
This is alright if we don't care much about having dangling references
on S3/ArtifactStore (such as if we clean them separately).
Reviewers: paulruan
Reviewed By: paulruan
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D210560
|
Python
|
apache-2.0
|
dropbox/changes,dropbox/changes,dropbox/changes,dropbox/changes
|
Add migration to make testartifact actually CASCADE on delete.
Summary:
This is alright if we don't care much about having dangling references
on S3/ArtifactStore (such as if we clean them separately).
Reviewers: paulruan
Reviewed By: paulruan
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D210560
|
"""Make testartifact cascade on test delete
Revision ID: 3be107806e62
Revises: 3bf1066f4935
Create Date: 2016-07-06 18:42:33.893405
"""
# revision identifiers, used by Alembic.
revision = '3be107806e62'
down_revision = '3bf1066f4935'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'], ondelete='CASCADE')
def downgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'])
|
<commit_before><commit_msg>Add migration to make testartifact actually CASCADE on delete.
Summary:
This is alright if we don't care much about having dangling references
on S3/ArtifactStore (such as if we clean them separately).
Reviewers: paulruan
Reviewed By: paulruan
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D210560<commit_after>
|
"""Make testartifact cascade on test delete
Revision ID: 3be107806e62
Revises: 3bf1066f4935
Create Date: 2016-07-06 18:42:33.893405
"""
# revision identifiers, used by Alembic.
revision = '3be107806e62'
down_revision = '3bf1066f4935'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'], ondelete='CASCADE')
def downgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'])
|
Add migration to make testartifact actually CASCADE on delete.
Summary:
This is alright if we don't care much about having dangling references
on S3/ArtifactStore (such as if we clean them separately).
Reviewers: paulruan
Reviewed By: paulruan
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D210560"""Make testartifact cascade on test delete
Revision ID: 3be107806e62
Revises: 3bf1066f4935
Create Date: 2016-07-06 18:42:33.893405
"""
# revision identifiers, used by Alembic.
revision = '3be107806e62'
down_revision = '3bf1066f4935'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'], ondelete='CASCADE')
def downgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'])
|
<commit_before><commit_msg>Add migration to make testartifact actually CASCADE on delete.
Summary:
This is alright if we don't care much about having dangling references
on S3/ArtifactStore (such as if we clean them separately).
Reviewers: paulruan
Reviewed By: paulruan
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D210560<commit_after>"""Make testartifact cascade on test delete
Revision ID: 3be107806e62
Revises: 3bf1066f4935
Create Date: 2016-07-06 18:42:33.893405
"""
# revision identifiers, used by Alembic.
revision = '3be107806e62'
down_revision = '3bf1066f4935'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'], ondelete='CASCADE')
def downgrade():
op.drop_constraint('testartifact_test_id_fkey', 'testartifact')
op.create_foreign_key('testartifact_test_id_fkey', 'testartifact', 'test', ['test_id'], ['id'])
|
|
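The migration swaps a plain foreign key for one declared with ON DELETE CASCADE, so deleting a test row removes its artifact rows in the same statement. The effect can be demonstrated end to end with the stdlib sqlite3 module; the table names are reused from the migration, and note that SQLite needs the foreign_keys pragma enabled per connection.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("PRAGMA foreign_keys = ON")
conn.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
conn.execute(
    "CREATE TABLE testartifact ("
    "id INTEGER PRIMARY KEY, "
    "test_id INTEGER REFERENCES test(id) ON DELETE CASCADE)"
)
conn.execute("INSERT INTO test (id) VALUES (1)")
conn.execute("INSERT INTO testartifact (id, test_id) VALUES (10, 1)")
conn.execute("DELETE FROM test WHERE id = 1")  # cascades to the artifact row
assert conn.execute("SELECT COUNT(*) FROM testartifact").fetchone()[0] == 0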
737000499967a9c910e60868227de9a923f0555e
|
FibonacciSeries.py
|
FibonacciSeries.py
|
import sys
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n-1) + fibonacci(n-2)
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
for case in test_cases:
number = int(case.rstrip())
print(fibonacci(number))
sys.exit(0)
|
Solve the Fibonacci Series challenge
|
Solve the Fibonacci Series challenge
|
Python
|
mit
|
TommyN94/CodeEvalSolutions,TommyN94/CodeEvalSolutions
|
Solve the Fibonacci Series challenge
|
import sys
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n-1) + fibonacci(n-2)
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
for case in test_cases:
number = int(case.rstrip())
print(fibonacci(number))
sys.exit(0)
|
<commit_before><commit_msg>Solve the Fibonacci Series challenge<commit_after>
|
import sys
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n-1) + fibonacci(n-2)
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
for case in test_cases:
number = int(case.rstrip())
print(fibonacci(number))
sys.exit(0)
|
Solve the Fibonacci Series challengeimport sys
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n-1) + fibonacci(n-2)
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
for case in test_cases:
number = int(case.rstrip())
print(fibonacci(number))
sys.exit(0)
|
<commit_before><commit_msg>Solve the Fibonacci Series challenge<commit_after>import sys
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n-1) + fibonacci(n-2)
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
for case in test_cases:
number = int(case.rstrip())
print(fibonacci(number))
sys.exit(0)
|
|
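The recursive solution above recomputes every subproblem, so its running time grows roughly as 2^n; that is fine for the challenge's small inputs, but one decorator makes it linear. A sketch of the memoized variant, same recurrence, stdlib only:

from functools import lru_cache

@lru_cache(maxsize=None)
def fibonacci(n):
    # Each value is now computed once and reused: O(n) instead of O(2^n).
    if n < 2:
        return n
    return fibonacci(n - 1) + fibonacci(n - 2)

assert [fibonacci(i) for i in range(10)] == [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]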
2b2b7101dfe6953da2a482dfcf16f94eba3c08aa
|
master_data/tests/test_models.py
|
master_data/tests/test_models.py
|
from django.test import TestCase
from django.core.exceptions import ValidationError
from master_data.models import Nationality
class NationalityModelTestCases(TestCase):
def setUp(self):
self.nationality, created = Nationality.objects.get_or_create(name='Egyptian')
def test_create_new_nationality(self):
new_nationality = Nationality(name='American')
new_nationality.save()
self.assertTrue(new_nationality.pk)
self.assertTrue(new_nationality.is_active)
self.assertIsInstance(new_nationality, Nationality)
def test_update_nationality(self):
self.nationality.name = 'Egypt'
self.nationality.save()
self.assertTrue(self.nationality.pk)
self.assertNotEqual('Egyptian', self.nationality.name)
def test_delete_nationality(self):
self.nationality.delete()
self.assertFalse(self.nationality.pk)
def test_add_new_nationality_with_duplicate_name(self):
new_nationality = Nationality(name=self.nationality.name)
with self.assertRaises(ValidationError):
new_nationality.save()
def test_deactivate_nationality(self):
self.nationality.is_active = False
self.nationality.save()
self.assertFalse(self.nationality.is_active)
|
Add test for nationality model
|
Add test for nationality model
|
Python
|
mit
|
EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat
|
Add test for nationality model
|
from django.test import TestCase
from django.core.exceptions import ValidationError
from master_data.models import Nationality
class NationalityModelTestCases(TestCase):
def setUp(self):
self.nationality, created = Nationality.objects.get_or_create(name='Egyptian')
def test_create_new_nationality(self):
new_nationality = Nationality(name='American')
new_nationality.save()
self.assertTrue(new_nationality.pk)
self.assertTrue(new_nationality.is_active)
self.assertIsInstance(new_nationality, Nationality)
def test_update_nationality(self):
self.nationality.name = 'Egypt'
self.nationality.save()
self.assertTrue(self.nationality.pk)
self.assertNotEqual('Egyptian', self.nationality.name)
def test_delete_nationality(self):
self.nationality.delete()
self.assertFalse(self.nationality.pk)
def test_add_new_nationality_with_duplicate_name(self):
new_nationality = Nationality(name=self.nationality.name)
with self.assertRaises(ValidationError):
new_nationality.save()
def test_deactivate_nationality(self):
self.nationality.is_active = False
self.nationality.save()
self.assertFalse(self.nationality.is_active)
|
<commit_before><commit_msg>Add test for nationality model<commit_after>
|
from django.test import TestCase
from django.core.exceptions import ValidationError
from master_data.models import Nationality
class NationalityModelTestCases(TestCase):
def setUp(self):
self.nationality, created = Nationality.objects.get_or_create(name='Egyptian')
def test_create_new_nationality(self):
new_nationality = Nationality(name='American')
new_nationality.save()
self.assertTrue(new_nationality.pk)
self.assertTrue(new_nationality.is_active)
self.assertIsInstance(new_nationality, Nationality)
def test_update_nationality(self):
self.nationality.name = 'Egypt'
self.nationality.save()
self.assertTrue(self.nationality.pk)
self.assertNotEqual('Egyptian', self.nationality.name)
def test_delete_nationality(self):
self.nationality.delete()
self.assertFalse(self.nationality.pk)
def test_add_new_nationality_with_duplicate_name(self):
new_nationality = Nationality(name=self.nationality.name)
with self.assertRaises(ValidationError):
new_nationality.save()
def test_deactivate_nationality(self):
self.nationality.is_active = False
self.nationality.save()
self.assertFalse(self.nationality.is_active)
|
Add test for nationality modelfrom django.test import TestCase
from django.core.exceptions import ValidationError
from master_data.models import Nationality
class NationalityModelTestCases(TestCase):
def setUp(self):
self.nationality, created = Nationality.objects.get_or_create(name='Egyptian')
def test_create_new_nationality(self):
new_nationality = Nationality(name='American')
new_nationality.save()
self.assertTrue(new_nationality.pk)
self.assertTrue(new_nationality.is_active)
self.assertIsInstance(new_nationality, Nationality)
def test_update_nationality(self):
self.nationality.name = 'Egypt'
self.nationality.save()
self.assertTrue(self.nationality.pk)
self.assertNotEqual('Egyptian', self.nationality.name)
def test_delete_nationality(self):
self.nationality.delete()
self.assertFalse(self.nationality.pk)
def test_add_new_nationality_with_duplicate_name(self):
new_nationality = Nationality(name=self.nationality.name)
with self.assertRaises(ValidationError):
new_nationality.save()
def test_deactivate_nationality(self):
self.nationality.is_active = False
self.nationality.save()
self.assertFalse(self.nationality.is_active)
|
<commit_before><commit_msg>Add test for nationality model<commit_after>from django.test import TestCase
from django.core.exceptions import ValidationError
from master_data.models import Nationality
class NationalityModelTestCases(TestCase):
def setUp(self):
self.nationality, created = Nationality.objects.get_or_create(name='Egyptian')
def test_create_new_nationality(self):
new_nationality = Nationality(name='American')
new_nationality.save()
self.assertTrue(new_nationality.pk)
self.assertTrue(new_nationality.is_active)
self.assertIsInstance(new_nationality, Nationality)
def test_update_nationality(self):
self.nationality.name = 'Egypt'
self.nationality.save()
self.assertTrue(self.nationality.pk)
self.assertNotEqual('Egyptian', self.nationality.name)
def test_delete_nationality(self):
self.nationality.delete()
self.assertFalse(self.nationality.pk)
def test_add_new_nationality_with_duplicate_name(self):
new_nationality = Nationality(name=self.nationality.name)
with self.assertRaises(ValidationError):
new_nationality.save()
def test_deactivate_nationality(self):
self.nationality.is_active = False
self.nationality.save()
self.assertFalse(self.nationality.is_active)
|
|
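The setUp above relies on the get_or_create contract: the call returns an (object, created) pair, which is why its result is unpacked into two names. A framework-free sketch of that contract; the dict registry is only a stand-in for the ORM.

def get_or_create(registry, name):
    """Return (obj, created), mirroring QuerySet.get_or_create."""
    if name in registry:
        return registry[name], False
    registry[name] = {"name": name, "is_active": True}
    return registry[name], True

registry = {}
obj, created = get_or_create(registry, "Egyptian")
assert created is True
same, created_again = get_or_create(registry, "Egyptian")
assert same is obj and created_again is False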
27ee10a1a44dee6e4efce6b8187d4cb6dd448e7b
|
tests/compiler/method_definition.py
|
tests/compiler/method_definition.py
|
import pytest
from tests.compiler import compile_base, internal_call
from thinglang.compiler.errors import SelfInStaticMethod
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodePushLocal
SELF_USE_IN_STATIC_METHOD = '''
thing Program
has number n1
static does something
{}
'''
def test_direct_self_use_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('return self'))
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('Console.print(self)'))
def test_self_dereference_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('self.n1'))
|
Add test for self usage in static methods
|
Add test for self usage in static methods
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
Add test for self usage in static methods
|
import pytest
from tests.compiler import compile_base, internal_call
from thinglang.compiler.errors import SelfInStaticMethod
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodePushLocal
SELF_USE_IN_STATIC_METHOD = '''
thing Program
has number n1
static does something
{}
'''
def test_direct_self_use_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('return self'))
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('Console.print(self)'))
def test_self_dereference_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('self.n1'))
|
<commit_before><commit_msg>Add test for self usage in static methods<commit_after>
|
import pytest
from tests.compiler import compile_base, internal_call
from thinglang.compiler.errors import SelfInStaticMethod
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodePushLocal
SELF_USE_IN_STATIC_METHOD = '''
thing Program
has number n1
static does something
{}
'''
def test_direct_self_use_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('return self'))
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('Console.print(self)'))
def test_self_dereference_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('self.n1'))
|
Add test for self usage in static methodsimport pytest
from tests.compiler import compile_base, internal_call
from thinglang.compiler.errors import SelfInStaticMethod
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodePushLocal
SELF_USE_IN_STATIC_METHOD = '''
thing Program
has number n1
static does something
{}
'''
def test_direct_self_use_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('return self'))
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('Console.print(self)'))
def test_self_dereference_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('self.n1'))
|
<commit_before><commit_msg>Add test for self usage in static methods<commit_after>import pytest
from tests.compiler import compile_base, internal_call
from thinglang.compiler.errors import SelfInStaticMethod
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodePushLocal
SELF_USE_IN_STATIC_METHOD = '''
thing Program
has number n1
static does something
{}
'''
def test_direct_self_use_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('return self'))
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('Console.print(self)'))
def test_self_dereference_in_static_function():
with pytest.raises(SelfInStaticMethod):
compile_base(SELF_USE_IN_STATIC_METHOD.format('self.n1'))
|
|
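The three negative cases above share one template string and assert the same exception, which is exactly the shape pytest.mark.parametrize is built for. A runnable sketch of the pattern; the toy checker only stands in for compile_base and does not reproduce its real analysis.

import pytest

def compile_snippet(body):
    # Toy stand-in: reject any appearance of "self" in a static body.
    if "self" in body:
        raise ValueError("self used in a static method")

@pytest.mark.parametrize("body", ["return self", "Console.print(self)", "self.n1"])
def test_self_is_rejected(body):
    with pytest.raises(ValueError):
        compile_snippet(body)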
ca346df368c7c798495243199c8693d1b44a5e7f
|
tests/py/test_www_team_receiving.py
|
tests/py/test_www_team_receiving.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.testing import Harness
class Tests(Harness):
def test_receiving_returns_404_for_unapproved_teams(self):
self.make_team(is_approved=False)
assert self.client.GxT('/TheEnterprise/receiving/').code == 404
def test_receiving_is_not_visible_to_anon(self):
self.make_team(is_approved=True)
assert self.client.GxT('/TheEnterprise/receiving/', auth_as=None).code == 401
def test_receiving_is_not_visible_to_random(self):
self.make_team(is_approved=True)
alice = self.make_participant('alice')
assert self.client.GxT('/TheEnterprise/receiving/', auth_as='alice').code == 401
def test_receiving_is_visible_to_admin(self):
self.make_team(is_approved=True)
admin = self.make_participant('admin', is_admin=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='admin').code == 200
def test_receiving_is_visible_to_team_owner(self):
self.make_team(is_approved=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='picard').code == 200
|
Add failing test for anon viewing receiving page
|
Add failing test for anon viewing receiving page
|
Python
|
mit
|
gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com
|
Add failing test for anon viewing receiving page
|
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.testing import Harness
class Tests(Harness):
def test_receiving_returns_404_for_unapproved_teams(self):
self.make_team(is_approved=False)
assert self.client.GxT('/TheEnterprise/receiving/').code == 404
def test_receiving_is_not_visible_to_anon(self):
self.make_team(is_approved=True)
assert self.client.GxT('/TheEnterprise/receiving/', auth_as=None).code == 401
def test_receiving_is_not_visible_to_random(self):
self.make_team(is_approved=True)
alice = self.make_participant('alice')
assert self.client.GxT('/TheEnterprise/receiving/', auth_as='alice').code == 401
def test_receiving_is_visible_to_admin(self):
self.make_team(is_approved=True)
admin = self.make_participant('admin', is_admin=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='admin').code == 200
def test_receiving_is_visible_to_team_owner(self):
self.make_team(is_approved=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='picard').code == 200
|
<commit_before><commit_msg>Add failing test for anon viewing receiving page<commit_after>
|
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.testing import Harness
class Tests(Harness):
def test_receiving_returns_404_for_unapproved_teams(self):
self.make_team(is_approved=False)
assert self.client.GxT('/TheEnterprise/receiving/').code == 404
def test_receiving_is_not_visible_to_anon(self):
self.make_team(is_approved=True)
assert self.client.GxT('/TheEnterprise/receiving/', auth_as=None).code == 401
def test_receiving_is_not_visible_to_random(self):
self.make_team(is_approved=True)
alice = self.make_participant('alice')
assert self.client.GxT('/TheEnterprise/receiving/', auth_as='alice').code == 401
def test_receiving_is_visible_to_admin(self):
self.make_team(is_approved=True)
admin = self.make_participant('admin', is_admin=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='admin').code == 200
def test_receiving_is_visible_to_team_owner(self):
self.make_team(is_approved=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='picard').code == 200
|
Add failing test for anon viewing receiving pagefrom __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.testing import Harness
class Tests(Harness):
def test_receiving_returns_404_for_unapproved_teams(self):
self.make_team(is_approved=False)
assert self.client.GxT('/TheEnterprise/receiving/').code == 404
def test_receiving_is_not_visible_to_anon(self):
self.make_team(is_approved=True)
assert self.client.GxT('/TheEnterprise/receiving/', auth_as=None).code == 401
def test_receiving_is_not_visible_to_random(self):
self.make_team(is_approved=True)
alice = self.make_participant('alice')
assert self.client.GxT('/TheEnterprise/receiving/', auth_as='alice').code == 401
def test_receiving_is_visible_to_admin(self):
self.make_team(is_approved=True)
admin = self.make_participant('admin', is_admin=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='admin').code == 200
def test_receiving_is_visible_to_team_owner(self):
self.make_team(is_approved=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='picard').code == 200
|
<commit_before><commit_msg>Add failing test for anon viewing receiving page<commit_after>from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.testing import Harness
class Tests(Harness):
def test_receiving_returns_404_for_unapproved_teams(self):
self.make_team(is_approved=False)
assert self.client.GxT('/TheEnterprise/receiving/').code == 404
def test_receiving_is_not_visible_to_anon(self):
self.make_team(is_approved=True)
assert self.client.GxT('/TheEnterprise/receiving/', auth_as=None).code == 401
def test_receiving_is_not_visible_to_random(self):
self.make_team(is_approved=True)
alice = self.make_participant('alice')
assert self.client.GxT('/TheEnterprise/receiving/', auth_as='alice').code == 401
def test_receiving_is_visible_to_admin(self):
self.make_team(is_approved=True)
admin = self.make_participant('admin', is_admin=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='admin').code == 200
def test_receiving_is_visible_to_team_owner(self):
self.make_team(is_approved=True)
assert self.client.GET('/TheEnterprise/receiving/', auth_as='picard').code == 200
|
|
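The four visibility tests above encode a small access-control matrix: an unapproved team answers 404, and an approved team answers 200 only to admins and the owner, 401 otherwise. A compact sketch of that decision table; the viewer dicts and the 404-for-everyone reading of the unapproved case are interpretations, not taken from the app.

def receiving_status(team_approved, viewer):
    if not team_approved:
        return 404  # unapproved teams are hidden outright
    if viewer is None:
        return 401  # anonymous viewers must authenticate
    if viewer.get("is_admin") or viewer.get("is_owner"):
        return 200
    return 401      # other authenticated users are rejected too

assert receiving_status(False, None) == 404
assert receiving_status(True, None) == 401
assert receiving_status(True, {"name": "alice"}) == 401
assert receiving_status(True, {"is_owner": True}) == 200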
0476e30119c02d715c8674d1e362207bd9a464c9
|
tests/startsymbol_tests/__init__.py
|
tests/startsymbol_tests/__init__.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
|
Add directory for start symbol tests
|
Add directory for start symbol tests
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add directory for start symbol tests
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
|
<commit_before><commit_msg>Add directory for start symbol tests<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
|
Add directory for start symbol tests#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
|
<commit_before><commit_msg>Add directory for start symbol tests<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
|
|
4ff445748a0d1187cc1a6ad799564c05240a6a17
|
common/djangoapps/student/management/commands/anonymized_id_mapping.py
|
common/djangoapps/student/management/commands/anonymized_id_mapping.py
|
import csv
import sys
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from student.models import unique_id_for_user
class Command(BaseCommand):
# It appears that with the way Rake invokes these commands, we can't
# have more than one arg passed through...annoying.
args = ("course_id", )
help = """
Exports a CSV document mapping from each user ID to the anonymized,
unique user ID for every user in the specified course.
"""
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Usage: unique_id_mapping %s" %
" ".join(("<%s>" % arg for arg in Command.args)))
course_id = args[0]
# Generate the output filename from the course ID.
# Change slashes to dashes first, and then append .csv extension.
output_filename = course_id.replace('/', '-') + ".csv"
# Figure out which students are enrolled in the course
students = User.objects.filter(courseenrollment__course_id=course_id)
if len(students) == 0:
self.stdout.write("No students enrolled in %s" % course_id)
return
# Write mapping to output file in CSV format with a simple header
try:
with open(output_filename, 'wb') as output_file:
csv_writer = csv.writer(output_file)
csv_writer.writerow(("User ID", "Anonymized user ID"))
for student in students:
csv_writer.writerow((student.id, unique_id_for_user(student)))
except IOError:
raise CommandError("Error writing to file: %s" % output_filename)
|
Add mgmt cmd to generate anonymized ID mapping
|
Add mgmt cmd to generate anonymized ID mapping
So that instructors have easy access to the mapping from anonymized
IDs (a simple MD5 hash of the user ID that's used when integrating
with third-party services like Qualtrics) to user IDs, we have a
simple Django management command to generate a CSV mapping. To run,
use the following:
rake django-admin[anonymized_id_mapping,<system>,<env>,<course_id>]
And, of course, substitute the appropriate values for <system>, <env>,
and <course_id>. (E.g., lms, dev, and MITx/6.002x/Circuits)
|
Python
|
agpl-3.0
|
caesar2164/edx-platform,Stanford-Online/edx-platform,jbassen/edx-platform,jbassen/edx-platform,Stanford-Online/edx-platform,jbassen/edx-platform,caesar2164/edx-platform,jbassen/edx-platform,Stanford-Online/edx-platform,jbassen/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,Stanford-Online/edx-platform
|
Add mgmt cmd to generate anonymized ID mapping
So that instructors have easy access to the mapping from anonymized
IDs (a simple MD5 hash of the user ID that's used when integrating
with third-party services like Qualtrics) to user IDs, we have a
simple Django management command to generate a CSV mapping. To run,
use the following:
rake django-admin[anonymized_id_mapping,<system>,<env>,<course_id>]
And, of course, substitute the appropriate values for <system>, <env>,
and <course_id>. (E.g., lms, dev, and MITx/6.002x/Circuits)
|
import csv
import sys
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from student.models import unique_id_for_user
class Command(BaseCommand):
# It appears that with the way Rake invokes these commands, we can't
# have more than one arg passed through...annoying.
args = ("course_id", )
help = """
Exports a CSV document mapping from each user ID to the anonymized,
unique user ID for every user in the specified course.
"""
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Usage: unique_id_mapping %s" %
" ".join(("<%s>" % arg for arg in Command.args)))
course_id = args[0]
# Generate the output filename from the course ID.
# Change slashes to dashes first, and then append .csv extension.
output_filename = course_id.replace('/', '-') + ".csv"
# Figure out which students are enrolled in the course
students = User.objects.filter(courseenrollment__course_id=course_id)
if len(students) == 0:
self.stdout.write("No students enrolled in %s" % course_id)
return
# Write mapping to output file in CSV format with a simple header
try:
with open(output_filename, 'wb') as output_file:
csv_writer = csv.writer(output_file)
csv_writer.writerow(("User ID", "Anonymized user ID"))
for student in students:
csv_writer.writerow((student.id, unique_id_for_user(student)))
except IOError:
raise CommandError("Error writing to file: %s" % output_filename)
|
<commit_before><commit_msg>Add mgmt cmd to generate anonymized ID mapping
So that instructors have easy access to the mapping from anonymized
IDs (a simple MD5 hash of the user ID that's used when integrating
with third-party services like Qualtrics) to user IDs, we have a
simple Django management command to generate a CSV mapping. To run,
use the following:
rake django-admin[anonymized_id_mapping,<system>,<env>,<course_id>]
And, of course, substitute the appropriate values for <system>, <env>,
and <course_id>. (E.g., lms, dev, and MITx/6.002x/Circuits)<commit_after>
|
import csv
import sys
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from student.models import unique_id_for_user
class Command(BaseCommand):
# It appears that with the way Rake invokes these commands, we can't
# have more than one arg passed through...annoying.
args = ("course_id", )
help = """
Exports a CSV document mapping from each user ID to the anonymized,
unique user ID for every user in the specified course.
"""
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Usage: unique_id_mapping %s" %
" ".join(("<%s>" % arg for arg in Command.args)))
course_id = args[0]
# Generate the output filename from the course ID.
# Change slashes to dashes first, and then append .csv extension.
output_filename = course_id.replace('/', '-') + ".csv"
# Figure out which students are enrolled in the course
students = User.objects.filter(courseenrollment__course_id=course_id)
if len(students) == 0:
self.stdout.write("No students enrolled in %s" % course_id)
return
# Write mapping to output file in CSV format with a simple header
try:
with open(output_filename, 'wb') as output_file:
csv_writer = csv.writer(output_file)
csv_writer.writerow(("User ID", "Anonymized user ID"))
for student in students:
csv_writer.writerow((student.id, unique_id_for_user(student)))
except IOError:
raise CommandError("Error writing to file: %s" % output_filename)
|
Add mgmt cmd to generate anonymized ID mapping
So that instructors have easy access to the mapping from anonymized
IDs (a simple MD5 hash of the user ID that's used when integrating
with third-party services like Qualtrics) to user IDs, we have a
simple Django management command to generate a CSV mapping. To run,
use the following:
rake django-admin[anonymized_id_mapping,<system>,<env>,<course_id>]
And, of course, substitute the appropriate values for <system>, <env>,
and <course_id>. (E.g., lms, dev, and MITx/6.002x/Circuits)import csv
import sys
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from student.models import unique_id_for_user
class Command(BaseCommand):
# It appears that with the way Rake invokes these commands, we can't
# have more than one arg passed through...annoying.
args = ("course_id", )
help = """
Exports a CSV document mapping from each user ID to the anonymized,
unique user ID for every user in the specified course.
"""
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Usage: unique_id_mapping %s" %
" ".join(("<%s>" % arg for arg in Command.args)))
course_id = args[0]
# Generate the output filename from the course ID.
# Change slashes to dashes first, and then append .csv extension.
output_filename = course_id.replace('/', '-') + ".csv"
# Figure out which students are enrolled in the course
students = User.objects.filter(courseenrollment__course_id=course_id)
if len(students) == 0:
self.stdout.write("No students enrolled in %s" % course_id)
return
# Write mapping to output file in CSV format with a simple header
try:
with open(output_filename, 'wb') as output_file:
csv_writer = csv.writer(output_file)
csv_writer.writerow(("User ID", "Anonymized user ID"))
for student in students:
csv_writer.writerow((student.id, unique_id_for_user(student)))
except IOError:
raise CommandError("Error writing to file: %s" % output_filename)
|
<commit_before><commit_msg>Add mgmt cmd to generate anonymized ID mapping
So that instructors have easy access to the mapping from anonymized
IDs (a simple MD5 hash of the user ID that's used when integrating
with third-party services like Qualtrics) to user IDs, we have a
simple Django management command to generate a CSV mapping. To run,
use the following:
rake django-admin[anonymized_id_mapping,<system>,<env>,<course_id>]
And, of course, substitute the appropriate values for <system>, <env>,
and <course_id>. (E.g., lms, dev, and MITx/6.002x/Circuits)<commit_after>import csv
import sys
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
from student.models import unique_id_for_user
class Command(BaseCommand):
# It appears that with the way Rake invokes these commands, we can't
# have more than one arg passed through...annoying.
args = ("course_id", )
help = """
Exports a CSV document mapping from each user ID to the anonymized,
unique user ID for every user in the specified course.
"""
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("Usage: unique_id_mapping %s" %
" ".join(("<%s>" % arg for arg in Command.args)))
course_id = args[0]
# Generate the output filename from the course ID.
# Change slashes to dashes first, and then append .csv extension.
output_filename = course_id.replace('/', '-') + ".csv"
# Figure out which students are enrolled in the course
students = User.objects.filter(courseenrollment__course_id=course_id)
if len(students) == 0:
self.stdout.write("No students enrolled in %s" % course_id)
return
# Write mapping to output file in CSV format with a simple header
try:
with open(output_filename, 'wb') as output_file:
csv_writer = csv.writer(output_file)
csv_writer.writerow(("User ID", "Anonymized user ID"))
for student in students:
csv_writer.writerow((student.id, unique_id_for_user(student)))
except IOError:
raise CommandError("Error writing to file: %s" % output_filename)
|
|
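The commit message describes unique_id_for_user as a simple MD5 hash of the user ID; combined with the csv module, that is the whole mapping pipeline. A stdlib sketch of the pipeline writing to an in-memory buffer; the unsalted hash and the helper name are assumptions for illustration, and the real helper may mix in a secret.

import csv
import hashlib
import io

def anonymized_id(user_id):
    # Per the commit message: "a simple MD5 hash of the user ID".
    return hashlib.md5(str(user_id).encode("utf-8")).hexdigest()

buf = io.StringIO()
writer = csv.writer(buf)
writer.writerow(("User ID", "Anonymized user ID"))
for uid in (1, 2, 3):
    writer.writerow((uid, anonymized_id(uid)))
print(buf.getvalue())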
318ebb141ebb50010964821145811aa36e46877f
|
temba/flows/migrations/0030_auto_20150825_1406.py
|
temba/flows/migrations/0030_auto_20150825_1406.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('flows', '0029_populate_run_modified_on'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='modified_on',
field=models.DateTimeField(help_text='When this flow run was last updated', auto_now=True),
preserve_default=True,
),
migrations.AlterField(
model_name='flowrun',
name='org',
field=models.ForeignKey(related_name='runs', to='orgs.Org', db_index=False),
preserve_default=True,
),
]
|
Make modified_on and org no longer be nullable
|
Make modified_on and org no longer be nullable
|
Python
|
agpl-3.0
|
tsotetsi/textily-web,reyrodrigues/EU-SMS,praekelt/rapidpro,pulilab/rapidpro,ewheeler/rapidpro,ewheeler/rapidpro,ewheeler/rapidpro,ewheeler/rapidpro,reyrodrigues/EU-SMS,reyrodrigues/EU-SMS,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,praekelt/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,praekelt/rapidpro,praekelt/rapidpro
|
Make modified_on and org no longer be nullable
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('flows', '0029_populate_run_modified_on'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='modified_on',
field=models.DateTimeField(help_text='When this flow run was last updated', auto_now=True),
preserve_default=True,
),
migrations.AlterField(
model_name='flowrun',
name='org',
field=models.ForeignKey(related_name='runs', to='orgs.Org', db_index=False),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Make modified_on and org no longer be nullable<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('flows', '0029_populate_run_modified_on'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='modified_on',
field=models.DateTimeField(help_text='When this flow run was last updated', auto_now=True),
preserve_default=True,
),
migrations.AlterField(
model_name='flowrun',
name='org',
field=models.ForeignKey(related_name='runs', to='orgs.Org', db_index=False),
preserve_default=True,
),
]
|
Make modified_on and org no longer be nullable# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('flows', '0029_populate_run_modified_on'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='modified_on',
field=models.DateTimeField(help_text='When this flow run was last updated', auto_now=True),
preserve_default=True,
),
migrations.AlterField(
model_name='flowrun',
name='org',
field=models.ForeignKey(related_name='runs', to='orgs.Org', db_index=False),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Make modified_on and org no longer be nullable<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('flows', '0029_populate_run_modified_on'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='modified_on',
field=models.DateTimeField(help_text='When this flow run was last updated', auto_now=True),
preserve_default=True,
),
migrations.AlterField(
model_name='flowrun',
name='org',
field=models.ForeignKey(related_name='runs', to='orgs.Org', db_index=False),
preserve_default=True,
),
]
|
|
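For readers unfamiliar with the field options in this migration: auto_now=True makes Django stamp the column on every save(), and db_index=False suppresses the usual ForeignKey index. A hypothetical model showing the same options, not taken from the temba codebase:
from django.db import models
class FlowRunExample(models.Model):
    # Hypothetical stand-ins for the fields altered in the migration above.
    org = models.ForeignKey('orgs.Org', related_name='runs', db_index=False,
                            on_delete=models.CASCADE)
    modified_on = models.DateTimeField(
        help_text='When this flow run was last updated',
        auto_now=True)  # set automatically on every save()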
fa6e1b638f7d8832498ddb75b94420607ffc109c
|
Python_Data/Fraction.py
|
Python_Data/Fraction.py
|
# gcd(a,b) = gcd(b, a%b). NB: positive integers only
def gcd(m,n):
    while m % n != 0:
        m, n = n, m % n
return n
class Fraction(object):
""" Represents a fraction, which consists of two integer numbers:
The first one is called the numerator, and the second one is
the denominator.
To instantiate an object of this class you need to provide a
default numerator and denominator to the init method.
"""
def __init__(self, num, den):
self.num = num
self.den = den
#Overriding string conversion method
def __str__(self):
return str(self.num) + " / " + str(self.den)
#Overriding the arithmetic operation for Fractions
def __add__(self, other):
den = other.den * self.den
num = (self.num*den//self.den)+(other.num*den//other.den)
common = gcd(num, den)
return Fraction(num//common,den//common)
def __sub__(self, other):
other.num *= -1
return self.__add__(other)
def __mul__(self, other):
num = self.num * other.num
        den = self.den * other.den
        common = gcd(num, den)
        return Fraction(num//common, den//common)
    def __div__(self, other):
        # Divide by multiplying with the reciprocal of other
        other.num, other.den = other.den, other.num
        return self.__mul__(other)
#Right way to check if two fractions are equal
def __eq__(self, other):
# Cross product
num1 = self.num*other.den
num2 = self.den*other.num
return num1==num2
def __lt__(self, other):
return(self.num/self.den < other.num/other.den)
def __gt__(self, other):
return(other.__lt__(self))
def main():
myf = Fraction(1,4)
myf2 = Fraction(1,2)
print(myf>myf2)
if __name__ == '__main__':
main()
|
Add a class to implement a fraction in python, to deal with method overriding
|
Add a class to implement a fraction in python, to deal with method overriding
|
Python
|
unlicense
|
robotenique/RandomAccessMemory,robotenique/RandomAccessMemory,robotenique/RandomAccessMemory
|
Add a class to implement a fraction in python, to deal with method overriding
|
# gcd(a,b) = gcd(b, a%b). NB: positive integers only
def gcd(m,n):
    while m % n != 0:
        m, n = n, m % n
return n
class Fraction(object):
""" Represents a fraction, which consists of two integer numbers:
The first one is called the numerator, and the second one is
the denominator.
To instantiate an object of this class you need to provide a
default numerator and denominator to the init method.
"""
def __init__(self, num, den):
self.num = num
self.den = den
#Overriding string conversion method
def __str__(self):
return str(self.num) + " / " + str(self.den)
#Overriding the arithmetic operation for Fractions
def __add__(self, other):
den = other.den * self.den
num = (self.num*den//self.den)+(other.num*den//other.den)
common = gcd(num, den)
return Fraction(num//common,den//common)
def __sub__(self, other):
other.num *= -1
return self.__add__(other)
def __mul__(self, other):
num = self.num * other.num
        den = self.den * other.den
        common = gcd(num, den)
        return Fraction(num//common, den//common)
    def __div__(self, other):
        # Divide by multiplying with the reciprocal of other
        other.num, other.den = other.den, other.num
        return self.__mul__(other)
#Right way to check if two fractions are equal
def __eq__(self, other):
# Cross product
num1 = self.num*other.den
num2 = self.den*other.num
return num1==num2
def __lt__(self, other):
return(self.num/self.den < other.num/other.den)
def __gt__(self, other):
return(other.__lt__(self))
def main():
myf = Fraction(1,4)
myf2 = Fraction(1,2)
print(myf>myf2)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a class to implement a fraction in python, to deal with method overriding<commit_after>
|
# gcd(a,b) = gcd(b, a%b). NB: positive integers only
def gcd(m,n):
    while m % n != 0:
        m, n = n, m % n
return n
class Fraction(object):
""" Represents a fraction, which consists of two integer numbers:
The first one is called the numerator, and the second one is
the denominator.
To instantiate an object of this class you need to provide a
default numerator and denominator to the init method.
"""
def __init__(self, num, den):
self.num = num
self.den = den
#Overriding string conversion method
def __str__(self):
return str(self.num) + " / " + str(self.den)
#Overriding the arithmetic operation for Fractions
def __add__(self, other):
den = other.den * self.den
num = (self.num*den//self.den)+(other.num*den//other.den)
common = gcd(num, den)
return Fraction(num//common,den//common)
def __sub__(self, other):
other.num *= -1
return self.__add__(other)
def __mul__(self, other):
num = self.num * other.num
        den = self.den * other.den
        common = gcd(num, den)
        return Fraction(num//common, den//common)
    def __div__(self, other):
        # Divide by multiplying with the reciprocal of other
        other.num, other.den = other.den, other.num
        return self.__mul__(other)
#Right way to check if two fractions are equal
def __eq__(self, other):
# Cross product
num1 = self.num*other.den
num2 = self.den*other.num
return num1==num2
def __lt__(self, other):
return(self.num/self.den < other.num/other.den)
def __gt__(self, other):
return(other.__lt__(self))
def main():
myf = Fraction(1,4)
myf2 = Fraction(1,2)
print(myf>myf2)
if __name__ == '__main__':
main()
|
Add a class to implement a fraction in python, to deal with method overriding# gcd(a,b) = gcd(b, a%b). NB: positive integers only
def gcd(m,n):
    while m % n != 0:
        m, n = n, m % n
return n
class Fraction(object):
""" Represents a fraction, which consists of two integer numbers:
The first one is called the numerator, and the second one is
the denominator.
To instantiate an object of this class you need to provide a
default numerator and denominator to the init method.
"""
def __init__(self, num, den):
self.num = num
self.den = den
#Overriding string conversion method
def __str__(self):
return str(self.num) + " / " + str(self.den)
#Overriding the arithmetic operation for Fractions
def __add__(self, other):
den = other.den * self.den
num = (self.num*den//self.den)+(other.num*den//other.den)
common = gcd(num, den)
return Fraction(num//common,den//common)
def __sub__(self, other):
other.num *= -1
return self.__add__(other)
def __mul__(self, other):
num = self.num * other.num
        den = self.den * other.den
        common = gcd(num, den)
        return Fraction(num//common, den//common)
    def __div__(self, other):
        # Divide by multiplying with the reciprocal of other
        other.num, other.den = other.den, other.num
        return self.__mul__(other)
#Right way to check if two fractions are equal
def __eq__(self, other):
# Cross product
num1 = self.num*other.den
num2 = self.den*other.num
return num1==num2
def __lt__(self, other):
return(self.num/self.den < other.num/other.den)
def __gt__(self, other):
return(other.__lt__(self))
def main():
myf = Fraction(1,4)
myf2 = Fraction(1,2)
print(myf>myf2)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a class to implement a fraction in python, to deal with method overriding<commit_after># gcd(a,b) = gcd(b, a%b). NB: positive integers only
def gcd(m,n):
    while m % n != 0:
        m, n = n, m % n
return n
class Fraction(object):
""" Represents a fraction, which consists of two integer numbers:
The first one is called the numerator, and the second one is
the denominator.
To instantiate an object of this class you need to provide a
default numerator and denominator to the init method.
"""
def __init__(self, num, den):
self.num = num
self.den = den
#Overriding string conversion method
def __str__(self):
return str(self.num) + " / " + str(self.den)
#Overriding the arithmetic operation for Fractions
def __add__(self, other):
den = other.den * self.den
num = (self.num*den//self.den)+(other.num*den//other.den)
common = gcd(num, den)
return Fraction(num//common,den//common)
def __sub__(self, other):
other.num *= -1
return self.__add__(other)
def __mul__(self, other):
num = self.num * other.num
        den = self.den * other.den
        common = gcd(num, den)
        return Fraction(num//common, den//common)
    def __div__(self, other):
        # Divide by multiplying with the reciprocal of other
        other.num, other.den = other.den, other.num
        return self.__mul__(other)
#Right way to check if two fractions are equal
def __eq__(self, other):
# Cross product
num1 = self.num*other.den
num2 = self.den*other.num
return num1==num2
def __lt__(self, other):
return(self.num/self.den < other.num/other.den)
def __gt__(self, other):
return(other.__lt__(self))
def main():
myf = Fraction(1,4)
myf2 = Fraction(1,2)
print(myf>myf2)
if __name__ == '__main__':
main()
|
|
1bae1bc94fea8fd9b8fe25d832ebfce07ad08a1c
|
utilities/build/update_xform_translate_path.py
|
utilities/build/update_xform_translate_path.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that the path to xform_translate.jar is updated dynamically.
It does this by identifying the jar_path_placeholder in the file
identified by the environment variable RAPIDSMS_INI and replacing
it with the value of {project.dir}/lib
CONFIGURATION
jar_path_placeholder: the setting in local.build.ini which we
want to update dynamically
"""
jar_path_placeholder = 'DYNAMIC_PATH_TO_XFORM_TRANSLATE_JAR'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
fin = open(local_ini,"r")
ini = fin.read()
fin.close()
if jar_path_placeholder in ini:
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ini.replace(jar_path_placeholder, xform_jar_path)
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that xform_translate_path gets updated to point to the folder
{project.dir}/lib
"""
JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
|
Fix up local.ini updater code to look specifically for 'xform_translate_path'
|
Fix up local.ini updater code to look specifically for 'xform_translate_path'
|
Python
|
bsd-3-clause
|
puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,gmimano/commcaretest
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that the path to xform_translate.jar is updated dynamically.
It does this by identifying the jar_path_placeholder in the file
identified by the environment variable RAPIDSMS_INI and replacing
it with the value of {project.dir}/lib
CONFIGURATION
jar_path_placeholder: the setting in local.build.ini which we
want to update dynamically
"""
jar_path_placeholder = 'DYNAMIC_PATH_TO_XFORM_TRANSLATE_JAR'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
fin = open(local_ini,"r")
ini = fin.read()
fin.close()
if jar_path_placeholder in ini:
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ini.replace(jar_path_placeholder, xform_jar_path)
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
Fix up local.ini updater code to look specifically for 'xform_translate_path'
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that xform_translate_path gets updated to point to the folder
{project.dir}/lib
"""
JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
|
<commit_before>#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that the path to xform_translate.jar is updated dynamically.
It does this by identifying the jar_path_placeholder in the file
identified by the environment variable RAPIDSMS_INI and replacing
it with the value of {project.dir}/lib
CONFIGURATION
jar_path_placeholder: the setting in local.build.ini which we
want to update dynamically
"""
jar_path_placeholder = 'DYNAMIC_PATH_TO_XFORM_TRANSLATE_JAR'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
fin = open(local_ini,"r")
ini = fin.read()
fin.close()
if jar_path_placeholder in ini:
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ini.replace(jar_path_placeholder, xform_jar_path)
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
<commit_msg>Fix up local.ini updater code to look specifically for 'xform_translate_path'<commit_after>
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that xform_translate_path gets updated to point to the folder
{project.dir}/lib
"""
JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that the path to xform_translate.jar is updated dynamically.
It does this by identifying the jar_path_placeholder in the file
identified by the environment variable RAPIDSMS_INI and replacing
it with the value of {project.dir}/lib
CONFIGURATION
jar_path_placeholder: the setting in local.build.ini which we
want to update dynamically
"""
jar_path_placeholder = 'DYNAMIC_PATH_TO_XFORM_TRANSLATE_JAR'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
fin = open(local_ini,"r")
ini = fin.read()
fin.close()
if jar_path_placeholder in ini:
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ini.replace(jar_path_placeholder, xform_jar_path)
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
Fix up local.ini updater code to look specifically for 'xform_translate_path'#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that xform_translate_path gets updated to point to the folder
{project.dir}/lib
"""
JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
|
<commit_before>#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that the path to xform_translate.jar is updated dynamically.
It does this by identifying the jar_path_placeholder in the file
identified by the environment variable RAPIDSMS_INI and replacing
it with the value of {project.dir}/lib
CONFIGURATION
jar_path_placeholder: the setting in local.build.ini which we
want to update dynamically
"""
jar_path_placeholder = 'DYNAMIC_PATH_TO_XFORM_TRANSLATE_JAR'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
fin = open(local_ini,"r")
ini = fin.read()
fin.close()
if jar_path_placeholder in ini:
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ini.replace(jar_path_placeholder, xform_jar_path)
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
<commit_msg>Fix up local.ini updater code to look specifically for 'xform_translate_path'<commit_after>#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
"""
The sole purpose of the following script is to update the
local.ini file used by the dimagi teamcity buildserver
so that xform_translate_path gets updated to point to the folder
{project.dir}/lib
"""
JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
|
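The line-by-line rewrite above works on section-less ini files; if the setting lived inside a section, the standard library parser could do the same job. A rough Python 3 sketch under that assumption (the section name 'xformmanager' is invented for illustration):
import configparser
import os
config = configparser.ConfigParser()
local_ini = os.environ['RAPIDSMS_INI']
config.read(local_ini)
# 'xformmanager' is an assumed section name for illustration only.
if config.has_option('xformmanager', 'xform_translate_path'):
    config.set('xformmanager', 'xform_translate_path', '/path/to/lib')
    with open(local_ini, 'w') as f:
        config.write(f)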
c91ab4c38fabb78842b97584356e816903f4ec0e
|
profile_benchmark.py
|
profile_benchmark.py
|
# Profile the basic test execution
from pyresttest import resttest
from pyresttest.benchmarks import Benchmark
import cProfile
test = Benchmark()
test.warmup_runs = 0
test.benchmark_runs = 1000
test.raw_metrics = set()
test.metrics = {'total_time'}
test.aggregated_metrics = {'total_time': ['total','mean']}
test.url = 'http://localhost:8000/api/person/'
test.name = 'Basic GET'
cProfile.run('resttest.run_benchmark(test)', sort='cumtime')
|
Add script to profile benchmarking time
|
Add script to profile benchmarking time
|
Python
|
apache-2.0
|
satish-suradkar/pyresttest,TimYi/pyresttest,janusnic/pyresttest,netjunki/pyresttest,janusnic/pyresttest,svanoort/pyresttest,sunyanhui/pyresttest,suvarnaraju/pyresttest,satish-suradkar/pyresttest,TimYi/pyresttest,sunyanhui/pyresttest,holdenweb/pyresttest,netjunki/pyresttest,MorrisJobke/pyresttest,holdenweb/pyresttest,svanoort/pyresttest,MorrisJobke/pyresttest,suvarnaraju/pyresttest,alazaro/pyresttest,wirewit/pyresttest,wirewit/pyresttest,alazaro/pyresttest
|
Add script to profile benchmarking time
|
# Profile the basic test execution
from pyresttest import resttest
from pyresttest.benchmarks import Benchmark
import cProfile
test = Benchmark()
test.warmup_runs = 0
test.benchmark_runs = 1000
test.raw_metrics = set()
test.metrics = {'total_time'}
test.aggregated_metrics = {'total_time': ['total','mean']}
test.url = 'http://localhost:8000/api/person/'
test.name = 'Basic GET'
cProfile.run('resttest.run_benchmark(test)', sort='cumtime')
|
<commit_before><commit_msg>Add script to profile benchmarking time<commit_after>
|
# Profile the basic test execution
from pyresttest import resttest
from pyresttest.benchmarks import Benchmark
import cProfile
test = Benchmark()
test.warmup_runs = 0
test.benchmark_runs = 1000
test.raw_metrics = set()
test.metrics = {'total_time'}
test.aggregated_metrics = {'total_time': ['total','mean']}
test.url = 'http://localhost:8000/api/person/'
test.name = 'Basic GET'
cProfile.run('resttest.run_benchmark(test)', sort='cumtime')
|
Add script to profile benchmarking time# Profile the basic test execution
from pyresttest import resttest
from pyresttest.benchmarks import Benchmark
import cProfile
test = Benchmark()
test.warmup_runs = 0
test.benchmark_runs = 1000
test.raw_metrics = set()
test.metrics = {'total_time'}
test.aggregated_metrics = {'total_time': ['total','mean']}
test.url = 'http://localhost:8000/api/person/'
test.name = 'Basic GET'
cProfile.run('resttest.run_benchmark(test)', sort='cumtime')
|
<commit_before><commit_msg>Add script to profile benchmarking time<commit_after># Profile the basic test execution
from pyresttest import resttest
from pyresttest.benchmarks import Benchmark
import cProfile
test = Benchmark()
test.warmup_runs = 0
test.benchmark_runs = 1000
test.raw_metrics = set()
test.metrics = {'total_time'}
test.aggregated_metrics = {'total_time': ['total','mean']}
test.url = 'http://localhost:8000/api/person/'
test.name = 'Basic GET'
cProfile.run('resttest.run_benchmark(test)', sort='cumtime')
|
|
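cProfile.run with sort='cumtime' prints straight to stdout; to post-process the numbers instead, the profile can be captured and queried with pstats. A small standalone sketch, independent of pyresttest:
import cProfile
import io
import pstats
pr = cProfile.Profile()
pr.enable()
sum(i * i for i in range(100000))  # stand-in for the benchmarked call
pr.disable()
s = io.StringIO()
pstats.Stats(pr, stream=s).sort_stats('cumulative').print_stats(5)
print(s.getvalue())  # top 5 entries by cumulative time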
5eaf915f7fe6ed590e58b5cc554b4477568dda85
|
tests/test_processing_invariant.py
|
tests/test_processing_invariant.py
|
"""Test that we can read and write bib entries with postprocessing."""
import bibpy
import pytest
@pytest.fixture
def test_string():
return """@article{key,
author = {James Conway and Archer Sterling and},
xdata = {key1, key2,key3, key4, key5 ,},
urldate = {2017-01-14},
keywords = {parsing; computer science ; databases; },
year = {1957},
month = {11},
msg = "Part of " # var # " string",
foreword = {Jan Leo {and} the Editors}
}"""
def test_processing_invariant(test_string):
entry = bibpy.read_string(test_string, 'relaxed',
postprocess=True).entries[0]
assert entry.author == ['James Conway', 'Archer Sterling']
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
entry = bibpy.read_string(bibpy.write_string([entry]), 'relaxed',
postprocess=True).entries[0]
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
|
Add test for processing invariant
|
Add test for processing invariant
|
Python
|
mit
|
MisanthropicBit/bibpy,MisanthropicBit/bibpy
|
Add test for processing invariant
|
"""Test that we can read and write bib entries with postprocessing."""
import bibpy
import pytest
@pytest.fixture
def test_string():
return """@article{key,
author = {James Conway and Archer Sterling and},
xdata = {key1, key2,key3, key4, key5 ,},
urldate = {2017-01-14},
keywords = {parsing; computer science ; databases; },
year = {1957},
month = {11},
msg = "Part of " # var # " string",
foreword = {Jan Leo {and} the Editors}
}"""
def test_processing_invariant(test_string):
entry = bibpy.read_string(test_string, 'relaxed',
postprocess=True).entries[0]
assert entry.author == ['James Conway', 'Archer Sterling']
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
entry = bibpy.read_string(bibpy.write_string([entry]), 'relaxed',
postprocess=True).entries[0]
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
|
<commit_before><commit_msg>Add test for processing invariant<commit_after>
|
"""Test that we can read and write bib entries with postprocessing."""
import bibpy
import pytest
@pytest.fixture
def test_string():
return """@article{key,
author = {James Conway and Archer Sterling and},
xdata = {key1, key2,key3, key4, key5 ,},
urldate = {2017-01-14},
keywords = {parsing; computer science ; databases; },
year = {1957},
month = {11},
msg = "Part of " # var # " string",
foreword = {Jan Leo {and} the Editors}
}"""
def test_processing_invariant(test_string):
entry = bibpy.read_string(test_string, 'relaxed',
postprocess=True).entries[0]
assert entry.author == ['James Conway', 'Archer Sterling']
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
entry = bibpy.read_string(bibpy.write_string([entry]), 'relaxed',
postprocess=True).entries[0]
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
|
Add test for processing invariant"""Test that we can read and write bib entries with postprocessing."""
import bibpy
import pytest
@pytest.fixture
def test_string():
return """@article{key,
author = {James Conway and Archer Sterling and},
xdata = {key1, key2,key3, key4, key5 ,},
urldate = {2017-01-14},
keywords = {parsing; computer science ; databases; },
year = {1957},
month = {11},
msg = "Part of " # var # " string",
foreword = {Jan Leo {and} the Editors}
}"""
def test_processing_invariant(test_string):
entry = bibpy.read_string(test_string, 'relaxed',
postprocess=True).entries[0]
assert entry.author == ['James Conway', 'Archer Sterling']
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
entry = bibpy.read_string(bibpy.write_string([entry]), 'relaxed',
postprocess=True).entries[0]
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
|
<commit_before><commit_msg>Add test for processing invariant<commit_after>"""Test that we can read and write bib entries with postprocessing."""
import bibpy
import pytest
@pytest.fixture
def test_string():
return """@article{key,
author = {James Conway and Archer Sterling and},
xdata = {key1, key2,key3, key4, key5 ,},
urldate = {2017-01-14},
keywords = {parsing; computer science ; databases; },
year = {1957},
month = {11},
msg = "Part of " # var # " string",
foreword = {Jan Leo {and} the Editors}
}"""
def test_processing_invariant(test_string):
entry = bibpy.read_string(test_string, 'relaxed',
postprocess=True).entries[0]
assert entry.author == ['James Conway', 'Archer Sterling']
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
entry = bibpy.read_string(bibpy.write_string([entry]), 'relaxed',
postprocess=True).entries[0]
assert entry.xdata == ['key1', 'key2', 'key3', 'key4', 'key5']
assert entry.urldate == bibpy.date.DateRange.fromstring('2017-01-14')
assert entry.keywords == ['parsing', 'computer science', 'databases']
assert entry.year == 1957
assert entry.month == 'November'
assert entry.msg == '"Part of " # var # " string"'
assert entry.foreword == ['Jan Leo and the Editors']
|
|
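The test above exercises a read, write, read round-trip invariant. The pattern generalizes to any serializer; a library-agnostic sketch with json as a stand-in:
import json
def roundtrip(value):
    """Serialize and re-parse, returning the reconstructed value."""
    return json.loads(json.dumps(value))
data = {"keywords": ["parsing", "computer science"], "year": 1957}
assert roundtrip(data) == data  # the processing invariant holds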
33e88fa6d68172f49e3e23d38f3dc58044cf92a0
|
python/ctci_fibonacci_numbers.py
|
python/ctci_fibonacci_numbers.py
|
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
n = int(input())
print(fibonacci(n))
|
Solve all but first test case
|
Solve all but first test case
|
Python
|
mit
|
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
|
Solve all but first test case
|
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
n = int(input())
print(fibonacci(n))
|
<commit_before><commit_msg>Solve all but first test case<commit_after>
|
def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
n = int(input())
print(fibonacci(n))
|
Solve all but first test casedef fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
n = int(input())
print(fibonacci(n))
|
<commit_before><commit_msg>Solve all but first test case<commit_after>def fibonacci(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
n = int(input())
print(fibonacci(n))
|
|
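The commit subject ("Solve all but first test case") suggests the naive recursion above falls short somewhere, and since it runs in exponential time, timing-limited cases are the usual suspect. A memoized variant with the same interface, offered as a sketch rather than the repository's eventual fix:
from functools import lru_cache
@lru_cache(maxsize=None)
def fibonacci(n):
    # Cached results make the recursion linear instead of exponential.
    if n < 2:
        return n  # fib(0) = 0, fib(1) = 1
    return fibonacci(n - 1) + fibonacci(n - 2)
print(fibonacci(30))  # 832040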
8e833781bc121e07e49331c277179d9b587dd5e7
|
tests/test_task_processing.py
|
tests/test_task_processing.py
|
# Copyright 2015-2017 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We just want to test that task_processing is available in the virtualenv
from __future__ import absolute_import
from __future__ import unicode_literals
def test_import():
from task_processing.task_processor import TaskProcessor
tp = TaskProcessor()
tp.load_plugin('task_processing.plugins.mesos')
|
Add basic smoke test that task_processing is available
|
Add basic smoke test that task_processing is available
|
Python
|
apache-2.0
|
somic/paasta,somic/paasta,Yelp/paasta,Yelp/paasta
|
Add basic smoke test that task_processing is available
|
# Copyright 2015-2017 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We just want to test that task_processing is available in the virtualenv
from __future__ import absolute_import
from __future__ import unicode_literals
def test_import():
from task_processing.task_processor import TaskProcessor
tp = TaskProcessor()
tp.load_plugin('task_processing.plugins.mesos')
|
<commit_before><commit_msg>Add basic smoke test that task_processing is available<commit_after>
|
# Copyright 2015-2017 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We just want to test that task_processing is available in the virtualenv
from __future__ import absolute_import
from __future__ import unicode_literals
def test_import():
from task_processing.task_processor import TaskProcessor
tp = TaskProcessor()
tp.load_plugin('task_processing.plugins.mesos')
|
Add basic smoke test that task_processing is available# Copyright 2015-2017 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We just want to test that task_processing is available in the virtualenv
from __future__ import absolute_import
from __future__ import unicode_literals
def test_import():
from task_processing.task_processor import TaskProcessor
tp = TaskProcessor()
tp.load_plugin('task_processing.plugins.mesos')
|
<commit_before><commit_msg>Add basic smoke test that task_processing is available<commit_after># Copyright 2015-2017 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# We just want to test that task_processing is available in the virtualenv
from __future__ import absolute_import
from __future__ import unicode_literals
def test_import():
from task_processing.task_processor import TaskProcessor
tp = TaskProcessor()
tp.load_plugin('task_processing.plugins.mesos')
|
|
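Import-only smoke tests like this fail hard when the optional dependency is absent. If skipping is preferable to failing, pytest offers importorskip; a generic sketch, not how paasta structures it:
import pytest
def test_import_optional():
    # Skips (rather than errors) when task_processing is not installed.
    tp_mod = pytest.importorskip("task_processing.task_processor")
    assert hasattr(tp_mod, "TaskProcessor")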
d5442c89c27b672496ab5aa71e68b38aa8166f73
|
tests/unit/test_gcs_client.py
|
tests/unit/test_gcs_client.py
|
import uuid
import pytest
from globus_cli.services.gcs import get_gcs_client
def test_get_gcs_client_missing_required_auth():
gcsid = str(uuid.uuid1())
with pytest.raises(ValueError) as excinfo:
get_gcs_client(gcsid)
assert f"--gcs {gcsid}" in str(excinfo.value)
|
Add test for get_gcs_client with un-authed ID
|
Add test for get_gcs_client with un-authed ID
|
Python
|
apache-2.0
|
globus/globus-cli,globus/globus-cli
|
Add test for get_gcs_client with un-authed ID
|
import uuid
import pytest
from globus_cli.services.gcs import get_gcs_client
def test_get_gcs_client_missing_required_auth():
gcsid = str(uuid.uuid1())
with pytest.raises(ValueError) as excinfo:
get_gcs_client(gcsid)
assert f"--gcs {gcsid}" in str(excinfo.value)
|
<commit_before><commit_msg>Add test for get_gcs_client with un-authed ID<commit_after>
|
import uuid
import pytest
from globus_cli.services.gcs import get_gcs_client
def test_get_gcs_client_missing_required_auth():
gcsid = str(uuid.uuid1())
with pytest.raises(ValueError) as excinfo:
get_gcs_client(gcsid)
assert f"--gcs {gcsid}" in str(excinfo.value)
|
Add test for get_gcs_client with un-authed IDimport uuid
import pytest
from globus_cli.services.gcs import get_gcs_client
def test_get_gcs_client_missing_required_auth():
gcsid = str(uuid.uuid1())
with pytest.raises(ValueError) as excinfo:
get_gcs_client(gcsid)
assert f"--gcs {gcsid}" in str(excinfo.value)
|
<commit_before><commit_msg>Add test for get_gcs_client with un-authed ID<commit_after>import uuid
import pytest
from globus_cli.services.gcs import get_gcs_client
def test_get_gcs_client_missing_required_auth():
gcsid = str(uuid.uuid1())
with pytest.raises(ValueError) as excinfo:
get_gcs_client(gcsid)
assert f"--gcs {gcsid}" in str(excinfo.value)
|
|
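A side note on the test above: uuid.uuid1() embeds the host MAC address and a timestamp, while uuid.uuid4() is purely random; both yield valid UUID strings for a throwaway test ID. A quick illustration:
import uuid
print(uuid.uuid1())  # time/MAC based; leaks host information
print(uuid.uuid4())  # fully random; the usual choice for test fixtures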
88409bd0d71138d02729d03bf66aebc65e98acf8
|
python/test/test_site_tensors.py
|
python/test/test_site_tensors.py
|
import unittest
import numpy as np
from spglib import get_symmetry
class TestGetOperationsWithSiteTensors(unittest.TestCase):
def setUp(self):
lattice = [[0, 2, 2], [2, 0, 2], [2, 2, 0]]
positions = [[0, 0, 0]]
numbers = [1]
magmoms = [[0, 0, 1]]
self._cell_Ni = (lattice, positions, numbers, magmoms)
def tearDown(self):
pass
def test_get_symmetry_non_collinear(self):
sym = get_symmetry(self._cell_Ni)
self.assertEqual(8, len(sym['rotations']))
np.testing.assert_equal(sym['equivalent_atoms'], [0])
def test_get_symmetry_vectors(self):
pass
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(
TestGetOperationsWithSiteTensors)
unittest.TextTestRunner(verbosity=2).run(suite)
# unittest.main()
|
Add new test for symmetry with site tensors
|
Add new test for symmetry with site tensors
|
Python
|
bsd-3-clause
|
atztogo/spglib,atztogo/spglib,atztogo/spglib,atztogo/spglib,atztogo/spglib
|
Add new test for symmetry with site tensors
|
import unittest
import numpy as np
from spglib import get_symmetry
class TestGetOperationsWithSiteTensors(unittest.TestCase):
def setUp(self):
lattice = [[0, 2, 2], [2, 0, 2], [2, 2, 0]]
positions = [[0, 0, 0]]
numbers = [1]
magmoms = [[0, 0, 1]]
self._cell_Ni = (lattice, positions, numbers, magmoms)
def tearDown(self):
pass
def test_get_symmetry_non_collinear(self):
sym = get_symmetry(self._cell_Ni)
self.assertEqual(8, len(sym['rotations']))
np.testing.assert_equal(sym['equivalent_atoms'], [0])
def test_get_symmetry_vectors(self):
pass
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(
TestGetOperationsWithSiteTensors)
unittest.TextTestRunner(verbosity=2).run(suite)
# unittest.main()
|
<commit_before><commit_msg>Add new test for symmetry with site tensors<commit_after>
|
import unittest
import numpy as np
from spglib import get_symmetry
class TestGetOperationsWithSiteTensors(unittest.TestCase):
def setUp(self):
lattice = [[0, 2, 2], [2, 0, 2], [2, 2, 0]]
positions = [[0, 0, 0]]
numbers = [1]
magmoms = [[0, 0, 1]]
self._cell_Ni = (lattice, positions, numbers, magmoms)
def tearDown(self):
pass
def test_get_symmetry_non_collinear(self):
sym = get_symmetry(self._cell_Ni)
self.assertEqual(8, len(sym['rotations']))
np.testing.assert_equal(sym['equivalent_atoms'], [0])
def test_get_symmetry_vectors(self):
pass
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(
TestGetOperationsWithSiteTensors)
unittest.TextTestRunner(verbosity=2).run(suite)
# unittest.main()
|
Add new test for symmetry with site tensorsimport unittest
import numpy as np
from spglib import get_symmetry
class TestGetOperationsWithSiteTensors(unittest.TestCase):
def setUp(self):
lattice = [[0, 2, 2], [2, 0, 2], [2, 2, 0]]
positions = [[0, 0, 0]]
numbers = [1]
magmoms = [[0, 0, 1]]
self._cell_Ni = (lattice, positions, numbers, magmoms)
def tearDown(self):
pass
def test_get_symmetry_non_collinear(self):
sym = get_symmetry(self._cell_Ni)
self.assertEqual(8, len(sym['rotations']))
np.testing.assert_equal(sym['equivalent_atoms'], [0])
def test_get_symmetry_vectors(self):
pass
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(
TestGetOperationsWithSiteTensors)
unittest.TextTestRunner(verbosity=2).run(suite)
# unittest.main()
|
<commit_before><commit_msg>Add new test for symmetry with site tensors<commit_after>import unittest
import numpy as np
from spglib import get_symmetry
class TestGetOperationsWithSiteTensors(unittest.TestCase):
def setUp(self):
lattice = [[0, 2, 2], [2, 0, 2], [2, 2, 0]]
positions = [[0, 0, 0]]
numbers = [1]
magmoms = [[0, 0, 1]]
self._cell_Ni = (lattice, positions, numbers, magmoms)
def tearDown(self):
pass
def test_get_symmetry_non_collinear(self):
sym = get_symmetry(self._cell_Ni)
self.assertEqual(8, len(sym['rotations']))
np.testing.assert_equal(sym['equivalent_atoms'], [0])
def test_get_symmetry_vectors(self):
pass
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(
TestGetOperationsWithSiteTensors)
unittest.TextTestRunner(verbosity=2).run(suite)
# unittest.main()
|
|
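For contrast with the non-collinear case tested above, the same call accepts a plain (lattice, positions, numbers) cell with no magmoms. A minimal sketch (requires spglib; the expected count reflects the full fcc point group):
import spglib
lattice = [[0, 2, 2], [2, 0, 2], [2, 2, 0]]  # fcc primitive cell
cell = (lattice, [[0, 0, 0]], [1])  # no magnetic moments supplied
sym = spglib.get_symmetry(cell)
print(len(sym['rotations']))  # 48 for fcc, vs 8 once the [0, 0, 1] moment is added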
f47f30d9f6e19c99c0beff94aa5d7a8d276b78b2
|
branchandcommit.py
|
branchandcommit.py
|
import tempfile
import pygit2
tmpdir = tempfile.TemporaryDirectory(suffix='.git', prefix='branchandcommit-')
print('Creating bare repository: {}'.format(tmpdir.name))
repo = pygit2.init_repository(tmpdir.name, True)
|
Add initial bare repository generation.
|
Add initial bare repository generation.
|
Python
|
mit
|
davidstrauss/branch-and-commit-to-bare-repo
|
Add initial bare repository generation.
|
import tempfile
import pygit2
tmpdir = tempfile.TemporaryDirectory(suffix='.git', prefix='branchandcommit-')
print('Creating bare repository: {}'.format(tmpdir.name))
repo = pygit2.init_repository(tmpdir.name, True)
|
<commit_before><commit_msg>Add initial bare repository generation.<commit_after>
|
import tempfile
import pygit2
tmpdir = tempfile.TemporaryDirectory(suffix='.git', prefix='branchandcommit-')
print('Creating bare repository: {}'.format(tmpdir.name))
repo = pygit2.init_repository(tmpdir.name, True)
|
Add initial bare repository generation.import tempfile
import pygit2
tmpdir = tempfile.TemporaryDirectory(suffix='.git', prefix='branchandcommit-')
print('Creating bare repository: {}'.format(tmpdir.name))
repo = pygit2.init_repository(tmpdir.name, True)
|
<commit_before><commit_msg>Add initial bare repository generation.<commit_after>import tempfile
import pygit2
tmpdir = tempfile.TemporaryDirectory(suffix='.git', prefix='branchandcommit-')
print('Creating bare repository: {}'.format(tmpdir.name))
repo = pygit2.init_repository(tmpdir.name, True)
|
|
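The script above stops after creating the bare repository. In the spirit of its name, committing a file to a branch in that repository would look roughly like this with pygit2; treat the exact calls as a sketch against the pygit2 API of that era:
import pygit2
def commit_file(repo, branch, filename, content, message):
    # Build a one-file tree and commit it to refs/heads/<branch>.
    blob_id = repo.create_blob(content.encode())
    tree = repo.TreeBuilder()
    tree.insert(filename, blob_id, pygit2.GIT_FILEMODE_BLOB)
    sig = pygit2.Signature('Example', 'example@example.com')
    return repo.create_commit('refs/heads/' + branch, sig, sig,
                              message, tree.write(), [])  # [] = no parents
commit_file(repo, 'master', 'README', 'hello\n', 'Initial commit')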
88102b625d239d3c35187d6b398cf0f1a8171015
|
pajbot/migration_revisions/db/0006_create_user_rank_materialized_view.py
|
pajbot/migration_revisions/db/0006_create_user_rank_materialized_view.py
|
def up(cursor, bot):
cursor.execute(
"""
CREATE MATERIALIZED VIEW user_rank AS (
SELECT
id as user_id,
RANK() OVER (ORDER BY points DESC) points_rank,
RANK() OVER (ORDER BY num_lines DESC) num_lines_rank
FROM "user"
)
"""
)
cursor.execute("CREATE UNIQUE INDEX ON user_rank(user_id)")
|
Add migration to create user_rank materialized view
|
Add migration to create user_rank materialized view
|
Python
|
mit
|
pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot,pajlada/tyggbot,pajlada/pajbot
|
Add migration to create user_rank materialized view
|
def up(cursor, bot):
cursor.execute(
"""
CREATE MATERIALIZED VIEW user_rank AS (
SELECT
id as user_id,
RANK() OVER (ORDER BY points DESC) points_rank,
RANK() OVER (ORDER BY num_lines DESC) num_lines_rank
FROM "user"
)
"""
)
cursor.execute("CREATE UNIQUE INDEX ON user_rank(user_id)")
|
<commit_before><commit_msg>Add migration to create user_rank materialized view<commit_after>
|
def up(cursor, bot):
cursor.execute(
"""
CREATE MATERIALIZED VIEW user_rank AS (
SELECT
id as user_id,
RANK() OVER (ORDER BY points DESC) points_rank,
RANK() OVER (ORDER BY num_lines DESC) num_lines_rank
FROM "user"
)
"""
)
cursor.execute("CREATE UNIQUE INDEX ON user_rank(user_id)")
|
Add migration to create user_rank materialized viewdef up(cursor, bot):
cursor.execute(
"""
CREATE MATERIALIZED VIEW user_rank AS (
SELECT
id as user_id,
RANK() OVER (ORDER BY points DESC) points_rank,
RANK() OVER (ORDER BY num_lines DESC) num_lines_rank
FROM "user"
)
"""
)
cursor.execute("CREATE UNIQUE INDEX ON user_rank(user_id)")
|
<commit_before><commit_msg>Add migration to create user_rank materialized view<commit_after>def up(cursor, bot):
cursor.execute(
"""
CREATE MATERIALIZED VIEW user_rank AS (
SELECT
id as user_id,
RANK() OVER (ORDER BY points DESC) points_rank,
RANK() OVER (ORDER BY num_lines DESC) num_lines_rank
FROM "user"
)
"""
)
cursor.execute("CREATE UNIQUE INDEX ON user_rank(user_id)")
|
|
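A materialized view is a snapshot, so something must refresh it, and the unique index created above is precisely what PostgreSQL requires for a non-blocking refresh. A hypothetical companion job, not part of the pajbot migration itself:
def refresh_user_rank(cursor):
    # CONCURRENTLY avoids locking readers; it requires the unique index
    # on user_rank(user_id) created by the migration above.
    cursor.execute("REFRESH MATERIALIZED VIEW CONCURRENTLY user_rank")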
2cc2a1318da0980911dd0cd0868efb8fad4dd1d8
|
python/ctci-is-binary-search-tree.py
|
python/ctci-is-binary-search-tree.py
|
""" Node is defined as
class node:
def __init__(self, data):
self.data = data
self.left = None
self.right = None
"""
import sys
def checkBST(root):
return validBST(root)
def validBST(node):
if emptyNode(node):
return True
if not validNode(node):
return False
else:
return validBST(node.left) and validBST(node.right)
def validNode(node):
return validLeftSubtree(node) and validRightSubtree(node)
def validLeftSubtree(node):
return node.data > getMax(node.left)
def validRightSubtree(node):
return node.data < getMin(node.right)
def getMax(node):
if emptyNode(node):
return -sys.maxsize - 1
return max(node.data, getMax(node.left), getMax(node.right))
def getMin(node):
if emptyNode(node):
return sys.maxsize
return min(node.data, getMin(node.left), getMin(node.right))
def emptyNode(node):
    return node is None
|
Solve is binary search tree
|
Solve is binary search tree
|
Python
|
mit
|
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
|
Solve is binary search tree
|
""" Node is defined as
class node:
def __init__(self, data):
self.data = data
self.left = None
self.right = None
"""
import sys
def checkBST(root):
return validBST(root)
def validBST(node):
if emptyNode(node):
return True
if not validNode(node):
return False
else:
return validBST(node.left) and validBST(node.right)
def validNode(node):
return validLeftSubtree(node) and validRightSubtree(node)
def validLeftSubtree(node):
return node.data > getMax(node.left)
def validRightSubtree(node):
return node.data < getMin(node.right)
def getMax(node):
if emptyNode(node):
return -sys.maxsize - 1
return max(node.data, getMax(node.left), getMax(node.right))
def getMin(node):
if emptyNode(node):
return sys.maxsize
return min(node.data, getMin(node.left), getMin(node.right))
def emptyNode(node):
    return node is None
|
<commit_before><commit_msg>Solve is binary search tree<commit_after>
|
""" Node is defined as
class node:
def __init__(self, data):
self.data = data
self.left = None
self.right = None
"""
import sys
def checkBST(root):
return validBST(root)
def validBST(node):
if emptyNode(node):
return True
if not validNode(node):
return False
else:
return validBST(node.left) and validBST(node.right)
def validNode(node):
return validLeftSubtree(node) and validRightSubtree(node)
def validLeftSubtree(node):
return node.data > getMax(node.left)
def validRightSubtree(node):
return node.data < getMin(node.right)
def getMax(node):
if emptyNode(node):
return -sys.maxsize - 1
return max(node.data, getMax(node.left), getMax(node.right))
def getMin(node):
if emptyNode(node):
return sys.maxsize
return min(node.data, getMin(node.left), getMin(node.right))
def emptyNode(node):
    return node is None
|
Solve is binary search tree""" Node is defined as
class node:
def __init__(self, data):
self.data = data
self.left = None
self.right = None
"""
import sys
def checkBST(root):
return validBST(root)
def validBST(node):
if emptyNode(node):
return True
if not validNode(node):
return False
else:
return validBST(node.left) and validBST(node.right)
def validNode(node):
return validLeftSubtree(node) and validRightSubtree(node)
def validLeftSubtree(node):
return node.data > getMax(node.left)
def validRightSubtree(node):
return node.data < getMin(node.right)
def getMax(node):
if emptyNode(node):
return -sys.maxsize - 1
return max(node.data, getMax(node.left), getMax(node.right))
def getMin(node):
if emptyNode(node):
return sys.maxsize
return min(node.data, getMin(node.left), getMin(node.right))
def emptyNode(node):
    return node is None
|
<commit_before><commit_msg>Solve is binary search tree<commit_after>""" Node is defined as
class node:
def __init__(self, data):
self.data = data
self.left = None
self.right = None
"""
import sys
def checkBST(root):
return validBST(root)
def validBST(node):
if emptyNode(node):
return True
if not validNode(node):
return False
else:
return validBST(node.left) and validBST(node.right)
def validNode(node):
return validLeftSubtree(node) and validRightSubtree(node)
def validLeftSubtree(node):
return node.data > getMax(node.left)
def validRightSubtree(node):
return node.data < getMin(node.right)
def getMax(node):
if emptyNode(node):
return -sys.maxsize - 1
return max(node.data, getMax(node.left), getMax(node.right))
def getMin(node):
if emptyNode(node):
return sys.maxsize
return min(node.data, getMin(node.left), getMin(node.right))
def emptyNode(node):
    return node is None
|
|
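The getMax/getMin helpers above rescan entire subtrees at every node, which is quadratic in the worst case. The standard linear-time alternative threads min/max bounds down the recursion; a sketch using the same node shape:
import sys
def checkBST_fast(root, lo=-sys.maxsize - 1, hi=sys.maxsize):
    # Each node is visited once, with the open interval it must fall in.
    if root is None:
        return True
    if not (lo < root.data < hi):
        return False
    return (checkBST_fast(root.left, lo, root.data) and
            checkBST_fast(root.right, root.data, hi))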
41055e52ee2f626c82dafa10c5df1cd90adf288a
|
python/qisrc/test/test_qisrc_diff.py
|
python/qisrc/test/test_qisrc_diff.py
|
## Copyright (c) 2012-2015 Aldebaran Robotics. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the COPYING file.
import qisrc.git
def test_simple(git_server, qisrc_action, record_messages):
git_server.create_repo("foo.git")
git_server.switch_manifest_branch("devel")
git_server.change_branch("foo.git", "devel")
git_server.push_file("foo.git", "devel",
"this is devel\n", branch="devel",
message="start developing")
qisrc_action("init", git_server.manifest_url, "--branch", "devel")
record_messages.reset()
qisrc_action("diff", "--all", "master")
assert record_messages.find("devel | 1 +")
|
Add a test for `qisrc diff`
|
Add a test for `qisrc diff`
Change-Id: I32fe0d298dd8bced991c3b314ed543614266cc24
Reviewed-on: http://gerrit.aldebaran.lan/60721
Tested-by: gerrit
Reviewed-by: vbarbaresi <371b46c96c99af52f4f920034e4fcb63ece5bdb5@aldebaran-robotics.com>
|
Python
|
bsd-3-clause
|
aldebaran/qibuild,aldebaran/qibuild,aldebaran/qibuild,aldebaran/qibuild
|
96b902865cd06f578d7dc3f23f90168b7e8ba7c8
|
txircd/modules/rfc/umode_s.py
|
txircd/modules/rfc/umode_s.py
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ircLower, ModeType, timestamp
from zope.interface import implements
from weakref import WeakSet

irc.RPL_LISTMODE = "728" # Made up, based on freenode's quiet lists
irc.RPL_ENDOFLISTMODE = "729" # Made up, based on freenode's quiet lists
irc.ERR_INVALIDSNOTYPE = "985" # Made up, is not used by any IRCd

class ServerNoticeMode(ModuleData, Mode):
    implements(IPlugin, IModuleData, IMode)

    name = "ServerNoticeMode"
    core = True
    subscribeLists = {}

    def hookIRCd(self, ircd):
        self.ircd = ircd

    def userModes(self):
        return [ ("s", ModeType.List, self) ]

    def actions(self):
        return [ ("modepermission-user-s", 1, self.checkModePermission),
                 ("sendservernotice", 1, self.sendServerNotice) ]

    def checkModePermission(self, user, settingUser, adding, param):
        if adding and not self.ircd.runActionUntilValue("userhasoperpermission", user, "servernotice", users=[user]):
            user.sendMessage(irc.ERR_NOPRIVILEGES, ":Permission denied - You do not have the correct operator privileges")
            return False
        return None

    def sendServerNotice(self, snodata):
        mask = snodata["mask"]
        if mask in self.subscribeLists:
            for u in self.subscribeLists[mask]:
                u.sendMessage("NOTICE", ":*** {}".format(snodata["message"]))

    def checkSet(self, user, param):
        params = param.split(",")
        validparams = []
        for par in params:
            if self.ircd.runActionUntilTrue("servernoticetype", user, par):
                mask = ircLower(par)
                if mask not in self.subscribeLists:
                    self.subscribeLists[mask] = WeakSet()
                if user not in self.subscribeLists[mask] and user.uuid[:3] == self.ircd.serverID:
                    self.subscribeLists[mask].add(user)
                validparams.append(mask)
            else:
                user.sendMessage(irc.ERR_INVALIDSNOTYPE, par, ":Invalid server notice type")
        return validparams

    def checkUnset(self, user, param):
        params = param.split(",")
        validparams = []
        for par in params:
            mask = ircLower(par)
            if mask in self.subscribeLists and user in self.subscribeLists[mask]:
                self.subscribeLists[mask].remove(user)
                validparams.append(mask)
        return validparams

    def showListParams(self, user, target):
        if "s" in target.modes:
            for mask in target.modes["s"]:
                target.sendMessage(irc.RPL_LISTMODE, "s", mask[0], mask[1], str(timestamp(mask[2])))
        target.sendMessage(irc.RPL_ENDOFLISTMODE, ":End of server notice type list")

snoMode = ServerNoticeMode()
|
Implement the server notice mechanism
|
Implement the server notice mechanism
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd
|
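The mode's subscription bookkeeping relies on WeakSet so that users who disconnect simply drop out of the notice lists without explicit cleanup. A standalone sketch of that pattern (plain Python, not the txircd API):

from weakref import WeakSet

subscriptions = {}

class User(object):
    def __init__(self, nick):
        self.nick = nick

def subscribe(user, mask):
    # Create the set lazily, exactly as checkSet does above
    subscriptions.setdefault(mask, WeakSet()).add(user)

def notify(mask, message):
    for user in subscriptions.get(mask, ()):
        print("NOTICE to {}: *** {}".format(user.nick, message))

alice = User("alice")
subscribe(alice, "links")
notify("links", "server connected")  # alice receives the notice
del alice  # the WeakSet forgets alice automatically; no unsubscribe needed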
af4fe82552b894073e3f4c7fae814deb80487b3c
|
api_v3/migrations/0010_drop_profile_not_null.py
|
api_v3/migrations/0010_drop_profile_not_null.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-06-15 6:12
from __future__ import unicode_literals

from django.db import migrations, connection


class Migration(migrations.Migration):

    dependencies = [
        ('api_v3', '0009_added_subscriber'),
    ]

    COLUMNS_SQL = """
        SELECT column_name FROM information_schema.columns
        WHERE table_name = 'accounts_profile' AND
            column_name IN
                ('user_created', 'requester_type', 'organization', 'last_login')
            AND is_nullable = 'NO';
    """
    DROP_NOT_NULL_SQL = """
        ALTER TABLE accounts_profile ALTER COLUMN {} DROP NOT NULL;
    """

    operations = []

    with connection.cursor() as cursor:
        cursor.execute(COLUMNS_SQL)
        cols = cursor.fetchall()

        for c in cols:
            operations.append(migrations.RunSQL(DROP_NOT_NULL_SQL.format(c[0])))
|
Reset `not null` on some legacy fields.
|
Reset `not null` on some legacy fields.
|
Python
|
mit
|
occrp/id-backend
|
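Since the migration builds its RunSQL operations dynamically from information_schema, the effective result is equivalent to the following static sketch, assuming all four legacy columns are still NOT NULL at migration time:

from django.db import migrations

operations = [
    migrations.RunSQL(
        'ALTER TABLE accounts_profile ALTER COLUMN {} DROP NOT NULL;'.format(col)
    )
    for col in ('user_created', 'requester_type', 'organization', 'last_login')
]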
b077a5dba84cbc7b3fd37e276be1635301c79d99
|
website/core/settings/loc.py
|
website/core/settings/loc.py
|
"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets
sys.path.append(
abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
)
try:
from secrets import *
except ImportError, e:
print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'
|
"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets (no project-specific secrets)
#sys.path.append(
# abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
#)
#try:
# from secrets import *
#except ImportError, e:
# print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'
|
Comment out project-specific secrets import warning (not needed)
|
Comment out project-specific secrets import warning (not needed)
|
Python
|
mpl-2.0
|
cr3ative/StoryMapJS,wangjun/StoryMapJS,BenHeubl/StoryMapJS,BenHeubl/StoryMapJS,wangjun/StoryMapJS,cr3ative/StoryMapJS,miguelpaz/StoryMapJS,jorol/StoryMapJS,miguelpaz/StoryMapJS,jorol/StoryMapJS,1modm/StoryMapJS,cr3ative/StoryMapJS,makinacorpus/StoryMapJS,wangjun/StoryMapJS,miguelpaz/StoryMapJS,1modm/StoryMapJS,cr3ative/StoryMapJS,makinacorpus/StoryMapJS,BenHeubl/StoryMapJS,wangjun/StoryMapJS,jorol/StoryMapJS,jorol/StoryMapJS,BenHeubl/StoryMapJS,1modm/StoryMapJS,makinacorpus/StoryMapJS,1modm/StoryMapJS
|
"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets
sys.path.append(
abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
)
try:
from secrets import *
except ImportError, e:
print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'Comment out project-specific secrets import warning (not needed)
|
"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets (no project-specific secrets)
#sys.path.append(
# abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
#)
#try:
# from secrets import *
#except ImportError, e:
# print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'
|
<commit_before>"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets
sys.path.append(
abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
)
try:
from secrets import *
except ImportError, e:
print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'<commit_msg>Comment out project-specific secrets import warning (not needed)<commit_after>
|
"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets (no project-specific secrets)
#sys.path.append(
# abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
#)
#try:
# from secrets import *
#except ImportError, e:
# print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'
|
"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets
sys.path.append(
abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
)
try:
from secrets import *
except ImportError, e:
print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'Comment out project-specific secrets import warning (not needed)"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets (no project-specific secrets)
#sys.path.append(
# abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
#)
#try:
# from secrets import *
#except ImportError, e:
# print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'
|
<commit_before>"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets
sys.path.append(
abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
)
try:
from secrets import *
except ImportError, e:
print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'<commit_msg>Comment out project-specific secrets import warning (not needed)<commit_after>"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets (no project-specific secrets)
#sys.path.append(
# abspath(join(PROJECT_ROOT, '../secrets/storymapjs/stg'))
#)
#try:
# from secrets import *
#except ImportError, e:
# print 'WARNING: Could not import project secrets (%s). You will not be able to deploy.' % (e)
# Set static URL
STATIC_URL = '/static'
|
b810e2b7cdd612b1dba522e36241a472d59376ca
|
scripts/convert_landmarks_example.py
|
scripts/convert_landmarks_example.py
|
#!/usr/bin/env python3

class Landmarks:
    args = 'name', 'shortname', 'points', 'query', 'hashtags', 'phrase', 'is_area', 'query_suffix'

    def __init__(self, query_suffix=None):
        self.query_suffix = query_suffix
        self.landmarks = []

    def add(self, *args, **kwargs):
        dictionary = {self.args[num]: arg for num, arg in enumerate(args)}
        dictionary.update(kwargs)
        self.landmarks.append(dictionary)

    def print_config(self):
        print('Replace your old Landmarks config with the following:\n')
        if self.query_suffix:
            print("QUERY_SUFFIX = '{}'".format(self.query_suffix))
        print('LANDMARKS =', tuple(self.landmarks))

### replace example below with your own old-style landmarks config ###
LANDMARKS = Landmarks(query_suffix='Salt Lake City')
LANDMARKS.add('Rice Eccles Stadium', hashtags={'Utes'})
LANDMARKS.add('the Salt Lake Temple', hashtags={'TempleSquare'})
LANDMARKS.add('City Creek Center', points=((40.769210, -111.893901), (40.767231, -111.888275)), hashtags={'CityCreek'})
LANDMARKS.add('the State Capitol', query='Utah State Capitol Building')
LANDMARKS.add('the University of Utah', hashtags={'Utes'}, phrase='at', is_area=True)
LANDMARKS.add('Yalecrest', points=((40.750263, -111.836502), (40.750377, -111.851108), (40.751515, -111.853833), (40.741212, -111.853909), (40.741188, -111.836519)), is_area=True)
### replace example above with your own old-style landmarks config ###

LANDMARKS.print_config()
|
Add a script for converting landmarks config
|
Add a script for converting landmarks config
Add an example script for converting old-style landmarks configurations
(which construct an actual Landmarks instance in config.py) to the
new style of using standard Python data types (a tuple/list of dicts).
|
Python
|
mit
|
sebast1219/Monocle,sebast1219/Monocle,sebast1219/Monocle
|
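Running the example through print_config emits the new-style configuration; an abbreviated sketch of the expected output (dict key order may vary by Python version):

Replace your old Landmarks config with the following:

QUERY_SUFFIX = 'Salt Lake City'
LANDMARKS = ({'name': 'Rice Eccles Stadium', 'hashtags': {'Utes'}}, {'name': 'the Salt Lake Temple', 'hashtags': {'TempleSquare'}}, ...)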
6717e73d1dfb5b7b7a6d1e2cd24e1983d41c0fc3
|
tests/validation/tests/v3_api/test_node.py
|
tests/validation/tests/v3_api/test_node.py
|
import pytest
from .common import *  # NOQA


def test_add_node_label():
    client, cluster = get_global_admin_client_and_cluster()
    test_label = "foo"
    nodes = client.list_node(clusterId=cluster.id)
    assert len(nodes.data) > 0
    node_id = nodes.data[0].id
    node = client.by_id_node(node_id)

    # Make sure there is no test label yet, then add the test label
    node_labels = node.labels.data_dict()
    assert test_label not in node_labels
    node_labels[test_label] = "bar"
    client.update(node, labels=node_labels)

    # Label should be added
    wait_for_condition(client, node, check_label_added(test_label), None, 10)
    node = client.reload(node)
    node_labels = node.labels.data_dict()
    assert node_labels[test_label] == "bar"

    # Label should be deleted
    del node_labels[test_label]
    client.update(node, labels=node_labels)
    wait_for_condition(client, node, check_label_removed(test_label), None, 10)
    node = client.reload(node)
    node_labels = node.labels.data_dict()
    assert test_label not in node_labels


def check_label_added(test_label):
    def _find_condition(resource):
        node_labels = resource.labels.data_dict()
        if test_label in node_labels:
            return True
        else:
            return False
    return _find_condition


def check_label_removed(test_label):
    def _find_condition(resource):
        node_labels = resource.labels.data_dict()
        if test_label not in node_labels:
            return True
        else:
            return False
    return _find_condition
|
Add validation test for adding node label
|
Add validation test for adding node label
https://github.com/rancher/rancher/issues/24123
|
Python
|
apache-2.0
|
rancher/rancher,cjellick/rancher,rancher/rancher,rancherio/rancher,rancher/rancher,rancher/rancher,cjellick/rancher,cjellick/rancher,rancherio/rancher
|
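The two condition factories in this test spell the membership check out with an if/else; each reduces to a one-line closure with identical behavior, e.g.:

def check_label_added(test_label):
    def _find_condition(resource):
        # True exactly when the label is present
        return test_label in resource.labels.data_dict()
    return _find_condition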
00cf923625d6327b0d0790bf011b2135bb41b239
|
ideascube/conf/idb_fra_jeancarre.py
|
ideascube/conf/idb_fra_jeancarre.py
|
# -*- coding: utf-8 -*-
"""Ideaxbox for Emmaus, France"""
from .idb import *  # noqa

from django.utils.translation import ugettext_lazy as _

IDEASCUBE_NAME = u"Jean Carré"
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['FR']
TIME_ZONE = None
LANGUAGE_CODE = 'fr'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
    ('Ideasbox', ['serial', 'box_awareness']),
    (_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']),  # noqa
    (_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']),  # noqa
    (_('In the town'), ['current_occupation', 'school_level']),
    (_('Language skills'), ['en_level']),
)
HOME_CARDS = STAFF_HOME_CARDS + [
    {
        'id': 'blog',
    },
    {
        'id': 'library',
    },
    {
        'id': 'mediacenter',
    },
    {
        'id': 'wikipedia',
        'languages': ['fr', 'ar', 'fa']
    },
    {
        'id': 'gutenberg',
        'lang': 'fr',
    },
    {
        'id': 'khanacademy',
    },
    {
        'id': 'cest-pas-sorcier',
    },
    {
        'id': 'wikisource',
        'languages': ['fr']
    },
    {
        'id': 'wikibooks',
        'languages': ['fr']
    },
    {
        'id': 'wiktionary',
        'languages': ['fr']
    },
    {
        'id': 'ted',
        'sessions': [
            ('tedxgeneva2014.fr', 'Geneva 2014'),
            ('tedxlausanne2012.fr', 'Lausanne 2012'),
            ('tedxlausanne2013.fr', 'Lausanne 2013'),
            ('tedxlausanne2014.fr', 'Lausanne 2014'),
        ]
    },
]
|
Remove card wikiversity and rename project name
|
Remove card wikiversity and rename project name
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
b6d0ff750bb48c8be514a8a36f1b7ab30efb460a
|
tests/scoring_engine/test_config.py
|
tests/scoring_engine/test_config.py
|
from scoring_engine.config_loader import ConfigLoader


class MockConfigLoader():
    pass

ConfigLoader = MockConfigLoader

from scoring_engine.config import config


class TestConfig(object):

    def setup(self):
        import pdb; pdb.set_trace()
        # self.config = ConfigLoader(location="../tests/scoring_engine/example.conf")

    # def test_web_debug(self):
    #     assert self.config.web_debug is False

    # def test_checks_location(self):
    #     assert self.config.checks_location == "scoring_engine/engine/checks"

    # def test_check_timeout(self):
    #     assert self.config.check_timeout == 30

    # def test_round_time_sleep(self):
    #     assert self.config.round_time_sleep == 180

    # def test_worker_refresh_time(self):
    #     assert self.config.worker_refresh_time == 30

    # def test_db_uri(self):
    #     assert self.config.db_uri == "sqlite:////tmp/test_engine.db"

    # def test_timezone(self):
    #     assert self.config.timezone == 'US/Eastern'

    # def test_redis_host(self):
    #     assert self.config.redis_host == "127.0.0.1"

    # def test_redis_port(self):
    #     assert self.config.redis_port == 6379

    # def test_redis_password(self):
    #     assert self.config.redis_password == "testpass"
|
Add test for scoring engine config
|
Add test for scoring engine config
|
Python
|
mit
|
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
|
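Note that rebinding `ConfigLoader = MockConfigLoader` in the test module does not affect the `ConfigLoader` name already imported inside `scoring_engine.config`, which is likely why the test is still parked at a pdb breakpoint. A sketch of module-level patching that would intercept the loader, assuming `scoring_engine.config` instantiates `ConfigLoader` when first imported:

from unittest import mock

with mock.patch('scoring_engine.config_loader.ConfigLoader') as fake_loader:
    fake_loader.return_value.web_debug = False  # assumed attribute on the loaded config
    import scoring_engine.config  # picks up the mocked loader on first import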
f44faa9a029b57c96cdb7d1b38a4e3760ac19f94
|
indra/tests/test_elsevier_client.py
|
indra/tests/test_elsevier_client.py
|
from indra.literature import elsevier_client as ec


def test_get_fulltext_article():
    # This article is not open access, so getting a full text response
    # with a body element requires full text access keys to be correctly
    # set up.
    doi = '10.1016/j.cell.2016.02.059'
    text = ec.get_article(doi)
    assert text is not None


def test_get_abstract():
    # If we have an API key but are not on an approved IP or don't have a
    # necessary institution key, we should still be able to get the abstract.
    # If there is a problem with the API key itself, this will log an error
    # and return None.
    doi = '10.1016/j.cell.2016.02.059'
    text = ec.get_abstract(doi)
    assert text is not None
|
Add two smoke tests for Elsevier client
|
Add two smoke tests for Elsevier client
|
Python
|
bsd-2-clause
|
pvtodorov/indra,jmuhlich/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,bgyori/indra,jmuhlich/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,jmuhlich/indra,pvtodorov/indra,bgyori/indra,pvtodorov/indra,johnbachman/belpy
|
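Both smoke tests hit the live Elsevier API, so they fail without network access and valid credentials. One way to keep them from breaking unrelated runs is a skip guard; a sketch assuming the key is exposed via a hypothetical ELSEVIER_API_KEY environment variable:

import os
import pytest

requires_elsevier_key = pytest.mark.skipif(
    not os.environ.get('ELSEVIER_API_KEY'),  # hypothetical variable name
    reason='Elsevier API credentials not configured',
)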
d7d916c196001ca2d23e4a3477145c5383ee99bf
|
tests/v6/test_derived_generators.py
|
tests/v6/test_derived_generators.py
|
import pytest

from .exemplar_generators import EXEMPLAR_DERIVED_GENERATORS


@pytest.mark.parametrize("g", EXEMPLAR_DERIVED_GENERATORS)
def test_derived_generator_is_owner_of_its_constituent_generators(g):
    """
    Each derived generator must be the owner of its constituent generators.
    """
    # Sanity check that list of constituent generators is not empty
    assert g.constituent_generators != []

    # Check that `g` is the owner of each of its constituent generators
    for c in g.constituent_generators:
        assert c.owner is g
|
Add test that derived generator is the owner of its constituent generators
|
Add test that derived generator is the owner of its constituent generators
|
Python
|
mit
|
maxalbert/tohu
|
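One caveat about the sanity check in this test: an empty tuple also compares unequal to an empty list (() != [] is True in Python), so if constituent_generators were ever a tuple the emptiness check would pass vacuously. A length-based assertion is stricter for any container type:

assert len(g.constituent_generators) > 0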
Add test that derived generator is the owner of its constituent generators
|
import pytest
from .exemplar_generators import EXEMPLAR_DERIVED_GENERATORS
@pytest.mark.parametrize("g", EXEMPLAR_DERIVED_GENERATORS)
def test_derived_generator_is_owner_of_its_constituent_generators(g):
"""
"""
# Sanity check that list of constituent generators is not empty
assert g.constituent_generators != []
# Check that `g` is the owner of each of its constituent generators
for c in g.constituent_generators:
assert c.owner is g
|
<commit_before><commit_msg>Add test that derived generator is the owner of its constituent generators<commit_after>
|
import pytest
from .exemplar_generators import EXEMPLAR_DERIVED_GENERATORS
@pytest.mark.parametrize("g", EXEMPLAR_DERIVED_GENERATORS)
def test_derived_generator_is_owner_of_its_constituent_generators(g):
"""
"""
# Sanity check that list of constituent generators is not empty
assert g.constituent_generators != []
# Check that `g` is the owner of each of its constituent generators
for c in g.constituent_generators:
assert c.owner is g
|
Add test that derived generator is the owner of its constituent generatorsimport pytest
from .exemplar_generators import EXEMPLAR_DERIVED_GENERATORS
@pytest.mark.parametrize("g", EXEMPLAR_DERIVED_GENERATORS)
def test_derived_generator_is_owner_of_its_constituent_generators(g):
"""
"""
# Sanity check that list of constituent generators is not empty
assert g.constituent_generators != []
# Check that `g` is the owner of each of its constituent generators
for c in g.constituent_generators:
assert c.owner is g
|
<commit_before><commit_msg>Add test that derived generator is the owner of its constituent generators<commit_after>import pytest
from .exemplar_generators import EXEMPLAR_DERIVED_GENERATORS
@pytest.mark.parametrize("g", EXEMPLAR_DERIVED_GENERATORS)
def test_derived_generator_is_owner_of_its_constituent_generators(g):
"""
"""
# Sanity check that list of constituent generators is not empty
assert g.constituent_generators != []
# Check that `g` is the owner of each of its constituent generators
for c in g.constituent_generators:
assert c.owner is g
|
|
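The ownership invariant asserted above can be shown with a toy implementation. Tohu's real generator classes are not reproduced here, only the pattern the test requires: constructing a derived generator sets itself as the owner of every constituent.

class ToyGenerator:
    def __init__(self):
        self.owner = None

class ToyDerivedGenerator(ToyGenerator):
    def __init__(self, *constituents):
        super().__init__()
        self.constituent_generators = list(constituents)
        # Claim ownership of every generator this one is built from.
        for c in self.constituent_generators:
            c.owner = self

g = ToyDerivedGenerator(ToyGenerator(), ToyGenerator())
assert g.constituent_generators != []
assert all(c.owner is g for c in g.constituent_generators)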
3f9032dd250434f45db97e106da7ff97eb5d39d1
|
tests/test_irc_bot.py
|
tests/test_irc_bot.py
|
"""
:Copyright: 2007-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from irc.client import Event, NickMask, ServerConnection
import pytest
from syslog2irc.irc import create_bot, IrcChannel, IrcConfig, IrcServer
from syslog2irc.signals import irc_channel_joined
@pytest.fixture
def config():
server = IrcServer('irc.server.test')
channels = {IrcChannel('#one'), IrcChannel('#two')}
return IrcConfig(
server=server,
nickname='nick',
realname='Nick',
channels=channels,
)
@pytest.fixture
def bot(config):
bot = create_bot(config)
yield bot
bot.disconnect('Done.')
@pytest.fixture
def nickmask(config):
return NickMask(f'{config.nickname}!{config.nickname}@{config.server.host}')
def test_get_version(bot):
assert bot.get_version() == 'syslog2IRC'
def test_channel_joins(config, bot, nickmask, monkeypatch):
class FakeSocket:
def getpeername(self):
return ('10.0.0.99', 6667)
socket = FakeSocket()
conn = ServerConnection(None)
welcome_event = Event(
type='welcome', source=config.server.host, target=config.nickname
)
def join(self, channel, key=''):
join_event = Event(type='join', source=nickmask, target=channel)
bot.on_join(conn, join_event)
received_signal_data = []
@irc_channel_joined.connect
def handle_irc_channel_joined(sender, **data):
received_signal_data.append(data)
with monkeypatch.context() as mpc:
mpc.setattr(ServerConnection, 'socket', socket)
mpc.setattr(ServerConnection, 'join', join)
bot.on_welcome(conn, welcome_event)
assert received_signal_data == [
{'channel_name': '#one'},
{'channel_name': '#two'},
]
|
Test channel joins of real bot
|
Test channel joins of real bot
|
Python
|
mit
|
homeworkprod/syslog2irc
|
Test channel joins of real bot
|
"""
:Copyright: 2007-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from irc.client import Event, NickMask, ServerConnection
import pytest
from syslog2irc.irc import create_bot, IrcChannel, IrcConfig, IrcServer
from syslog2irc.signals import irc_channel_joined
@pytest.fixture
def config():
server = IrcServer('irc.server.test')
channels = {IrcChannel('#one'), IrcChannel('#two')}
return IrcConfig(
server=server,
nickname='nick',
realname='Nick',
channels=channels,
)
@pytest.fixture
def bot(config):
bot = create_bot(config)
yield bot
bot.disconnect('Done.')
@pytest.fixture
def nickmask(config):
return NickMask(f'{config.nickname}!{config.nickname}@{config.server.host}')
def test_get_version(bot):
assert bot.get_version() == 'syslog2IRC'
def test_channel_joins(config, bot, nickmask, monkeypatch):
class FakeSocket:
def getpeername(self):
return ('10.0.0.99', 6667)
socket = FakeSocket()
conn = ServerConnection(None)
welcome_event = Event(
type='welcome', source=config.server.host, target=config.nickname
)
def join(self, channel, key=''):
join_event = Event(type='join', source=nickmask, target=channel)
bot.on_join(conn, join_event)
received_signal_data = []
@irc_channel_joined.connect
def handle_irc_channel_joined(sender, **data):
received_signal_data.append(data)
with monkeypatch.context() as mpc:
mpc.setattr(ServerConnection, 'socket', socket)
mpc.setattr(ServerConnection, 'join', join)
bot.on_welcome(conn, welcome_event)
assert received_signal_data == [
{'channel_name': '#one'},
{'channel_name': '#two'},
]
|
<commit_before><commit_msg>Test channel joins of real bot<commit_after>
|
"""
:Copyright: 2007-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from irc.client import Event, NickMask, ServerConnection
import pytest
from syslog2irc.irc import create_bot, IrcChannel, IrcConfig, IrcServer
from syslog2irc.signals import irc_channel_joined
@pytest.fixture
def config():
server = IrcServer('irc.server.test')
channels = {IrcChannel('#one'), IrcChannel('#two')}
return IrcConfig(
server=server,
nickname='nick',
realname='Nick',
channels=channels,
)
@pytest.fixture
def bot(config):
bot = create_bot(config)
yield bot
bot.disconnect('Done.')
@pytest.fixture
def nickmask(config):
return NickMask(f'{config.nickname}!{config.nickname}@{config.server.host}')
def test_get_version(bot):
assert bot.get_version() == 'syslog2IRC'
def test_channel_joins(config, bot, nickmask, monkeypatch):
class FakeSocket:
def getpeername(self):
return ('10.0.0.99', 6667)
socket = FakeSocket()
conn = ServerConnection(None)
welcome_event = Event(
type='welcome', source=config.server.host, target=config.nickname
)
def join(self, channel, key=''):
join_event = Event(type='join', source=nickmask, target=channel)
bot.on_join(conn, join_event)
received_signal_data = []
@irc_channel_joined.connect
def handle_irc_channel_joined(sender, **data):
received_signal_data.append(data)
with monkeypatch.context() as mpc:
mpc.setattr(ServerConnection, 'socket', socket)
mpc.setattr(ServerConnection, 'join', join)
bot.on_welcome(conn, welcome_event)
assert received_signal_data == [
{'channel_name': '#one'},
{'channel_name': '#two'},
]
|
Test channel joins of real bot"""
:Copyright: 2007-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from irc.client import Event, NickMask, ServerConnection
import pytest
from syslog2irc.irc import create_bot, IrcChannel, IrcConfig, IrcServer
from syslog2irc.signals import irc_channel_joined
@pytest.fixture
def config():
server = IrcServer('irc.server.test')
channels = {IrcChannel('#one'), IrcChannel('#two')}
return IrcConfig(
server=server,
nickname='nick',
realname='Nick',
channels=channels,
)
@pytest.fixture
def bot(config):
bot = create_bot(config)
yield bot
bot.disconnect('Done.')
@pytest.fixture
def nickmask(config):
return NickMask(f'{config.nickname}!{config.nickname}@{config.server.host}')
def test_get_version(bot):
assert bot.get_version() == 'syslog2IRC'
def test_channel_joins(config, bot, nickmask, monkeypatch):
class FakeSocket:
def getpeername(self):
return ('10.0.0.99', 6667)
socket = FakeSocket()
conn = ServerConnection(None)
welcome_event = Event(
type='welcome', source=config.server.host, target=config.nickname
)
def join(self, channel, key=''):
join_event = Event(type='join', source=nickmask, target=channel)
bot.on_join(conn, join_event)
received_signal_data = []
@irc_channel_joined.connect
def handle_irc_channel_joined(sender, **data):
received_signal_data.append(data)
with monkeypatch.context() as mpc:
mpc.setattr(ServerConnection, 'socket', socket)
mpc.setattr(ServerConnection, 'join', join)
bot.on_welcome(conn, welcome_event)
assert received_signal_data == [
{'channel_name': '#one'},
{'channel_name': '#two'},
]
|
<commit_before><commit_msg>Test channel joins of real bot<commit_after>"""
:Copyright: 2007-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from irc.client import Event, NickMask, ServerConnection
import pytest
from syslog2irc.irc import create_bot, IrcChannel, IrcConfig, IrcServer
from syslog2irc.signals import irc_channel_joined
@pytest.fixture
def config():
server = IrcServer('irc.server.test')
channels = {IrcChannel('#one'), IrcChannel('#two')}
return IrcConfig(
server=server,
nickname='nick',
realname='Nick',
channels=channels,
)
@pytest.fixture
def bot(config):
bot = create_bot(config)
yield bot
bot.disconnect('Done.')
@pytest.fixture
def nickmask(config):
return NickMask(f'{config.nickname}!{config.nickname}@{config.server.host}')
def test_get_version(bot):
assert bot.get_version() == 'syslog2IRC'
def test_channel_joins(config, bot, nickmask, monkeypatch):
class FakeSocket:
def getpeername(self):
return ('10.0.0.99', 6667)
socket = FakeSocket()
conn = ServerConnection(None)
welcome_event = Event(
type='welcome', source=config.server.host, target=config.nickname
)
def join(self, channel, key=''):
join_event = Event(type='join', source=nickmask, target=channel)
bot.on_join(conn, join_event)
received_signal_data = []
@irc_channel_joined.connect
def handle_irc_channel_joined(sender, **data):
received_signal_data.append(data)
with monkeypatch.context() as mpc:
mpc.setattr(ServerConnection, 'socket', socket)
mpc.setattr(ServerConnection, 'join', join)
bot.on_welcome(conn, welcome_event)
assert received_signal_data == [
{'channel_name': '#one'},
{'channel_name': '#two'},
]
|
|
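The connect/send interface used by the test (a decorator registering a receiver that takes a sender plus keyword data) matches blinker-style named signals. A self-contained sketch of that mechanism, assuming blinker or an API-compatible signal class underlies syslog2irc.signals:

from blinker import signal

irc_channel_joined = signal('irc-channel-joined')

received = []

@irc_channel_joined.connect
def handle_joined(sender, **data):
    received.append(data)

# Emit the signal the way the bot presumably does for each join event.
irc_channel_joined.send(None, channel_name='#one')
irc_channel_joined.send(None, channel_name='#two')
assert received == [{'channel_name': '#one'}, {'channel_name': '#two'}]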
cde1be5f4f8b3effe2d6802db368167a35495878
|
tools/load_pcap.py
|
tools/load_pcap.py
|
#!/usr/bin/env python
# Installed from: http://dirtbags.net/py-pcap.html
import pcap
import argparse
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "pox"))
from pox.lib.packet.ethernet import *
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input')
parser.add_argument('-o', '--output', default="pcap.trace")
args = parser.parse_args()
p = pcap.open(args.input)
for i in p:
e = ethernet(raw=i[1])
import pdb; pdb.set_trace()
print e
|
Add a tool to convert pcap to Murphy's python classes.
|
Add a tool to convert pcap to Murphy's python classes.
Not quite useful yet
|
Python
|
apache-2.0
|
ucb-sts/sts,ucb-sts/sts,jmiserez/sts,jmiserez/sts
|
Add a tool to convert pcap to Murphy's python classes.
Not quite useful yet
|
#!/usr/bin/env python
# Installed from: http://dirtbags.net/py-pcap.html
import pcap
import argparse
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "pox"))
from pox.lib.packet.ethernet import *
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input')
parser.add_argument('-o', '--output', default="pcap.trace")
args = parser.parse_args()
p = pcap.open(args.input)
for i in p:
e = ethernet(raw=i[1])
import pdb; pdb.set_trace()
print e
|
<commit_before><commit_msg>Add a tool to convert pcap to Murphy's python classes.
Not quite useful yet<commit_after>
|
#!/usr/bin/env python
# Installed from: http://dirtbags.net/py-pcap.html
import pcap
import argparse
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "pox"))
from pox.lib.packet.ethernet import *
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input')
parser.add_argument('-o', '--output', default="pcap.trace")
args = parser.parse_args()
p = pcap.open(args.input)
for i in p:
e = ethernet(raw=i[1])
import pdb; pdb.set_trace()
print e
|
Add a tool to convert pcap to Murphy's python classes.
Not quite useful yet#!/usr/bin/env python
# Installed from: http://dirtbags.net/py-pcap.html
import pcap
import argparse
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "pox"))
from pox.lib.packet.ethernet import *
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input')
parser.add_argument('-o', '--output', default="pcap.trace")
args = parser.parse_args()
p = pcap.open(args.input)
for i in p:
e = ethernet(raw=i[1])
import pdb; pdb.set_trace()
print e
|
<commit_before><commit_msg>Add a tool to convert pcap to Murphy's python classes.
Not quite useful yet<commit_after>#!/usr/bin/env python
# Installed from: http://dirtbags.net/py-pcap.html
import pcap
import argparse
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "pox"))
from pox.lib.packet.ethernet import *
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input')
parser.add_argument('-o', '--output', default="pcap.trace")
args = parser.parse_args()
p = pcap.open(args.input)
for i in p:
e = ethernet(raw=i[1])
import pdb; pdb.set_trace()
print e
|
|
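As committed, the loop above parses each packet, drops into the debugger, and never writes to the output file. A sketch of how the conversion might be completed, assuming py-pcap records index as (header, raw_bytes) the way the i[1] access suggests; the one-packet-per-line text output is a placeholder, not sts's actual trace format:

import pcap
from pox.lib.packet.ethernet import ethernet

def pcap_to_trace(in_path, out_path):
    # Parse every capture record and write one serialized packet
    # per line of the output file.
    capture = pcap.open(in_path)
    with open(out_path, 'w') as out:
        for record in capture:
            e = ethernet(raw=record[1])
            out.write(str(e) + '\n')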
b7b0159e462efb7abfd63c0e3066704637fa4df2
|
txircd/modules/extra/stats_ports.py
|
txircd/modules/extra/stats_ports.py
|
from twisted.plugin import IPlugin
from txircd.factory import UserFactory
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatsPorts(ModuleData):
implements(IPlugin, IModuleData)
name = "StatsPorts"
def actions(self):
return [ ("statsruntype-ports", 10, self.listPorts) ]
def listPorts(self):
info = {}
for portDesc, portData in self.ircd.boundPorts.iteritems():
if isinstance(portData.factory, UserFactory):
info[str(portData.port)] = "{} (clients)".format(portDesc)
else:
info[str(portData.port)] = "{} (servers)".format(portDesc)
return info
statsPorts = StatsPorts()
|
Add STATS type to display the ports that the server is listening on
|
Add STATS type to display the ports that the server is listening on
|
Python
|
bsd-3-clause
|
Heufneutje/txircd
|
Add STATS type to display the ports that the server is listening on
|
from twisted.plugin import IPlugin
from txircd.factory import UserFactory
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatsPorts(ModuleData):
implements(IPlugin, IModuleData)
name = "StatsPorts"
def actions(self):
return [ ("statsruntype-ports", 10, self.listPorts) ]
def listPorts(self):
info = {}
for portDesc, portData in self.ircd.boundPorts.iteritems():
if isinstance(portData.factory, UserFactory):
info[str(portData.port)] = "{} (clients)".format(portDesc)
else:
info[str(portData.port)] = "{} (servers)".format(portDesc)
return info
statsPorts = StatsPorts()
|
<commit_before><commit_msg>Add STATS type to display the ports that the server is listening on<commit_after>
|
from twisted.plugin import IPlugin
from txircd.factory import UserFactory
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatsPorts(ModuleData):
implements(IPlugin, IModuleData)
name = "StatsPorts"
def actions(self):
return [ ("statsruntype-ports", 10, self.listPorts) ]
def listPorts(self):
info = {}
for portDesc, portData in self.ircd.boundPorts.iteritems():
if isinstance(portData.factory, UserFactory):
info[str(portData.port)] = "{} (clients)".format(portDesc)
else:
info[str(portData.port)] = "{} (servers)".format(portDesc)
return info
statsPorts = StatsPorts()
|
Add STATS type to display the ports that the server is listening onfrom twisted.plugin import IPlugin
from txircd.factory import UserFactory
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatsPorts(ModuleData):
implements(IPlugin, IModuleData)
name = "StatsPorts"
def actions(self):
return [ ("statsruntype-ports", 10, self.listPorts) ]
def listPorts(self):
info = {}
for portDesc, portData in self.ircd.boundPorts.iteritems():
if isinstance(portData.factory, UserFactory):
info[str(portData.port)] = "{} (clients)".format(portDesc)
else:
info[str(portData.port)] = "{} (servers)".format(portDesc)
return info
statsPorts = StatsPorts()
|
<commit_before><commit_msg>Add STATS type to display the ports that the server is listening on<commit_after>from twisted.plugin import IPlugin
from txircd.factory import UserFactory
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatsPorts(ModuleData):
implements(IPlugin, IModuleData)
name = "StatsPorts"
def actions(self):
return [ ("statsruntype-ports", 10, self.listPorts) ]
def listPorts(self):
info = {}
for portDesc, portData in self.ircd.boundPorts.iteritems():
if isinstance(portData.factory, UserFactory):
info[str(portData.port)] = "{} (clients)".format(portDesc)
else:
info[str(portData.port)] = "{} (servers)".format(portDesc)
return info
statsPorts = StatsPorts()
|
|
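The handler's contract is just its return shape: a plain dict mapping a port number (as a string) to a human-readable description, which the STATS machinery presumably merges and displays. The expected output for one client and one server listener would look like this (addresses are invented for illustration):

expected = {
    '6667': '0.0.0.0:6667 (clients)',
    '7001': '0.0.0.0:7001 (servers)',
}
for port in sorted(expected):
    print('{} -> {}'.format(port, expected[port]))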
1c84604b28ffcbfc3b32d401a29e87927aba282e
|
observations/migrations/0002_auto_20151217_1759.py
|
observations/migrations/0002_auto_20151217_1759.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('observations', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='observation',
options={'verbose_name': 'Observation', 'ordering': ['-jd'], 'verbose_name_plural': 'Observations'},
),
]
|
Add migration for Observation model options change.
|
Add migration for Observation model options change.
|
Python
|
mit
|
zsiciarz/variablestars.net,zsiciarz/variablestars.net,zsiciarz/variablestars.net
|
Add migration for Observation model options change.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('observations', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='observation',
options={'verbose_name': 'Observation', 'ordering': ['-jd'], 'verbose_name_plural': 'Observations'},
),
]
|
<commit_before><commit_msg>Add migration for Observation model options change.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('observations', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='observation',
options={'verbose_name': 'Observation', 'ordering': ['-jd'], 'verbose_name_plural': 'Observations'},
),
]
|
Add migration for Observation model options change.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('observations', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='observation',
options={'verbose_name': 'Observation', 'ordering': ['-jd'], 'verbose_name_plural': 'Observations'},
),
]
|
<commit_before><commit_msg>Add migration for Observation model options change.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('observations', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='observation',
options={'verbose_name': 'Observation', 'ordering': ['-jd'], 'verbose_name_plural': 'Observations'},
),
]
|
|
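Only the three options listed in the migration are known from it; the model Meta that Django diffed against would look roughly like this (the jd field itself is inferred from the '-jd' ordering, not part of this migration):

from django.db import models

class Observation(models.Model):
    jd = models.FloatField()  # Julian date; inferred, not in this migration

    class Meta:
        ordering = ['-jd']
        verbose_name = 'Observation'
        verbose_name_plural = 'Observations'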
8f69b07e80198c1af0d5bd368d8ad8ced968884a
|
experimental/csm/ct_csv_to_traces.py
|
experimental/csm/ct_csv_to_traces.py
|
import csv
import sys
def csv_to_traces(infile, outfile):
traces = []
with open(infile) as inf:
results = csv.DictReader(inf)
for r in results:
for t in r['trace'].split(','):
traces.append(t)
with open(outfile, 'w') as outf:
for trace in traces:
outf.write(trace + '\n')
def main():
if len(sys.argv) < 3:
print "Usage: {0} <input-file> <output-file>".format(sys.argv[0])
return
input_filename = sys.argv[1]
output_filename = sys.argv[2]
csv_to_traces(input_filename, output_filename)
if __name__ == "__main__":
main()
|
Add script to transform CT result to trace list
|
Add script to transform CT result to trace list
|
Python
|
bsd-3-clause
|
catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult-csm
|
Add script to transform CT result to trace list
|
import csv
import sys
def csv_to_traces(infile, outfile):
traces = []
with open(infile) as inf:
results = csv.DictReader(inf)
for r in results:
for t in r['trace'].split(','):
traces.append(t)
with open(outfile, 'w') as outf:
for trace in traces:
outf.write(trace + '\n')
def main():
if len(sys.argv) < 3:
print "Usage: {0} <input-file> <output-file>".format(sys.argv[0])
return
input_filename = sys.argv[1]
output_filename = sys.argv[2]
csv_to_traces(input_filename, output_filename)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to transform CT result to trace list<commit_after>
|
import csv
import sys
def csv_to_traces(infile, outfile):
traces = []
with open(infile) as inf:
results = csv.DictReader(inf)
for r in results:
for t in r['trace'].split(','):
traces.append(t)
with open(outfile, 'w') as outf:
for trace in traces:
outf.write(trace + '\n')
def main():
if len(sys.argv) < 3:
print "Usage: {0} <input-file> <output-file>".format(sys.argv[0])
return
input_filename = sys.argv[1]
output_filename = sys.argv[2]
csv_to_traces(input_filename, output_filename)
if __name__ == "__main__":
main()
|
Add script to transform CT result to trace listimport csv
import sys
def csv_to_traces(infile, outfile):
traces = []
with open(infile) as inf:
results = csv.DictReader(inf)
for r in results:
for t in r['trace'].split(','):
traces.append(t)
with open(outfile, 'w') as outf:
for trace in traces:
outf.write(trace + '\n')
def main():
if len(sys.argv) < 3:
print "Usage: {0} <input-file> <output-file>".format(sys.argv[0])
return
input_filename = sys.argv[1]
output_filename = sys.argv[2]
csv_to_traces(input_filename, output_filename)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script to transform CT result to trace list<commit_after>import csv
import sys
def csv_to_traces(infile, outfile):
traces = []
with open(infile) as inf:
results = csv.DictReader(inf)
for r in results:
for t in r['trace'].split(','):
traces.append(t)
with open(outfile, 'w') as outf:
for trace in traces:
outf.write(trace + '\n')
def main():
if len(sys.argv) < 3:
print "Usage: {0} <input-file> <output-file>".format(sys.argv[0])
return
input_filename = sys.argv[1]
output_filename = sys.argv[2]
csv_to_traces(input_filename, output_filename)
if __name__ == "__main__":
main()
|
|
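The only part of the input format the code reveals is a 'trace' column holding comma-joined identifiers. A sketch of a matching input file and the run that flattens it (the file names and the extra page column are hypothetical):

import csv

with open('ct_results.csv', 'w') as f:
    writer = csv.DictWriter(f, fieldnames=['page', 'trace'])
    writer.writeheader()
    writer.writerow({'page': 'example.com', 'trace': 'trace_a,trace_b'})
    writer.writerow({'page': 'example.org', 'trace': 'trace_c'})

# python ct_csv_to_traces.py ct_results.csv traces.txt
# traces.txt then holds trace_a, trace_b and trace_c, one per line.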
37a6261897b413f3aa5be310c4cd1e7abcb1a55f
|
examples/zorder.py
|
examples/zorder.py
|
"""
Eventually this will be an example showing how we support z-ordering.
"""
import arcade
SCALE = 0.75
SCREEN_HEIGHT = 320
SCREEN_WIDTH = 512
class MyApplication(arcade.Window):
""" Main application class. """
def __init__(self, width, height):
""" Set up the game and initialize the variables. """
super().__init__(width, height)
# Sprite lists
self.all_sprites_list = arcade.SpriteList()
# Set up the player
self.score = 0
self.player_sprite = arcade.Sprite("examples/images/playerShip1_orange.png", SCALE)
self.player_sprite.center_x = 200
self.player_sprite.center_y = 200
self.all_sprites_list.append(self.player_sprite)
# Make the asteroids
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big1.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 150
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big2.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 250
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big3.png", SCALE)
enemy_sprite.center_y = 150
enemy_sprite.center_x = 200
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
self.background = arcade.load_texture("stars.jpg")
def on_draw(self):
"""
Render the screen.
"""
# Draw the background
arcade.draw_xywh_rectangle_textured(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT, self.background)
# Draw all the sprites.
self.all_sprites_list.draw()
window = MyApplication(SCREEN_WIDTH, SCREEN_HEIGHT)
arcade.run()
|
Add start of z-order example.
|
Add start of z-order example.
|
Python
|
mit
|
mikemhenry/arcade,mikemhenry/arcade
|
Add start of z-order example.
|
"""
Eventually this will be an example showing how we support z-ordering.
"""
import arcade
SCALE = 0.75
SCREEN_HEIGHT = 320
SCREEN_WIDTH = 512
class MyApplication(arcade.Window):
""" Main application class. """
def __init__(self, width, height):
""" Set up the game and initialize the variables. """
super().__init__(width, height)
# Sprite lists
self.all_sprites_list = arcade.SpriteList()
# Set up the player
self.score = 0
self.player_sprite = arcade.Sprite("examples/images/playerShip1_orange.png", SCALE)
self.player_sprite.center_x = 200
self.player_sprite.center_y = 200
self.all_sprites_list.append(self.player_sprite)
# Make the asteroids
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big1.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 150
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big2.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 250
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big3.png", SCALE)
enemy_sprite.center_y = 150
enemy_sprite.center_x = 200
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
self.background = arcade.load_texture("stars.jpg")
def on_draw(self):
"""
Render the screen.
"""
# Draw the background
arcade.draw_xywh_rectangle_textured(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT, self.background)
# Draw all the sprites.
self.all_sprites_list.draw()
window = MyApplication(SCREEN_WIDTH, SCREEN_HEIGHT)
arcade.run()
|
<commit_before><commit_msg>Add start of z-order example.<commit_after>
|
"""
Eventually this will be an example showing how we support z-ordering.
"""
import arcade
SCALE = 0.75
SCREEN_HEIGHT = 320
SCREEN_WIDTH = 512
class MyApplication(arcade.Window):
""" Main application class. """
def __init__(self, width, height):
""" Set up the game and initialize the variables. """
super().__init__(width, height)
# Sprite lists
self.all_sprites_list = arcade.SpriteList()
# Set up the player
self.score = 0
self.player_sprite = arcade.Sprite("examples/images/playerShip1_orange.png", SCALE)
self.player_sprite.center_x = 200
self.player_sprite.center_y = 200
self.all_sprites_list.append(self.player_sprite)
# Make the asteroids
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big1.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 150
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big2.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 250
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big3.png", SCALE)
enemy_sprite.center_y = 150
enemy_sprite.center_x = 200
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
self.background = arcade.load_texture("stars.jpg")
def on_draw(self):
"""
Render the screen.
"""
# Draw the background
arcade.draw_xywh_rectangle_textured(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT, self.background)
# Draw all the sprites.
self.all_sprites_list.draw()
window = MyApplication(SCREEN_WIDTH, SCREEN_HEIGHT)
arcade.run()
|
Add start of z-order example."""
Eventually this will be an example showing how we support z-ordering.
"""
import arcade
SCALE = 0.75
SCREEN_HEIGHT = 320
SCREEN_WIDTH = 512
class MyApplication(arcade.Window):
""" Main application class. """
def __init__(self, width, height):
""" Set up the game and initialize the variables. """
super().__init__(width, height)
# Sprite lists
self.all_sprites_list = arcade.SpriteList()
# Set up the player
self.score = 0
self.player_sprite = arcade.Sprite("examples/images/playerShip1_orange.png", SCALE)
self.player_sprite.center_x = 200
self.player_sprite.center_y = 200
self.all_sprites_list.append(self.player_sprite)
# Make the asteroids
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big1.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 150
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big2.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 250
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big3.png", SCALE)
enemy_sprite.center_y = 150
enemy_sprite.center_x = 200
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
self.background = arcade.load_texture("stars.jpg")
def on_draw(self):
"""
Render the screen.
"""
# Draw the background
arcade.draw_xywh_rectangle_textured(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT, self.background)
# Draw all the sprites.
self.all_sprites_list.draw()
window = MyApplication(SCREEN_WIDTH, SCREEN_HEIGHT)
arcade.run()
|
<commit_before><commit_msg>Add start of z-order example.<commit_after>"""
Eventually this will be an example showing how we support z-ordering.
"""
import arcade
SCALE = 0.75
SCREEN_HEIGHT = 320
SCREEN_WIDTH = 512
class MyApplication(arcade.Window):
""" Main application class. """
def __init__(self, width, height):
""" Set up the game and initialize the variables. """
super().__init__(width, height)
# Sprite lists
self.all_sprites_list = arcade.SpriteList()
# Set up the player
self.score = 0
self.player_sprite = arcade.Sprite("examples/images/playerShip1_orange.png", SCALE)
self.player_sprite.center_x = 200
self.player_sprite.center_y = 200
self.all_sprites_list.append(self.player_sprite)
# Make the asteroids
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big1.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 150
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big2.png", SCALE)
enemy_sprite.center_y = 200
enemy_sprite.center_x = 250
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
enemy_sprite = arcade.Sprite("examples/images/meteorGrey_big3.png", SCALE)
enemy_sprite.center_y = 150
enemy_sprite.center_x = 200
enemy_sprite.size = 4
self.all_sprites_list.append(enemy_sprite)
self.background = arcade.load_texture("stars.jpg")
def on_draw(self):
"""
Render the screen.
"""
# Draw the background
arcade.draw_xywh_rectangle_textured(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT, self.background)
# Draw all the sprites.
self.all_sprites_list.draw()
window = MyApplication(SCREEN_WIDTH, SCREEN_HEIGHT)
arcade.run()
|
|
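One common way to get explicit z-ordering in arcade, and a likely direction for finishing this example, is to keep one SpriteList per layer and draw the lists back to front. This is a sketch of that idea, not the example's final form:

import arcade

class LayeredWindow(arcade.Window):
    def __init__(self, width, height):
        super().__init__(width, height)
        # One list per layer; the draw order below defines the z-order.
        self.background_list = arcade.SpriteList()
        self.asteroid_list = arcade.SpriteList()
        self.player_list = arcade.SpriteList()

    def on_draw(self):
        # Back to front: lists drawn later appear on top.
        self.background_list.draw()
        self.asteroid_list.draw()
        self.player_list.draw()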
71dcd5198944b59f8f69e4027032f8ef0346eabf
|
examples/asmled.py
|
examples/asmled.py
|
# flash LED #1 using inline assembler
@micropython.asm_thumb
def flash_led(r0):
movw(r1, (stm.GPIOA + stm.GPIO_BSRRL) & 0xffff)
movt(r1, ((stm.GPIOA + stm.GPIO_BSRRL) >> 16) & 0x7fff)
movw(r2, 1 << 13)
movt(r2, 0)
movw(r3, 0)
movt(r3, 1 << 13)
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
# flash LED #1 using inline assembler
# this version uses the convenience assembler operation 'movwt'
@micropython.asm_thumb
def flash_led_v2(r0):
movwt(r1, stm.GPIOA + stm.GPIO_BSRRL)
movwt(r2, 1 << 13)
movwt(r3, 1 << (16 + 13))
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
flash_led_v2(5)
|
Add example script to flash an LED using inline assembler.
|
examples: Add example script to flash an LED using inline assembler.
|
Python
|
mit
|
utopiaprince/micropython,alex-robbins/micropython,ruffy91/micropython,drrk/micropython,mgyenik/micropython,martinribelotta/micropython,trezor/micropython,misterdanb/micropython,torwag/micropython,adafruit/circuitpython,Timmenem/micropython,ernesto-g/micropython,kostyll/micropython,hosaka/micropython,lbattraw/micropython,dmazzella/micropython,KISSMonX/micropython,oopy/micropython,oopy/micropython,ruffy91/micropython,drrk/micropython,mgyenik/micropython,martinribelotta/micropython,trezor/micropython,misterdanb/micropython,torwag/micropython,adafruit/circuitpython,Timmenem/micropython,ernesto-g/micropython,kostyll/micropython,hosaka/micropython,lbattraw/micropython,dmazzella/micropython,KISSMonX/micropython,oopy/micropython,noahwilliamsson/micropython,mianos/micropython,dxxb/micropython,SungEun-Steve-Kim/test-mp,max_node = max(max_node,micropython/micropython-esp32,aethaniel/micropython,pozetroninc/micropython,vitiral/micropython,chrisdearman/micropython,praemdonck/micropython,ericsnowcurrently/micropython,lbattraw/micropython,vriera/micropython,Timmenem/micropython,jlillest/micropython,orionrobots/micropython,danicampora/micropython,jlillest/micropython,ruffy91/micropython,Peetz0r/micropython-esp32,redbear/micropython,galenhz/micropython,SHA2017-badge/micropython-esp32,vriera/micropython,pfalcon/micropython,SHA2017-badge/micropython-esp32,galenhz/micropython,lowRISC/micropython,utopiaprince/micropython,omtinez/micropython,slzatz/micropython,omtinez/micropython,heisewangluo/micropython,firstval/micropython,pozetroninc/micropython,dinau/micropython,hosaka/micropython,tdautc19841202/micropython,chrisdearman/micropython,jmarcelino/pycom-micropython,chrisdearman/micropython,martinribelotta/micropython,EcmaXp/micropython,xhat/micropython,paul-xxx/micropython,adafruit/micropython,bvernoux/micropython,MrSurly/micropython,AriZuu/micropython,micropython/micropython-esp32,kerneltask/micropython,Timmenem/micropython,tuc-osg/micropython,skybird6672/micropython,turbinenreiter/micropython,noahchense/micropython,ryannathans/micropython,oopy/micropython,SungEun-Steve-Kim/test-mp,dinau/micropython,KISSMonX/micropython,matthewelse/micropython,tdautc19841202/micropython,ruffy91/micropython,aitjcize/micropython,EcmaXp/micropython,ruffy91/micropython,Timmenem/micropython,neilh10/micropython,henriknelson/micropython,tralamazza/micropython,dmazzella/micropython,hosaka/micropython,xyb/micropython,feilongfl/micropython,cwyark/micropython,rubencabrera/micropython,adamkh/micropython,cwyark/micropython,dinau/micropython,Vogtinator/micropython,ahotam/micropython,alex-robbins/micropython,PappaPeppar/micropython,redbear/micropython,vitiral/micropython,alex-march/micropython,emfcamp/micropython,emfcamp/micropython,SungEun-Steve-Kim/test-mp,pfalcon/micropython,heisewangluo/micropython,aitjcize/micropython,supergis/micropython,pozetroninc/micropython,adafruit/micropython,ceramos/micropython,dxxb/micropython,selste/micropython,hiway/micropython,martinribelotta/micropython,ahotam/micropython,blmorris/micropython,xhat/micropython,noahwilliamsson/micropython,alex-march/micropython,cnoviello/micropython,trezor/micropython,neilh10/micropython,puuu/micropython,kerneltask/micropython,pramasoul/micropython,toolmacher/micropython,selste/micropython,SHA2017-badge/micropython-esp32,stonegithubs/micropython,hiway/micropython,adafruit/circuitpython,jmarcelino/pycom-micropython,deshipu/micropython,Peetz0r/micropython-esp32,orionrobots/micropython,tobbad/micropython,adamkh/micropython,xuxiaoxin/micropython,oopy/micropython,jlillest/micropython,SHA2017-badge/micropython-esp32,bvernoux/micropython,Peetz0r/micropython-esp32,MrSurly/micropython-esp32,aethaniel/micropython,selste/micropython,AriZuu/micropython,mianos/micropython,skybird6672/micropython,methoxid/micropystat,jimkmc/micropython,chrisdearman/micropython,Vogtinator/micropython,henriknelson/micropython,matthewelse/micropython,danicampora/micropython,tobbad/micropython,toolmacher/micropython,drrk/micropython,aethaniel/micropython,utopiaprince/micropython,swegener/micropython,PappaPeppar/micropython,mgyenik/micropython,xyb/micropython,neilh10/micropython,firstval/micropython,tralamazza/micropython,noahwilliamsson/micropython,supergis/micropython,mianos/micropython,HenrikSolver/micropython,tuc-osg/micropython,blmorris/micropython,deshipu/micropython,hiway/micropython,aethaniel/micropython,infinnovation/micropython,ahotam/micropython,EcmaXp/micropython,SHA2017-badge/micropython-esp32,tuc-osg/micropython,skybird6672/micropython,feilongfl/micropython,pramasoul/micropython,ernesto-g/micropython,methoxid/micropystat,PappaPeppar/micropython,deshipu/micropython,adamkh/micropython,aitjcize/micropython,hiway/micropython,SungEun-Steve-Kim/test-mp,firstval/micropython,utopiaprince/micropython,redbear/micropython,lbattraw/micropython,warner83/micropython,cnoviello/micropython,noahwilliamsson/micropython,supergis/micropython,swegener/micropython,ernesto-g/micropython,tralamazza/micropython,firstval/micropython,cloudformdesign/micropython,ganshun666/micropython,warner83/micropython,matthewelse/micropython,TDAbboud/micropython,MrSurly/micropython,martinribelotta/micropython,MrSurly/micropython-esp32,alex-robbins/micropython,infinnovation/micropython,blazewicz/micropython,cnoviello/micropython,ChuckM/micropython,supergis/micropython,mpalomer/micropython,mianos/micropython,orionrobots/micropython,kerneltask/micropython,stonegithubs/micropython,drrk/micropython,rubencabrera/micropython,misterdanb/micropython,stonegithubs/micropython,praemdonck/micropython,adafruit/circuitpython,pozetroninc/micropython,xhat/micropython,ahotam/micropython,toolmacher/micropython,puuu/micropython,ceramos/micropython,omtinez/micropython,jmarcelino/pycom-micropython,danicampora/micropython,alex-march/micropython,rubencabrera/micropython,Vogtinator/micropython,xuxiaoxin/micropython,omtinez/micropython,suda/micropython,ganshun666/micropython,alex-march/micropython,neilh10/micropython,adafruit/micropython,mgyenik/micropython,praemdonck/micropython,mhoffma/micropython,mhoffma/micropython,torwag/micropython,ericsnowcurrently/micropython,blazewicz/micropython,orionrobots/micropython,ChuckM/micropython,KISSMonX/micropython,mgyenik/micropython,adamkh/micropython,heisewangluo/micropython,vitiral/micropython,blmorris/micropython,dinau/micropython,kerneltask/micropython,ChuckM/micropython,pramasoul/micropython,infinnovation/micropython,heisewangluo/micropython,HenrikSolver/micropython,xhat/micropython,ChuckM/micropython,PappaPeppar/micropython,tralamazza/micropython,turbinenreiter/micropython,aitjcize/micropython,galenhz/micropython,hiway/micropython,emfcamp/micropython,trezor/micropython,cloudformdesign/micropython,ganshun666/micropython,mpalomer/micropython,dhylands/micropython,ryannathans/micropython,noahchense/micropython,HenrikSolver/micropython,PappaPeppar/micropython,mhoffma/micropython,MrSurly/micropython-esp32,skybird6672/micropython,drrk/micropython,KISSMonX/micropython,SungEun-Steve-Kim/test-mp,jimkmc/micropython,ryannathans/micropython,misterdanb/micropython,adafruit/micropython,pfalcon/micropython,blazewicz/micropython,xyb/micropython,dhylands/micropython,micropython/micropython-esp32,dxxb/micropython,turbinenreiter/micropython,stonegithubs/micropython,blazewicz/micropython,swegener/micropython,rubencabrera/micropython,HenrikSolver/micropython,mhoffma/micropython,ericsnowcurrently/micropython,suda/micropython,cnoviello/micropython,hosaka/micropython,MrSurly/micropython,slzatz/micropython,martinribelotta/micropython,vriera/micropython,kostyll/micropython,dhylands/micropython,henriknelson/micropython,ericsnowcurrently/micropython,ernesto-g/micropython,vitiral/micropython,ChuckM/micropython,adafruit/circuitpython,toolmacher/micropython,warner83/micropython,cnoviello/micropython,tobbad/micropython,mpalomer/micropython,feilongfl/micropython,puuu/micropython,pfalcon/micropython,danicampora/micropython,noahchense/micropython,utopiaprince/micropython,jimkmc/micropython,firstval/micropython,toolmacher/micropython,torwag/micropython,galenhz/micropython,vriera/micropython,xhat/micropython,emfcamp/micropython,chrisdearman/micropython,torwag/micropython,lbattraw/micropython,slzatz/micropython,ernesto-g/micropython,selste/micropython,xuxiaoxin/micropython,xyb/micropython,cwyark/micropython,AriZuu/micropython,turbinenreiter/micropython,EcmaXp/micropython,jmarcelino/pycom-micropython,bvernoux/micropython,henriknelson/micropython,puuu/micropython,TDAbboud/micropython,tuc-osg/micropython,ganshun666/micropython
|
examples: Add example script to flash an LED using inline assembler.
|
# flash LED #1 using inline assembler
@micropython.asm_thumb
def flash_led(r0):
movw(r1, (stm.GPIOA + stm.GPIO_BSRRL) & 0xffff)
movt(r1, ((stm.GPIOA + stm.GPIO_BSRRL) >> 16) & 0x7fff)
movw(r2, 1 << 13)
movt(r2, 0)
movw(r3, 0)
movt(r3, 1 << 13)
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
# flash LED #1 using inline assembler
# this version uses the convenience assembler operation 'movwt'
@micropython.asm_thumb
def flash_led_v2(r0):
movwt(r1, stm.GPIOA + stm.GPIO_BSRRL)
movwt(r2, 1 << 13)
movwt(r3, 1 << (16 + 13))
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
flash_led_v2(5)
|
<commit_before><commit_msg>examples: Add example script to flash an LED using inline assembler.<commit_after>
|
# flash LED #1 using inline assembler
@micropython.asm_thumb
def flash_led(r0):
movw(r1, (stm.GPIOA + stm.GPIO_BSRRL) & 0xffff)
movt(r1, ((stm.GPIOA + stm.GPIO_BSRRL) >> 16) & 0x7fff)
movw(r2, 1 << 13)
movt(r2, 0)
movw(r3, 0)
movt(r3, 1 << 13)
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
# flash LED #1 using inline assembler
# this version uses the convenience assembler operation 'movwt'
@micropython.asm_thumb
def flash_led_v2(r0):
movwt(r1, stm.GPIOA + stm.GPIO_BSRRL)
movwt(r2, 1 << 13)
movwt(r3, 1 << (16 + 13))
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
flash_led_v2(5)
|
examples: Add example script to flash an LED using inline assembler.# flash LED #1 using inline assembler
@micropython.asm_thumb
def flash_led(r0):
movw(r1, (stm.GPIOA + stm.GPIO_BSRRL) & 0xffff)
movt(r1, ((stm.GPIOA + stm.GPIO_BSRRL) >> 16) & 0x7fff)
movw(r2, 1 << 13)
movt(r2, 0)
movw(r3, 0)
movt(r3, 1 << 13)
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
# flash LED #1 using inline assembler
# this version uses the convenience assembler operation 'movwt'
@micropython.asm_thumb
def flash_led_v2(r0):
movwt(r1, stm.GPIOA + stm.GPIO_BSRRL)
movwt(r2, 1 << 13)
movwt(r3, 1 << (16 + 13))
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
flash_led_v2(5)
|
<commit_before><commit_msg>examples: Add example script to flash an LED using inline assembler.<commit_after># flash LED #1 using inline assembler
@micropython.asm_thumb
def flash_led(r0):
movw(r1, (stm.GPIOA + stm.GPIO_BSRRL) & 0xffff)
movt(r1, ((stm.GPIOA + stm.GPIO_BSRRL) >> 16) & 0x7fff)
movw(r2, 1 << 13)
movt(r2, 0)
movw(r3, 0)
movt(r3, 1 << 13)
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movw(r4, 5599900 & 0xffff)
movt(r4, (5599900 >> 16) & 0xffff)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
# flash LED #1 using inline assembler
# this version uses the convenience assembler operation 'movwt'
@micropython.asm_thumb
def flash_led_v2(r0):
movwt(r1, stm.GPIOA + stm.GPIO_BSRRL)
movwt(r2, 1 << 13)
movwt(r3, 1 << (16 + 13))
b(loop_entry)
label(loop1)
# turn LED on
str(r2, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_on)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_on)
# turn LED off
str(r3, r1, 0)
# delay for a bit
movwt(r4, 5599900)
label(delay_off)
subs(r4, r4, 1)
cmp(r4, 0)
bgt(delay_off)
# loop r0 times
subs(r0, r0, 1)
label(loop_entry)
cmp(r0, 0)
bgt(loop1)
flash_led_v2(5)
|
|
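The 5599900 delay constant is a busy-wait iteration count, not a time. Rough arithmetic behind it, assuming a 168 MHz pyboard-class MCU and about 5 cycles per subs/cmp/bgt iteration (both figures are assumptions; the constant was presumably tuned by eye rather than computed):

CPU_HZ = 168000000          # assumed STM32F4 core clock
CYCLES_PER_ITERATION = 5    # assumed cost of one subs + cmp + bgt pass
ITERATIONS = 5599900

# Approximate duration of one on or off phase of the flash.
print(ITERATIONS * CYCLES_PER_ITERATION / float(CPU_HZ))  # ~0.17 seconds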
934367c972d99c034ee69fcec7930c4bd7d42dcc
|
tests/syntax/IndentationError.py
|
tests/syntax/IndentationError.py
|
#
# Kay Hayen, mailto:kayhayen@gmx.de
#
# Python test originally created or extracted from other peoples work. The
# parts from me are in the public domain. It is at least Free Software
# where it's copied from other people. In these cases, it will normally be
# indicated.
#
# If you submit Kay Hayen patches to this software in either form, you
# automatically grant him a copyright assignment to the code, or in the
# alternative a BSD license to the code, should your jurisdiction prevent
# this. Obviously it won't affect code that comes to him indirectly or
# code you don't submit to him.
#
# This is to reserve my ability to re-license the code at any time, e.g.
# the PSF. With this version of Nuitka, using it for Closed Source will
# not be allowed.
#
# Please leave the whole of this copyright notice intact.
#
def someFunc():
a
b
|
Cover the syntax error from wrong indentation as well.
|
Cover the syntax error from wrong indentation as well.
|
Python
|
apache-2.0
|
tempbottle/Nuitka,kayhayen/Nuitka,tempbottle/Nuitka,kayhayen/Nuitka,wfxiang08/Nuitka,wfxiang08/Nuitka,kayhayen/Nuitka,tempbottle/Nuitka,wfxiang08/Nuitka,wfxiang08/Nuitka,tempbottle/Nuitka,kayhayen/Nuitka
|
Cover the syntax error from wrong indentation as well.
|
#
# Kay Hayen, mailto:kayhayen@gmx.de
#
# Python test originally created or extracted from other peoples work. The
# parts from me are in the public domain. It is at least Free Software
# where it's copied from other people. In these cases, it will normally be
# indicated.
#
# If you submit Kay Hayen patches to this software in either form, you
# automatically grant him a copyright assignment to the code, or in the
# alternative a BSD license to the code, should your jurisdiction prevent
# this. Obviously it won't affect code that comes to him indirectly or
# code you don't submit to him.
#
# This is to reserve my ability to re-license the code at any time, e.g.
# the PSF. With this version of Nuitka, using it for Closed Source will
# not be allowed.
#
# Please leave the whole of this copyright notice intact.
#
def someFunc():
a
b
|
<commit_before><commit_msg>Cover the syntax error from wrong indentation as well.<commit_after>
|
#
# Kay Hayen, mailto:kayhayen@gmx.de
#
# Python test originally created or extracted from other peoples work. The
# parts from me are in the public domain. It is at least Free Software
# where it's copied from other people. In these cases, it will normally be
# indicated.
#
# If you submit Kay Hayen patches to this software in either form, you
# automatically grant him a copyright assignment to the code, or in the
# alternative a BSD license to the code, should your jurisdiction prevent
# this. Obviously it won't affect code that comes to him indirectly or
# code you don't submit to him.
#
# This is to reserve my ability to re-license the code at any time, e.g.
# the PSF. With this version of Nuitka, using it for Closed Source will
# not be allowed.
#
# Please leave the whole of this copyright notice intact.
#
def someFunc():
a
b
|
Cover the syntax error from wrong indentation as well.#
# Kay Hayen, mailto:kayhayen@gmx.de
#
# Python test originally created or extracted from other peoples work. The
# parts from me are in the public domain. It is at least Free Software
# where it's copied from other people. In these cases, it will normally be
# indicated.
#
# If you submit Kay Hayen patches to this software in either form, you
# automatically grant him a copyright assignment to the code, or in the
# alternative a BSD license to the code, should your jurisdiction prevent
# this. Obviously it won't affect code that comes to him indirectly or
# code you don't submit to him.
#
# This is to reserve my ability to re-license the code at any time, e.g.
# the PSF. With this version of Nuitka, using it for Closed Source will
# not be allowed.
#
# Please leave the whole of this copyright notice intact.
#
def someFunc():
a
b
|
<commit_before><commit_msg>Cover the syntax error from wrong indentation as well.<commit_after>#
# Kay Hayen, mailto:kayhayen@gmx.de
#
# Python test originally created or extracted from other peoples work. The
# parts from me are in the public domain. It is at least Free Software
# where it's copied from other people. In these cases, it will normally be
# indicated.
#
# If you submit Kay Hayen patches to this software in either form, you
# automatically grant him a copyright assignment to the code, or in the
# alternative a BSD license to the code, should your jurisdiction prevent
# this. Obviously it won't affect code that comes to him indirectly or
# code you don't submit to him.
#
# This is to reserve my ability to re-license the code at any time, e.g.
# the PSF. With this version of Nuitka, using it for Closed Source will
# not be allowed.
#
# Please leave the whole of this copyright notice intact.
#
def someFunc():
a
b
|
|
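The same shape of inconsistent dedent, fed to compile(), shows the exception class the test case exercises; IndentationError is a subclass of SyntaxError, so a compiler reporting either stays compatible with CPython:

src = "def someFunc():\n    a\n   b\n"
try:
    compile(src, "<test>", "exec")
except IndentationError as exc:
    # CPython raises IndentationError, which is also a SyntaxError.
    assert isinstance(exc, SyntaxError)
    print(type(exc).__name__, exc.msg)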
23fd59feb2576ec5837bbb2233f6b20befed5866
|
plumeria/plugins/google_translate.py
|
plumeria/plugins/google_translate.py
|
import json
import re
import pycountry
from titlecase import titlecase
from plumeria.command import commands, CommandError
from plumeria.message import Message
from plumeria.message.lists import parse_list
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
def find_language(code):
try:
return pycountry.languages.get(iso639_1_code=code)
except: pass
try:
return pycountry.languages.get(name=titlecase(code))
except: pass
raise CommandError("Unknown language code: {}".format(code))
@commands.register("translate", category="Search")
@rate_limit()
async def translate(message: Message):
"""
Translates text to another language.
Example::
/translate en fr hello
"""
parts = message.content.strip().split(" ", maxsplit=2)
if len(parts) < 3:
raise CommandError("At least 3 parameters are required: from language, to language, and the text")
from_lang, to_lang, text = parts
from_lang = find_language(from_lang)
to_lang = find_language(to_lang)
if from_lang == to_lang:
raise CommandError("The 'from language' can't be the same as the 'to language'.")
r = await http.get("https://translate.googleapis.com/translate_a/single", params={
"client": "gtx",
"sl": from_lang.iso639_1_code,
"tl": to_lang.iso639_1_code,
"dt": "t",
"q": text,
}, headers={
('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36')
})
print(r.text())
# this isn't very right
raw_data = re.sub(",,+", ",", r.text())
raw_data = raw_data.replace("[,", "[")
raw_data = raw_data.replace(",]", "]")
data = json.loads(raw_data)
if isinstance(data[0], list):
translations = data[0]
return "".join([e[0].replace("\\n", "\n") for e in translations])
else:
raise CommandError("Translation not available.")
|
Add a Google Translate plugin.
|
Add a Google Translate plugin.
|
Python
|
mit
|
sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria
|
Add a Google Translate plugin.
|
import json
import re
import pycountry
from titlecase import titlecase
from plumeria.command import commands, CommandError
from plumeria.message import Message
from plumeria.message.lists import parse_list
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
def find_language(code):
    try:
        return pycountry.languages.get(iso639_1_code=code)
    except Exception: pass
    try:
        return pycountry.languages.get(name=titlecase(code))
    except Exception: pass
    raise CommandError("Unknown language code: {}".format(code))
@commands.register("translate", category="Search")
@rate_limit()
async def translate(message: Message):
"""
Translates text to another language.
Example::
/translate en fr hello
"""
parts = message.content.strip().split(" ", maxsplit=2)
if len(parts) < 3:
raise CommandError("At least 3 parameters are required: from language, to language, and the text")
from_lang, to_lang, text = parts
from_lang = find_language(from_lang)
to_lang = find_language(to_lang)
if from_lang == to_lang:
raise CommandError("The 'from language' can't be the same as the 'to language'.")
r = await http.get("https://translate.googleapis.com/translate_a/single", params={
"client": "gtx",
"sl": from_lang.iso639_1_code,
"tl": to_lang.iso639_1_code,
"dt": "t",
"q": text,
}, headers={
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
})
print(r.text())
# this isn't very right
raw_data = re.sub(",,+", ",", r.text())
raw_data = raw_data.replace("[,", "[")
raw_data = raw_data.replace(",]", "]")
data = json.loads(raw_data)
if isinstance(data[0], list):
translations = data[0]
return "".join([e[0].replace("\\n", "\n") for e in translations])
else:
raise CommandError("Translation not available.")
|
<commit_before><commit_msg>Add a Google Translate plugin.<commit_after>
|
import json
import re
import pycountry
from titlecase import titlecase
from plumeria.command import commands, CommandError
from plumeria.message import Message
from plumeria.message.lists import parse_list
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
def find_language(code):
    try:
        return pycountry.languages.get(iso639_1_code=code)
    except Exception: pass
    try:
        return pycountry.languages.get(name=titlecase(code))
    except Exception: pass
    raise CommandError("Unknown language code: {}".format(code))
@commands.register("translate", category="Search")
@rate_limit()
async def translate(message: Message):
"""
Translates text to another language.
Example::
/translate en fr hello
"""
parts = message.content.strip().split(" ", maxsplit=2)
if len(parts) < 3:
raise CommandError("At least 3 parameters are required: from language, to language, and the text")
from_lang, to_lang, text = parts
from_lang = find_language(from_lang)
to_lang = find_language(to_lang)
if from_lang == to_lang:
raise CommandError("The 'from language' can't be the same as the 'to language'.")
r = await http.get("https://translate.googleapis.com/translate_a/single", params={
"client": "gtx",
"sl": from_lang.iso639_1_code,
"tl": to_lang.iso639_1_code,
"dt": "t",
"q": text,
}, headers={
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
})
print(r.text())
# this isn't very right
raw_data = re.sub(",,+", ",", r.text())
raw_data = raw_data.replace("[,", "[")
raw_data = raw_data.replace(",]", "]")
data = json.loads(raw_data)
if isinstance(data[0], list):
translations = data[0]
return "".join([e[0].replace("\\n", "\n") for e in translations])
else:
raise CommandError("Translation not available.")
|
Add a Google Translate plugin.import json
import re
import pycountry
from titlecase import titlecase
from plumeria.command import commands, CommandError
from plumeria.message import Message
from plumeria.message.lists import parse_list
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
def find_language(code):
    try:
        return pycountry.languages.get(iso639_1_code=code)
    except Exception: pass
    try:
        return pycountry.languages.get(name=titlecase(code))
    except Exception: pass
    raise CommandError("Unknown language code: {}".format(code))
@commands.register("translate", category="Search")
@rate_limit()
async def translate(message: Message):
"""
Translates text to another language.
Example::
/translate en fr hello
"""
parts = message.content.strip().split(" ", maxsplit=2)
if len(parts) < 3:
raise CommandError("At least 3 parameters are required: from language, to language, and the text")
from_lang, to_lang, text = parts
from_lang = find_language(from_lang)
to_lang = find_language(to_lang)
if from_lang == to_lang:
raise CommandError("The 'from language' can't be the same as the 'to language'.")
r = await http.get("https://translate.googleapis.com/translate_a/single", params={
"client": "gtx",
"sl": from_lang.iso639_1_code,
"tl": to_lang.iso639_1_code,
"dt": "t",
"q": text,
}, headers={
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
})
print(r.text())
# this isn't very right
raw_data = re.sub(",,+", ",", r.text())
raw_data = raw_data.replace("[,", "[")
raw_data = raw_data.replace(",]", "]")
data = json.loads(raw_data)
if isinstance(data[0], list):
translations = data[0]
return "".join([e[0].replace("\\n", "\n") for e in translations])
else:
raise CommandError("Translation not available.")
|
<commit_before><commit_msg>Add a Google Translate plugin.<commit_after>import json
import re
import pycountry
from titlecase import titlecase
from plumeria.command import commands, CommandError
from plumeria.message import Message
from plumeria.message.lists import parse_list
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
def find_language(code):
    try:
        return pycountry.languages.get(iso639_1_code=code)
    except Exception: pass
    try:
        return pycountry.languages.get(name=titlecase(code))
    except Exception: pass
    raise CommandError("Unknown language code: {}".format(code))
@commands.register("translate", category="Search")
@rate_limit()
async def translate(message: Message):
"""
Translates text to another language.
Example::
/translate en fr hello
"""
parts = message.content.strip().split(" ", maxsplit=2)
if len(parts) < 3:
raise CommandError("At least 3 parameters are required: from language, to language, and the text")
from_lang, to_lang, text = parts
from_lang = find_language(from_lang)
to_lang = find_language(to_lang)
if from_lang == to_lang:
raise CommandError("The 'from language' can't be the same as the 'to language'.")
r = await http.get("https://translate.googleapis.com/translate_a/single", params={
"client": "gtx",
"sl": from_lang.iso639_1_code,
"tl": to_lang.iso639_1_code,
"dt": "t",
"q": text,
}, headers={
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
})
print(r.text())
# this isn't very right
raw_data = re.sub(",,+", ",", r.text())
raw_data = raw_data.replace("[,", "[")
raw_data = raw_data.replace(",]", "]")
data = json.loads(raw_data)
if isinstance(data[0], list):
translations = data[0]
return "".join([e[0].replace("\\n", "\n") for e in translations])
else:
raise CommandError("Translation not available.")
|
|
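The regex patch-up in the plugin above works around the unofficial gtx endpoint emitting JSON with empty array slots that json.loads rejects. A self-contained sketch of the same normalisation on a made-up payload:

import json
import re

raw = '[[["bonjour","hello",,,1]],,"en"]'  # invented gtx-style response
raw = re.sub(",,+", ",", raw)
raw = raw.replace("[,", "[").replace(",]", "]")
print(json.loads(raw)[0][0][0])  # bonjour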
132c32076affebe1dd22b833befebe4e57dbb30e
|
var/spack/packages/muster/package.py
|
var/spack/packages/muster/package.py
|
from spack import *
class Muster(Package):
"""The Muster library provides implementations of sequential and
parallel K-Medoids clustering algorithms. It is intended as a
general framework for parallel cluster analysis, particularly
for performance data analysis on systems with very large
numbers of processes.
"""
homepage = "https://github.com/scalability-llnl/muster"
url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz"
version('1.0', '2eec6979a4a36d3a65a792d12969be16')
depends_on("boost")
depends_on("mpi")
def install(self, spec, prefix):
cmake(".", *std_cmake_args)
make()
make("install")
|
Add Muster parallel clustering library.
|
Add Muster parallel clustering library.
|
Python
|
lgpl-2.1
|
lgarren/spack,TheTimmy/spack,EmreAtes/spack,skosukhin/spack,lgarren/spack,LLNL/spack,matthiasdiener/spack,tmerrick1/spack,LLNL/spack,matthiasdiener/spack,TheTimmy/spack,tmerrick1/spack,mfherbst/spack,skosukhin/spack,iulian787/spack,lgarren/spack,iulian787/spack,iulian787/spack,lgarren/spack,tmerrick1/spack,krafczyk/spack,LLNL/spack,krafczyk/spack,matthiasdiener/spack,matthiasdiener/spack,iulian787/spack,lgarren/spack,skosukhin/spack,skosukhin/spack,EmreAtes/spack,skosukhin/spack,tmerrick1/spack,LLNL/spack,TheTimmy/spack,tmerrick1/spack,LLNL/spack,krafczyk/spack,EmreAtes/spack,TheTimmy/spack,krafczyk/spack,iulian787/spack,mfherbst/spack,TheTimmy/spack,mfherbst/spack,mfherbst/spack,EmreAtes/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack
|
Add Muster parallel clustering library.
|
from spack import *
class Muster(Package):
"""The Muster library provides implementations of sequential and
parallel K-Medoids clustering algorithms. It is intended as a
general framework for parallel cluster analysis, particularly
for performance data analysis on systems with very large
numbers of processes.
"""
homepage = "https://github.com/scalability-llnl/muster"
url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz"
version('1.0', '2eec6979a4a36d3a65a792d12969be16')
depends_on("boost")
depends_on("mpi")
def install(self, spec, prefix):
cmake(".", *std_cmake_args)
make()
make("install")
|
<commit_before><commit_msg>Add Muster parallel clustering library.<commit_after>
|
from spack import *
class Muster(Package):
"""The Muster library provides implementations of sequential and
parallel K-Medoids clustering algorithms. It is intended as a
general framework for parallel cluster analysis, particularly
for performance data analysis on systems with very large
numbers of processes.
"""
homepage = "https://github.com/scalability-llnl/muster"
url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz"
version('1.0', '2eec6979a4a36d3a65a792d12969be16')
depends_on("boost")
depends_on("mpi")
def install(self, spec, prefix):
cmake(".", *std_cmake_args)
make()
make("install")
|
Add Muster parallel clustering library.from spack import *
class Muster(Package):
"""The Muster library provides implementations of sequential and
parallel K-Medoids clustering algorithms. It is intended as a
general framework for parallel cluster analysis, particularly
for performance data analysis on systems with very large
numbers of processes.
"""
homepage = "https://github.com/scalability-llnl/muster"
url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz"
version('1.0', '2eec6979a4a36d3a65a792d12969be16')
depends_on("boost")
depends_on("mpi")
def install(self, spec, prefix):
cmake(".", *std_cmake_args)
make()
make("install")
|
<commit_before><commit_msg>Add Muster parallel clustering library.<commit_after>from spack import *
class Muster(Package):
"""The Muster library provides implementations of sequential and
parallel K-Medoids clustering algorithms. It is intended as a
general framework for parallel cluster analysis, particularly
for performance data analysis on systems with very large
numbers of processes.
"""
homepage = "https://github.com/scalability-llnl/muster"
url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz"
version('1.0', '2eec6979a4a36d3a65a792d12969be16')
depends_on("boost")
depends_on("mpi")
def install(self, spec, prefix):
cmake(".", *std_cmake_args)
make()
make("install")
|
|
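For context, Spack's std_cmake_args conventionally carries the install prefix (among other flags), so the install() above boils down to roughly the following sketch (the real argument list is longer):

import subprocess

def install_sketch(prefix):
    subprocess.check_call(["cmake", ".", "-DCMAKE_INSTALL_PREFIX=" + prefix])
    subprocess.check_call(["make"])
    subprocess.check_call(["make", "install"])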
ccf3ea0a1b89815710460a40eefc3f230b9660a3
|
get_credentials.py
|
get_credentials.py
|
"""
Program: Get Credentials
Programmer: Michael Fryar, Research Fellow, EPoD
Date created: January 5, 2017
Purpose: Gets OAuth2 credentials to allow interacting
with Google Sheets via API.
"""
from __future__ import print_function
import httplib2
import os
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/sheets.googleapis.com-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Google Sheets API Python Quickstart'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'sheets.googleapis.com-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
if __name__ == '__main__':
get_credentials()
|
Add script to get OAuth2 credentials for Google Sheets API
|
Add script to get OAuth2 credentials for Google Sheets API
|
Python
|
mit
|
mefryar/epodx-dashboards,mefryar/epodx-dashboards
|
Add script to get OAuth2 credentials for Google Sheets API
|
"""
Program: Get Credentials
Programmer: Michael Fryar, Research Fellow, EPoD
Date created: January 5, 2017
Purpose: Gets OAuth2 credentials to allow interacting
with Google Sheets via API.
"""
from __future__ import print_function
import httplib2
import os
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/sheets.googleapis.com-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Google Sheets API Python Quickstart'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'sheets.googleapis.com-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
if __name__ == '__main__':
get_credentials()
|
<commit_before><commit_msg>Add script to get OAuth2 credentials for Google Sheets API<commit_after>
|
"""
Program: Get Credentials
Programmer: Michael Fryar, Research Fellow, EPoD
Date created: January 5, 2017
Purpose: Gets OAuth2 credentials to allow interacting
with Google Sheets via API.
"""
from __future__ import print_function
import httplib2
import os
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/sheets.googleapis.com-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Google Sheets API Python Quickstart'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'sheets.googleapis.com-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
if __name__ == '__main__':
get_credentials()
|
Add script to get OAuth2 credentials for Google Sheets API"""
Program: Get Credentials
Programmer: Michael Fryar, Research Fellow, EPoD
Date created: January 5, 2017
Purpose: Gets OAuth2 credentials to allow interacting
with Google Sheets via API.
"""
from __future__ import print_function
import httplib2
import os
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/sheets.googleapis.com-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Google Sheets API Python Quickstart'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'sheets.googleapis.com-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
if __name__ == '__main__':
get_credentials()
|
<commit_before><commit_msg>Add script to get OAuth2 credentials for Google Sheets API<commit_after>"""
Program: Get Credentials
Programmer: Michael Fryar, Research Fellow, EPoD
Date created: January 5, 2017
Purpose: Gets OAuth2 credentials to allow interacting
with Google Sheets via API.
"""
from __future__ import print_function
import httplib2
import os
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/sheets.googleapis.com-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Google Sheets API Python Quickstart'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'sheets.googleapis.com-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
if __name__ == '__main__':
get_credentials()
|
|
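A usage sketch for the credentials this script obtains, following the Sheets quickstart the file is based on (the quickstart additionally passes a discoveryServiceUrl, omitted here; get_credentials() must have run once interactively first):

import httplib2
from apiclient import discovery

credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build("sheets", "v4", http=http)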
755c192a9299888475606a9e60c8c8c5105b6cd2
|
bed_utils/bed_from_basecall.py
|
bed_utils/bed_from_basecall.py
|
#!/usr/bin/env python
'''
Converts a file in basecall format to BED3 format.
'''
import sys,os
def basecall_to_bed(fname):
with open(fname) as f:
for line in f:
if line[0] == '#':
continue
cols = line.strip('\n').split('\t')
chrom = cols[0]
pos = int(cols[1])-1
sys.stdout.write('%s\t%s\t%s\n' % (chrom,pos,pos+1,))
def usage():
print __doc__
print 'Usage: %s basecall.txt' % os.path.basename(sys.argv[0])
sys.exit(1)
if __name__ == '__main__':
fname = None
for arg in sys.argv[1:]:
if not fname and os.path.exists(arg):
fname = arg
elif arg == '-h':
usage()
else:
usage()
if not fname:
usage()
basecall_to_bed(fname)
|
Convert basecall files to BED3 files
|
Convert basecall files to BED3 files
|
Python
|
bsd-3-clause
|
ngsutils/ngsutils,ngsutils/ngsutils,ngsutils/ngsutils
|
Convert basecall files to BED3 files
|
#!/usr/bin/env python
'''
Converts a file in basecall format to BED3 format.
'''
import sys,os
def basecall_to_bed(fname):
with open(fname) as f:
for line in f:
if line[0] == '#':
continue
cols = line.strip('\n').split('\t')
chrom = cols[0]
pos = int(cols[1])-1
sys.stdout.write('%s\t%s\t%s\n' % (chrom,pos,pos+1,))
def usage():
print __doc__
print 'Usage: %s basecall.txt' % os.path.basename(sys.argv[0])
sys.exit(1)
if __name__ == '__main__':
fname = None
for arg in sys.argv[1:]:
if not fname and os.path.exists(arg):
fname = arg
elif arg == '-h':
usage()
else:
usage()
if not fname:
usage()
basecall_to_bed(fname)
|
<commit_before><commit_msg>Convert basecall files to BED3 files<commit_after>
|
#!/usr/bin/env python
'''
Converts a file in basecall format to BED3 format.
'''
import sys,os
def basecall_to_bed(fname):
with open(fname) as f:
for line in f:
if line[0] == '#':
continue
cols = line.strip('\n').split('\t')
chrom = cols[0]
pos = int(cols[1])-1
sys.stdout.write('%s\t%s\t%s\n' % (chrom,pos,pos+1,))
def usage():
print __doc__
print 'Usage: %s basecall.txt' % os.path.basename(sys.argv[0])
sys.exit(1)
if __name__ == '__main__':
fname = None
for arg in sys.argv[1:]:
if not fname and os.path.exists(arg):
fname = arg
elif arg == '-h':
usage()
else:
usage()
if not fname:
usage()
basecall_to_bed(fname)
|
Convert basecall files to BED3 files#!/usr/bin/env python
'''
Converts a file in basecall format to BED3 format.
'''
import sys,os
def basecall_to_bed(fname):
with open(fname) as f:
for line in f:
if line[0] == '#':
continue
cols = line.strip('\n').split('\t')
chrom = cols[0]
pos = int(cols[1])-1
sys.stdout.write('%s\t%s\t%s\n' % (chrom,pos,pos+1,))
def usage():
print __doc__
print 'Usage: %s basecall.txt' % os.path.basename(sys.argv[0])
sys.exit(1)
if __name__ == '__main__':
fname = None
for arg in sys.argv[1:]:
if not fname and os.path.exists(arg):
fname = arg
elif arg == '-h':
usage()
else:
usage()
if not fname:
usage()
basecall_to_bed(fname)
|
<commit_before><commit_msg>Convert basecall files to BED3 files<commit_after>#!/usr/bin/env python
'''
Converts a file in basecall format to BED3 format.
'''
import sys,os
def basecall_to_bed(fname):
with open(fname) as f:
for line in f:
if line[0] == '#':
continue
cols = line.strip('\n').split('\t')
chrom = cols[0]
pos = int(cols[1])-1
sys.stdout.write('%s\t%s\t%s\n' % (chrom,pos,pos+1,))
def usage():
print __doc__
print 'Usage: %s basecall.txt' % os.path.basename(sys.argv[0])
sys.exit(1)
if __name__ == '__main__':
fname = None
for arg in sys.argv[1:]:
if not fname and os.path.exists(arg):
fname = arg
elif arg == '-h':
usage()
else:
usage()
if not fname:
usage()
basecall_to_bed(fname)
|
|
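The conversion hinges on basecall positions being 1-based while BED3 is 0-based and half-open. A one-record illustration with invented data:

line = "chr1\t1000\tA"  # hypothetical basecall record
cols = line.split("\t")
pos = int(cols[1]) - 1
print("%s\t%s\t%s" % (cols[0], pos, pos + 1))  # chr1  999  1000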
8eda98d75d2fb502b447deb2ab28e71611b021c3
|
joommf/tests/test_min_dyn_example.py
|
joommf/tests/test_min_dyn_example.py
|
def test_min_dyn():
import glob
import matplotlib.pyplot as plt
    # %matplotlib inline  (IPython magic; not valid in a plain .py file)
from joommf.sim import Sim
from joommf.mesh import Mesh
from joommf.energies.exchange import Exchange
from joommf.energies.demag import Demag
from joommf.drivers import evolver
from joommf.energies.zeeman import FixedZeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (1, 0, 1)
gamma = 2.21e5
alpha = 0.1
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create first simulation object.
sim1 = Sim(mesh, Ms, name='multiple_example_part1', debug=True)
t_sim = 1e-9
sim1.add_energy(Exchange(A))
sim1.add_energy(Demag())
sim1.add_energy(FixedZeeman(H))
sim1.set_evolver(
evolver.Minimiser(m_init, Ms, gamma))
# Set initial magnetisation.
# Run simulation.
sim1.minimise()
m_init2 = glob.glob(sim1.mif_filename[:-4] + "*.omf")[-1]
sim2 = Sim(mesh, Ms, name='multiple_example_part2', debug=True)
sim2.add_energy(Exchange(A))
sim2.add_energy(Demag())
sim2.add_energy(FixedZeeman(H))
sim2.set_evolver(
evolver.LLG(t_sim, m_init2, Ms, alpha, gamma, name='evolver'))
# Set initial magnetisation.
# Run simulation.
sim2.run()
|
Add minimisation & then dynamics example test
|
Add minimisation & then dynamics example test
|
Python
|
bsd-2-clause
|
fangohr/oommf-python,fangohr/oommf-python,fangohr/oommf-python
|
Add minimisation & then dynamics example test
|
def test_min_dyn():
import glob
import matplotlib.pyplot as plt
    # %matplotlib inline  (IPython magic; not valid in a plain .py file)
from joommf.sim import Sim
from joommf.mesh import Mesh
from joommf.energies.exchange import Exchange
from joommf.energies.demag import Demag
from joommf.drivers import evolver
from joommf.energies.zeeman import FixedZeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (1, 0, 1)
gamma = 2.21e5
alpha = 0.1
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create first simulation object.
sim1 = Sim(mesh, Ms, name='multiple_example_part1', debug=True)
t_sim = 1e-9
sim1.add_energy(Exchange(A))
sim1.add_energy(Demag())
sim1.add_energy(FixedZeeman(H))
sim1.set_evolver(
evolver.Minimiser(m_init, Ms, gamma))
# Set initial magnetisation.
# Run simulation.
sim1.minimise()
m_init2 = glob.glob(sim1.mif_filename[:-4] + "*.omf")[-1]
sim2 = Sim(mesh, Ms, name='multiple_example_part2', debug=True)
sim2.add_energy(Exchange(A))
sim2.add_energy(Demag())
sim2.add_energy(FixedZeeman(H))
sim2.set_evolver(
evolver.LLG(t_sim, m_init2, Ms, alpha, gamma, name='evolver'))
# Set initial magnetisation.
# Run simulation.
sim2.run()
|
<commit_before><commit_msg>Add minimisation & then dynamics example test<commit_after>
|
def test_min_dyn():
import glob
import matplotlib.pyplot as plt
    # %matplotlib inline  (IPython magic; not valid in a plain .py file)
from joommf.sim import Sim
from joommf.mesh import Mesh
from joommf.energies.exchange import Exchange
from joommf.energies.demag import Demag
from joommf.drivers import evolver
from joommf.energies.zeeman import FixedZeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (1, 0, 1)
gamma = 2.21e5
alpha = 0.1
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create first simulation object.
sim1 = Sim(mesh, Ms, name='multiple_example_part1', debug=True)
t_sim = 1e-9
sim1.add_energy(Exchange(A))
sim1.add_energy(Demag())
sim1.add_energy(FixedZeeman(H))
sim1.set_evolver(
evolver.Minimiser(m_init, Ms, gamma))
# Set initial magnetisation.
# Run simulation.
sim1.minimise()
m_init2 = glob.glob(sim1.mif_filename[:-4] + "*.omf")[-1]
sim2 = Sim(mesh, Ms, name='multiple_example_part2', debug=True)
sim2.add_energy(Exchange(A))
sim2.add_energy(Demag())
sim2.add_energy(FixedZeeman(H))
sim2.set_evolver(
evolver.LLG(t_sim, m_init2, Ms, alpha, gamma, name='evolver'))
# Set initial magnetisation.
# Run simulation.
sim2.run()
|
Add minimisation & then dynamics example testdef test_min_dyn():
import glob
import matplotlib.pyplot as plt
    # %matplotlib inline  (IPython magic; not valid in a plain .py file)
from joommf.sim import Sim
from joommf.mesh import Mesh
from joommf.energies.exchange import Exchange
from joommf.energies.demag import Demag
from joommf.drivers import evolver
from joommf.energies.zeeman import FixedZeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (1, 0, 1)
gamma = 2.21e5
alpha = 0.1
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create first simulation object.
sim1 = Sim(mesh, Ms, name='multiple_example_part1', debug=True)
t_sim = 1e-9
sim1.add_energy(Exchange(A))
sim1.add_energy(Demag())
sim1.add_energy(FixedZeeman(H))
sim1.set_evolver(
evolver.Minimiser(m_init, Ms, gamma))
# Set initial magnetisation.
# Run simulation.
sim1.minimise()
m_init2 = glob.glob(sim1.mif_filename[:-4] + "*.omf")[-1]
sim2 = Sim(mesh, Ms, name='multiple_example_part2', debug=True)
sim2.add_energy(Exchange(A))
sim2.add_energy(Demag())
sim2.add_energy(FixedZeeman(H))
sim2.set_evolver(
evolver.LLG(t_sim, m_init2, Ms, alpha, gamma, name='evolver'))
# Set initial magnetisation.
# Run simulation.
sim2.run()
|
<commit_before><commit_msg>Add minimisation & then dynamics example test<commit_after>def test_min_dyn():
import glob
import matplotlib.pyplot as plt
    # %matplotlib inline  (IPython magic; not valid in a plain .py file)
from joommf.sim import Sim
from joommf.mesh import Mesh
from joommf.energies.exchange import Exchange
from joommf.energies.demag import Demag
from joommf.drivers import evolver
from joommf.energies.zeeman import FixedZeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (1, 0, 1)
gamma = 2.21e5
alpha = 0.1
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create first simulation object.
sim1 = Sim(mesh, Ms, name='multiple_example_part1', debug=True)
t_sim = 1e-9
sim1.add_energy(Exchange(A))
sim1.add_energy(Demag())
sim1.add_energy(FixedZeeman(H))
sim1.set_evolver(
evolver.Minimiser(m_init, Ms, gamma))
# Set initial magnetisation.
# Run simulation.
sim1.minimise()
m_init2 = glob.glob(sim1.mif_filename[:-4] + "*.omf")[-1]
sim2 = Sim(mesh, Ms, name='multiple_example_part2', debug=True)
sim2.add_energy(Exchange(A))
sim2.add_energy(Demag())
sim2.add_energy(FixedZeeman(H))
sim2.set_evolver(
evolver.LLG(t_sim, m_init2, Ms, alpha, gamma, name='evolver'))
# Set initial magnetisation.
# Run simulation.
sim2.run()
|
|
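The hand-off between the two simulations globs for the minimiser's final .omf file; glob order is filesystem-dependent, so a hedged variant of that lookup sorts the matches first (the filename is a placeholder):

import glob

mif_filename = "multiple_example_part1.mif"  # placeholder
candidates = sorted(glob.glob(mif_filename[:-4] + "*.omf"))
m_init2 = candidates[-1] if candidates else None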
2e8b74b7b7067ab3bb0345247c4d16521234822f
|
tests/test_line_number_regression.py
|
tests/test_line_number_regression.py
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Copyright 2017 Eddie Antonio Santos <easantos@ualberta.ca>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#from sensibility.evaluation.evaluate import first_with_line_no, IndexResult
from sensibility.evaluation.distance import determine_fix_event
from sensibility.edit import Deletion
from sensibility.language import language
# This example caused a crash due to the edit's line number being
# calculated on the wrong revision of the file.
#
# This example has been modified slightly, but the line numbers and token
# indices are the same.
# id: 11608/193044
error_file = b"""
/**
* ##### # ########### ## ##### ############## ####.
*
* @###### (#### ####)
* @####### (# ####### ###### ## # ####)
*/
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days);
}
}
}"""
fixed_file = b"""
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days); //print result
}
}"""
def setup():
language.set_language('java')
def test_line_number_regression() -> None:
event = determine_fix_event(error_file, fixed_file)
assert isinstance(event.fix, Deletion)
# One of the curly braces at the end of the ERROR file.
assert event.fix.original_token == language.vocabulary.to_index('}')
assert event.line_no in {19, 22, 23}
|
Add failing line number test.
|
Add failing line number test.
|
Python
|
apache-2.0
|
naturalness/sensibility,naturalness/sensibility,naturalness/sensibility,naturalness/sensibility
|
Add failing line number test.
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Copyright 2017 Eddie Antonio Santos <easantos@ualberta.ca>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#from sensibility.evaluation.evaluate import first_with_line_no, IndexResult
from sensibility.evaluation.distance import determine_fix_event
from sensibility.edit import Deletion
from sensibility.language import language
# This example caused a crash due to the edit's line number being
# calculated on the wrong revision of the file.
#
# This example has been modified slightly, but the line numbers and token
# indices are the same.
# id: 11608/193044
error_file = b"""
/**
* ##### # ########### ## ##### ############## ####.
*
* @###### (#### ####)
* @####### (# ####### ###### ## # ####)
*/
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days);
}
}
}"""
fixed_file = b"""
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days); //print result
}
}"""
def setup():
language.set_language('java')
def test_line_number_regression() -> None:
event = determine_fix_event(error_file, fixed_file)
assert isinstance(event.fix, Deletion)
# One of the curly braces at the end of the ERROR file.
assert event.fix.original_token == language.vocabulary.to_index('}')
assert event.line_no in {19, 22, 23}
|
<commit_before><commit_msg>Add failing line number test.<commit_after>
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Copyright 2017 Eddie Antonio Santos <easantos@ualberta.ca>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#from sensibility.evaluation.evaluate import first_with_line_no, IndexResult
from sensibility.evaluation.distance import determine_fix_event
from sensibility.edit import Deletion
from sensibility.language import language
# This example caused a crash due to the edit's line number being
# calculated on the wrong revision of the file.
#
# This example has been modified slightly, but the line numbers and token
# indices are the same.
# id: 11608/193044
error_file = b"""
/**
* ##### # ########### ## ##### ############## ####.
*
* @###### (#### ####)
* @####### (# ####### ###### ## # ####)
*/
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days);
}
}
}"""
fixed_file = b"""
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days); //print result
}
}"""
def setup():
language.set_language('java')
def test_line_number_regression() -> None:
event = determine_fix_event(error_file, fixed_file)
assert isinstance(event.fix, Deletion)
# One of the curly braces at the end of the ERROR file.
assert event.fix.original_token == language.vocabulary.to_index('}')
assert event.line_no in {19, 22, 23}
|
Add failing line number test.#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Copyright 2017 Eddie Antonio Santos <easantos@ualberta.ca>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#from sensibility.evaluation.evaluate import first_with_line_no, IndexResult
from sensibility.evaluation.distance import determine_fix_event
from sensibility.edit import Deletion
from sensibility.language import language
# This example caused a crash due to the edit's line number being
# calculated on the wrong revision of the file.
#
# This example has been modified slightly, but the line numbers and token
# indices are the same.
# id: 11608/193044
error_file = b"""
/**
* ##### # ########### ## ##### ############## ####.
*
* @###### (#### ####)
* @####### (# ####### ###### ## # ####)
*/
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days);
}
}
}"""
fixed_file = b"""
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days); //print result
}
}"""
def setup():
language.set_language('java')
def test_line_number_regression() -> None:
event = determine_fix_event(error_file, fixed_file)
assert isinstance(event.fix, Deletion)
# One of the curly braces at the end of the ERROR file.
assert event.fix.original_token == language.vocabulary.to_index('}')
assert event.line_no in {19, 22, 23}
|
<commit_before><commit_msg>Add failing line number test.<commit_after>#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# Copyright 2017 Eddie Antonio Santos <easantos@ualberta.ca>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#from sensibility.evaluation.evaluate import first_with_line_no, IndexResult
from sensibility.evaluation.distance import determine_fix_event
from sensibility.edit import Deletion
from sensibility.language import language
# This example caused a crash due to the edit's line number being
# calculated on the wrong revision of the file.
#
# This example has been modified slightly, but the line numbers and token
# indices are the same.
# id: 11608/193044
error_file = b"""
/**
* ##### # ########### ## ##### ############## ####.
*
* @###### (#### ####)
* @####### (# ####### ###### ## # ####)
*/
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days);
}
}
}"""
fixed_file = b"""
public class DaysAlivePrint
{
public static void main(String[] args)
{
Day birthday = new Day(1951, 5, 25);
Day today = new Day(2012, 7, 23);
int days = today.daysFrom(birthday);
System.out.println(days); //print result
}
}"""
def setup():
language.set_language('java')
def test_line_number_regression() -> None:
event = determine_fix_event(error_file, fixed_file)
assert isinstance(event.fix, Deletion)
# One of the curly braces at the end of the ERROR file.
assert event.fix.original_token == language.vocabulary.to_index('}')
assert event.line_no in {19, 22, 23}
|
|
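The assertion accepts several line numbers because the surplus '}' can be deleted at any trailing brace with the same effect on the token stream. A sketch that lists the candidate sites straight from the fixture:

closing = [n for n, line in enumerate(error_file.decode().splitlines(), start=1)
           if line.strip() == "}"]
print(closing)  # the test expects the chosen fix to land on line 19, 22 or 23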
7d2f454eb5302b62a9c22c69c319f6c9f8bab7fe
|
rst2pdf/tests/input/test_issue_65.py
|
rst2pdf/tests/input/test_issue_65.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate, Paragraph
from reportlab.platypus.tables import *
def go():
Story=[]
doc = SimpleDocTemplate("phello.pdf")
cell=[Paragraph('A',ParagraphStyle(name='Normal',
fontName='Helvetica',
fontSize=10,
leading=12)),]
# This story has only Helvetica
#Story=cell
# This one has helvetica and Times-Roman
Story=cell+[Table([[cell]])]
doc.build(Story)
go()
|
Test case showing the RL bug
|
Test case showing the RL bug
|
Python
|
mit
|
rafaelmartins/rst2pdf,rafaelmartins/rst2pdf
|
Test case showing the RL bug
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate, Paragraph
from reportlab.platypus.tables import *
def go():
Story=[]
doc = SimpleDocTemplate("phello.pdf")
cell=[Paragraph('A',ParagraphStyle(name='Normal',
fontName='Helvetica',
fontSize=10,
leading=12)),]
# This story has only Helvetica
#Story=cell
# This one has helvetica and Times-Roman
Story=cell+[Table([[cell]])]
doc.build(Story)
go()
|
<commit_before><commit_msg>Test case showing the RL bug<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate, Paragraph
from reportlab.platypus.tables import *
def go():
Story=[]
doc = SimpleDocTemplate("phello.pdf")
cell=[Paragraph('A',ParagraphStyle(name='Normal',
fontName='Helvetica',
fontSize=10,
leading=12)),]
# This story has only Helvetica
#Story=cell
# This one has helvetica and Times-Roman
Story=cell+[Table([[cell]])]
doc.build(Story)
go()
|
Test case showing the RL bug#!/usr/bin/env python
# -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate, Paragraph
from reportlab.platypus.tables import *
def go():
Story=[]
doc = SimpleDocTemplate("phello.pdf")
cell=[Paragraph('A',ParagraphStyle(name='Normal',
fontName='Helvetica',
fontSize=10,
leading=12)),]
# This story has only Helvetica
#Story=cell
# This one has helvetica and Times-Roman
Story=cell+[Table([[cell]])]
doc.build(Story)
go()
|
<commit_before><commit_msg>Test case showing the RL bug<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate, Paragraph
from reportlab.platypus.tables import *
def go():
Story=[]
doc = SimpleDocTemplate("phello.pdf")
cell=[Paragraph('A',ParagraphStyle(name='Normal',
fontName='Helvetica',
fontSize=10,
leading=12)),]
# This story has only Helvetica
#Story=cell
# This one has helvetica and Times-Roman
Story=cell+[Table([[cell]])]
doc.build(Story)
go()
|
|
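One rough way to observe the font leakage the script demonstrates, assuming base-14 font names appear uncompressed in ReportLab output:

with open("phello.pdf", "rb") as fh:
    pdf = fh.read()
for name in (b"Helvetica", b"Times-Roman"):
    print(name.decode(), name in pdf)  # both True exhibits the bug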
7ffb2c2e56f80d12f5e7dccfe8555a741d67e335
|
megalist_dataflow/utils/execution.py
|
megalist_dataflow/utils/execution.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from typing import Iterable
OK_STATUS = 'OK'
class Action(Enum):
CM_OFFLINE_CONVERSION, \
ADS_OFFLINE_CONVERSION, \
ADS_SSD_UPLOAD, \
ADS_USER_LIST_UPLOAD, \
ADS_USER_LIST_REMOVE, \
GA_USER_LIST_UPLOAD = range(6)
class OriginType(Enum):
BIG_QUERY, \
CSV = range(2)
# TODO: CSV not yet implemented
class Execution:
def __init__(self,
origin_name, # type: str,
origin_type, # type: OriginType
origin_metadata, # type: str,
action, # type: Action
                 destination_name, # type: str
destination_metadata, # type: Iterable[str]
):
self._origin_name = origin_name
self._origin_type = origin_type
self._origin_metadata = origin_metadata
self._action = action
self._destination_name = destination_name
self._destination_metadata = destination_metadata
@property
def origin_name(self):
return self._origin_name
@property
def origin_type(self):
return self._origin_type
@property
def origin_metadata(self):
return self._origin_metadata
@property
def action(self):
return self._action
@property
def destination_name(self):
return self._destination_name
@property
def destination_metadata(self):
return self._destination_metadata
def __str__(self):
return 'Origin name: {}. Action: {}. Destination name: {}'.format(self.origin_name, self.action, self.destination_name)
def __eq__(self, other):
return self.origin_name == other.origin_name \
and self.origin_type == other.origin_type \
and self.origin_metadata == other.origin_metadata \
and self.action == other.action \
and self.destination_name == other.destination_name \
and self.destination_metadata == other.destination_metadata
|
Create Execution class to represent data read from configuration Sheet
|
Create Execution class to represent data read from configuration Sheet
|
Python
|
apache-2.0
|
google/megalista,google/megalista
|
Create Execution class to represent data read from configuration Sheet
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from typing import Iterable
OK_STATUS = 'OK'
class Action(Enum):
CM_OFFLINE_CONVERSION, \
ADS_OFFLINE_CONVERSION, \
ADS_SSD_UPLOAD, \
ADS_USER_LIST_UPLOAD, \
ADS_USER_LIST_REMOVE, \
GA_USER_LIST_UPLOAD = range(6)
class OriginType(Enum):
BIG_QUERY, \
CSV = range(2)
# TODO: CSV not yet implemented
class Execution:
def __init__(self,
origin_name, # type: str,
origin_type, # type: OriginType
origin_metadata, # type: str,
action, # type: Action
                 destination_name, # type: str
destination_metadata, # type: Iterable[str]
):
self._origin_name = origin_name
self._origin_type = origin_type
self._origin_metadata = origin_metadata
self._action = action
self._destination_name = destination_name
self._destination_metadata = destination_metadata
@property
def origin_name(self):
return self._origin_name
@property
def origin_type(self):
return self._origin_type
@property
def origin_metadata(self):
return self._origin_metadata
@property
def action(self):
return self._action
@property
def destination_name(self):
return self._destination_name
@property
def destination_metadata(self):
return self._destination_metadata
def __str__(self):
return 'Origin name: {}. Action: {}. Destination name: {}'.format(self.origin_name, self.action, self.destination_name)
def __eq__(self, other):
return self.origin_name == other.origin_name \
and self.origin_type == other.origin_type \
and self.origin_metadata == other.origin_metadata \
and self.action == other.action \
and self.destination_name == other.destination_name \
and self.destination_metadata == other.destination_metadata
|
<commit_before><commit_msg>Create Execution class to represent data read from configuration Sheet<commit_after>
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from typing import Iterable
OK_STATUS = 'OK'
class Action(Enum):
CM_OFFLINE_CONVERSION, \
ADS_OFFLINE_CONVERSION, \
ADS_SSD_UPLOAD, \
ADS_USER_LIST_UPLOAD, \
ADS_USER_LIST_REMOVE, \
GA_USER_LIST_UPLOAD = range(6)
class OriginType(Enum):
BIG_QUERY, \
CSV = range(2)
# TODO: CSV not yet implemented
class Execution:
def __init__(self,
origin_name, # type: str,
origin_type, # type: OriginType
origin_metadata, # type: str,
action, # type: Action
                 destination_name, # type: str
destination_metadata, # type: Iterable[str]
):
self._origin_name = origin_name
self._origin_type = origin_type
self._origin_metadata = origin_metadata
self._action = action
self._destination_name = destination_name
self._destination_metadata = destination_metadata
@property
def origin_name(self):
return self._origin_name
@property
def origin_type(self):
return self._origin_type
@property
def origin_metadata(self):
return self._origin_metadata
@property
def action(self):
return self._action
@property
def destination_name(self):
return self._destination_name
@property
def destination_metadata(self):
return self._destination_metadata
def __str__(self):
return 'Origin name: {}. Action: {}. Destination name: {}'.format(self.origin_name, self.action, self.destination_name)
def __eq__(self, other):
return self.origin_name == other.origin_name \
and self.origin_type == other.origin_type \
and self.origin_metadata == other.origin_metadata \
and self.action == other.action \
and self.destination_name == other.destination_name \
and self.destination_metadata == other.destination_metadata
|
Create Execution class to represent data read from configuration Sheet# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from typing import Iterable
OK_STATUS = 'OK'
class Action(Enum):
CM_OFFLINE_CONVERSION, \
ADS_OFFLINE_CONVERSION, \
ADS_SSD_UPLOAD, \
ADS_USER_LIST_UPLOAD, \
ADS_USER_LIST_REMOVE, \
GA_USER_LIST_UPLOAD = range(6)
class OriginType(Enum):
BIG_QUERY, \
CSV = range(2)
# TODO: CSV not yet implemented
class Execution:
def __init__(self,
origin_name, # type: str,
origin_type, # type: OriginType
origin_metadata, # type: str,
action, # type: Action
                 destination_name, # type: str
destination_metadata, # type: Iterable[str]
):
self._origin_name = origin_name
self._origin_type = origin_type
self._origin_metadata = origin_metadata
self._action = action
self._destination_name = destination_name
self._destination_metadata = destination_metadata
@property
def origin_name(self):
return self._origin_name
@property
def origin_type(self):
return self._origin_type
@property
def origin_metadata(self):
return self._origin_metadata
@property
def action(self):
return self._action
@property
def destination_name(self):
return self._destination_name
@property
def destination_metadata(self):
return self._destination_metadata
def __str__(self):
return 'Origin name: {}. Action: {}. Destination name: {}'.format(self.origin_name, self.action, self.destination_name)
def __eq__(self, other):
return self.origin_name == other.origin_name \
and self.origin_type == other.origin_type \
and self.origin_metadata == other.origin_metadata \
and self.action == other.action \
and self.destination_name == other.destination_name \
and self.destination_metadata == other.destination_metadata
|
<commit_before><commit_msg>Create Execution class to represent data read from configuration Sheet<commit_after># Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
from typing import Iterable
OK_STATUS = 'OK'
class Action(Enum):
CM_OFFLINE_CONVERSION, \
ADS_OFFLINE_CONVERSION, \
ADS_SSD_UPLOAD, \
ADS_USER_LIST_UPLOAD, \
ADS_USER_LIST_REMOVE, \
GA_USER_LIST_UPLOAD = range(6)
class OriginType(Enum):
BIG_QUERY, \
CSV = range(2)
# TODO: CSV not yet implemented
class Execution:
def __init__(self,
origin_name, # type: str,
origin_type, # type: OriginType
origin_metadata, # type: str,
action, # type: Action
                 destination_name, # type: str
destination_metadata, # type: Iterable[str]
):
self._origin_name = origin_name
self._origin_type = origin_type
self._origin_metadata = origin_metadata
self._action = action
self._destination_name = destination_name
self._destination_metadata = destination_metadata
@property
def origin_name(self):
return self._origin_name
@property
def origin_type(self):
return self._origin_type
@property
def origin_metadata(self):
return self._origin_metadata
@property
def action(self):
return self._action
@property
def destination_name(self):
return self._destination_name
@property
def destination_metadata(self):
return self._destination_metadata
def __str__(self):
return 'Origin name: {}. Action: {}. Destination name: {}'.format(self.origin_name, self.action, self.destination_name)
def __eq__(self, other):
return self.origin_name == other.origin_name \
and self.origin_type == other.origin_type \
and self.origin_metadata == other.origin_metadata \
and self.action == other.action \
and self.destination_name == other.destination_name \
and self.destination_metadata == other.destination_metadata
|
|
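A minimal construction sketch for the class above, with invented metadata values:

execution = Execution(
    origin_name="my_dataset",
    origin_type=OriginType.BIG_QUERY,
    origin_metadata="project.dataset.table",  # invented
    action=Action.ADS_USER_LIST_UPLOAD,
    destination_name="my_list",
    destination_metadata=["list_id_123"],  # invented
)
print(execution)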
d27920e11b1cc528f6707cf34ec9650acf02b983
|
datasets/management/commands/transfer_user_votes.py
|
datasets/management/commands/transfer_user_votes.py
|
from django.core.management.base import BaseCommand
from datasets.models import User
class Command(BaseCommand):
    help = 'Transfer the votes of a user to another. ' \
'Usage: python manage.py transfer_user_votes <from_username> <to_username>'
def add_arguments(self, parser):
parser.add_argument('from_username', type=str)
parser.add_argument('to_username', type=str)
def handle(self, *args, **options):
from_username = options['from_username']
to_username = options['to_username']
from_user = User.objects.get(username=from_username)
to_user = User.objects.get(username=to_username)
for vote in from_user.votes.all():
vote.created_by = to_user
vote.save()
|
Add command transfer user votes
|
Add command transfer user votes
|
Python
|
agpl-3.0
|
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
|
Add command transfer user votes
|
from django.core.management.base import BaseCommand
from datasets.models import User
class Command(BaseCommand):
help = 'Transfer the votes of a user to another. ' \
'Usage: python manage.py transfer_user_votes <from_username> <to_username>'
def add_arguments(self, parser):
parser.add_argument('from_username', type=str)
parser.add_argument('to_username', type=str)
def handle(self, *args, **options):
from_username = options['from_username']
to_username = options['to_username']
from_user = User.objects.get(username=from_username)
to_user = User.objects.get(username=to_username)
for vote in from_user.votes.all():
vote.created_by = to_user
vote.save()
|
<commit_before><commit_msg>Add command transfer user votes<commit_after>
|
from django.core.management.base import BaseCommand
from datasets.models import User
class Command(BaseCommand):
help = 'Transfer the votes of a user to another. ' \
'Usage: python manage.py transfer_user_votes <from_username> <to_username>'
def add_arguments(self, parser):
parser.add_argument('from_username', type=str)
parser.add_argument('to_username', type=str)
def handle(self, *args, **options):
from_username = options['from_username']
to_username = options['to_username']
from_user = User.objects.get(username=from_username)
to_user = User.objects.get(username=to_username)
for vote in from_user.votes.all():
vote.created_by = to_user
vote.save()
|
Add command transfer user votesfrom django.core.management.base import BaseCommand
from datasets.models import User
class Command(BaseCommand):
help = 'Transfer the votes of a user to another. ' \
'Usage: python manage.py transfer_user_votes <from_username> <to_username>'
def add_arguments(self, parser):
parser.add_argument('from_username', type=str)
parser.add_argument('to_username', type=str)
def handle(self, *args, **options):
from_username = options['from_username']
to_username = options['to_username']
from_user = User.objects.get(username=from_username)
to_user = User.objects.get(username=to_username)
for vote in from_user.votes.all():
vote.created_by = to_user
vote.save()
|
<commit_before><commit_msg>Add command transfer user votes<commit_after>from django.core.management.base import BaseCommand
from datasets.models import User
class Command(BaseCommand):
help = 'Transfer the votes of a user to another. ' \
'Usage: python manage.py transfer_user_votes <from_username> <to_username>'
def add_arguments(self, parser):
parser.add_argument('from_username', type=str)
parser.add_argument('to_username', type=str)
def handle(self, *args, **options):
from_username = options['from_username']
to_username = options['to_username']
from_user = User.objects.get(username=from_username)
to_user = User.objects.get(username=to_username)
for vote in from_user.votes.all():
vote.created_by = to_user
vote.save()
|
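The loop above issues one UPDATE per vote; for users with many votes the same transfer can be done in a single queryset call. A sketch against the same models, with the caveat that update() bypasses per-instance save() logic and signals:

from_user.votes.all().update(created_by=to_user)  # one bulk UPDATE statement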
|
bc1e78ac1caf18e0dfaa6479388cb8496e406419
|
packages/python/plotly/plotly/tests/test_core/test_subplots/test_find_nonempty_subplots.py
|
packages/python/plotly/plotly/tests/test_core/test_subplots/test_find_nonempty_subplots.py
|
import pytest
import plotly.graph_objects as go
from plotly.subplots import make_subplots
from itertools import combinations, product
from functools import reduce
def all_combos(it):
return list(
reduce(
lambda a, b: a + b,
[list(combinations(it, r)) for r in range(1, len(it))],
[],
)
)
def translate_layout_keys(t):
xr, yr = t
xr = xr.replace("axis", "")
yr = yr.replace("axis", "")
return (xr, yr)
def get_non_empty_subplots(fig, selector):
gr = fig._validate_get_grid_ref()
nrows = len(gr)
ncols = len(gr[0])
sp_addresses = product(range(nrows), range(ncols))
# assign a number similar to plotly's xref/yref (e.g., xref=x2) to each
# subplot address (xref=x -> 1, but xref=x3 -> 3)
# sp_ax_numbers=range(1,len(sp_addresses)+1)
# Get those subplot numbers which contain something
ret = list(
filter(
lambda sp: fig._subplot_not_empty(
*translate_layout_keys(sp.layout_keys), selector=selector
),
[gr[r][c][0] for r, c in sp_addresses],
)
)
return ret
def test_choose_correct_non_empty_subplots():
# This checks to see that the correct subplots are selected for all
# combinations of selectors
fig = make_subplots(2, 2)
fig.add_trace(go.Scatter(x=[1, 2], y=[3, 4]), row=1, col=1)
fig.add_shape(dict(type="rect", x0=1, x1=2, y0=3, y1=4), row=1, col=2)
fig.add_annotation(dict(text="A", x=1, y=2), row=2, col=1)
fig.add_layout_image(
dict(source="test", x=1, y=2, sizex=0.5, sizey=0.5), row=2, col=2
)
all_subplots = get_non_empty_subplots(fig, "all")
selectors = all_combos(["traces", "shapes", "annotations", "images"])
subplot_combos = all_combos(all_subplots)
for s, spc in zip(selectors, subplot_combos):
sps = tuple(get_non_empty_subplots(fig, s))
assert sps == spc
|
Test finding empty subplots for all selector combinations
|
Test finding empty subplots for all selector combinations
This tests go.Figure._subplot_not_empty
|
Python
|
mit
|
plotly/plotly.py,plotly/plotly.py,plotly/plotly.py
|
Test finding empty subplots for all selector combinations
This tests go.Figure._subplot_not_empty
|
import pytest
import plotly.graph_objects as go
from plotly.subplots import make_subplots
from itertools import combinations, product
from functools import reduce
def all_combos(it):
return list(
reduce(
lambda a, b: a + b,
[list(combinations(it, r)) for r in range(1, len(it))],
[],
)
)
def translate_layout_keys(t):
xr, yr = t
xr = xr.replace("axis", "")
yr = yr.replace("axis", "")
return (xr, yr)
def get_non_empty_subplots(fig, selector):
gr = fig._validate_get_grid_ref()
nrows = len(gr)
ncols = len(gr[0])
sp_addresses = product(range(nrows), range(ncols))
# assign a number similar to plotly's xref/yref (e.g., xref=x2) to each
# subplot address (xref=x -> 1, but xref=x3 -> 3)
# sp_ax_numbers=range(1,len(sp_addresses)+1)
# Get those subplot numbers which contain something
ret = list(
filter(
lambda sp: fig._subplot_not_empty(
*translate_layout_keys(sp.layout_keys), selector=selector
),
[gr[r][c][0] for r, c in sp_addresses],
)
)
return ret
def test_choose_correct_non_empty_subplots():
# This checks to see that the correct subplots are selected for all
# combinations of selectors
fig = make_subplots(2, 2)
fig.add_trace(go.Scatter(x=[1, 2], y=[3, 4]), row=1, col=1)
fig.add_shape(dict(type="rect", x0=1, x1=2, y0=3, y1=4), row=1, col=2)
fig.add_annotation(dict(text="A", x=1, y=2), row=2, col=1)
fig.add_layout_image(
dict(source="test", x=1, y=2, sizex=0.5, sizey=0.5), row=2, col=2
)
all_subplots = get_non_empty_subplots(fig, "all")
selectors = all_combos(["traces", "shapes", "annotations", "images"])
subplot_combos = all_combos(all_subplots)
for s, spc in zip(selectors, subplot_combos):
sps = tuple(get_non_empty_subplots(fig, s))
assert sps == spc
|
<commit_before><commit_msg>Test finding empty subplots for all selector combinations
This tests go.Figure._subplot_not_empty<commit_after>
|
import pytest
import plotly.graph_objects as go
from plotly.subplots import make_subplots
from itertools import combinations, product
from functools import reduce
def all_combos(it):
return list(
reduce(
lambda a, b: a + b,
[list(combinations(it, r)) for r in range(1, len(it))],
[],
)
)
def translate_layout_keys(t):
xr, yr = t
xr = xr.replace("axis", "")
yr = yr.replace("axis", "")
return (xr, yr)
def get_non_empty_subplots(fig, selector):
gr = fig._validate_get_grid_ref()
nrows = len(gr)
ncols = len(gr[0])
sp_addresses = product(range(nrows), range(ncols))
# assign a number similar to plotly's xref/yref (e.g., xref=x2) to each
# subplot address (xref=x -> 1, but xref=x3 -> 3)
# sp_ax_numbers=range(1,len(sp_addresses)+1)
# Get those subplot numbers which contain something
ret = list(
filter(
lambda sp: fig._subplot_not_empty(
*translate_layout_keys(sp.layout_keys), selector=selector
),
[gr[r][c][0] for r, c in sp_addresses],
)
)
return ret
def test_choose_correct_non_empty_subplots():
# This checks to see that the correct subplots are selected for all
# combinations of selectors
fig = make_subplots(2, 2)
fig.add_trace(go.Scatter(x=[1, 2], y=[3, 4]), row=1, col=1)
fig.add_shape(dict(type="rect", x0=1, x1=2, y0=3, y1=4), row=1, col=2)
fig.add_annotation(dict(text="A", x=1, y=2), row=2, col=1)
fig.add_layout_image(
dict(source="test", x=1, y=2, sizex=0.5, sizey=0.5), row=2, col=2
)
all_subplots = get_non_empty_subplots(fig, "all")
selectors = all_combos(["traces", "shapes", "annotations", "images"])
subplot_combos = all_combos(all_subplots)
for s, spc in zip(selectors, subplot_combos):
sps = tuple(get_non_empty_subplots(fig, s))
assert sps == spc
|
Test finding empty subplots for all selector combinations
This tests go.Figure._subplot_not_emptyimport pytest
import plotly.graph_objects as go
from plotly.subplots import make_subplots
from itertools import combinations, product
from functools import reduce
def all_combos(it):
return list(
reduce(
lambda a, b: a + b,
[list(combinations(it, r)) for r in range(1, len(it))],
[],
)
)
def translate_layout_keys(t):
xr, yr = t
xr = xr.replace("axis", "")
yr = yr.replace("axis", "")
return (xr, yr)
def get_non_empty_subplots(fig, selector):
gr = fig._validate_get_grid_ref()
nrows = len(gr)
ncols = len(gr[0])
sp_addresses = product(range(nrows), range(ncols))
# assign a number similar to plotly's xref/yref (e.g., xref=x2) to each
# subplot address (xref=x -> 1, but xref=x3 -> 3)
# sp_ax_numbers=range(1,len(sp_addresses)+1)
# Get those subplot numbers which contain something
ret = list(
filter(
lambda sp: fig._subplot_not_empty(
*translate_layout_keys(sp.layout_keys), selector=selector
),
[gr[r][c][0] for r, c in sp_addresses],
)
)
return ret
def test_choose_correct_non_empty_subplots():
# This checks to see that the correct subplots are selected for all
# combinations of selectors
fig = make_subplots(2, 2)
fig.add_trace(go.Scatter(x=[1, 2], y=[3, 4]), row=1, col=1)
fig.add_shape(dict(type="rect", x0=1, x1=2, y0=3, y1=4), row=1, col=2)
fig.add_annotation(dict(text="A", x=1, y=2), row=2, col=1)
fig.add_layout_image(
dict(source="test", x=1, y=2, sizex=0.5, sizey=0.5), row=2, col=2
)
all_subplots = get_non_empty_subplots(fig, "all")
selectors = all_combos(["traces", "shapes", "annotations", "images"])
subplot_combos = all_combos(all_subplots)
for s, spc in zip(selectors, subplot_combos):
sps = tuple(get_non_empty_subplots(fig, s))
assert sps == spc
|
<commit_before><commit_msg>Test finding empty subplots for all selector combinations
This tests go.Figure._subplot_not_empty<commit_after>import pytest
import plotly.graph_objects as go
from plotly.subplots import make_subplots
from itertools import combinations, product
from functools import reduce
def all_combos(it):
return list(
reduce(
lambda a, b: a + b,
[list(combinations(it, r)) for r in range(1, len(it))],
[],
)
)
def translate_layout_keys(t):
xr, yr = t
xr = xr.replace("axis", "")
yr = yr.replace("axis", "")
return (xr, yr)
def get_non_empty_subplots(fig, selector):
gr = fig._validate_get_grid_ref()
nrows = len(gr)
ncols = len(gr[0])
sp_addresses = product(range(nrows), range(ncols))
# assign a number similar to plotly's xref/yref (e.g., xref=x2) to each
# subplot address (xref=x -> 1, but xref=x3 -> 3)
# sp_ax_numbers=range(1,len(sp_addresses)+1)
# Get those subplot numbers which contain something
ret = list(
filter(
lambda sp: fig._subplot_not_empty(
*translate_layout_keys(sp.layout_keys), selector=selector
),
[gr[r][c][0] for r, c in sp_addresses],
)
)
return ret
def test_choose_correct_non_empty_subplots():
# This checks to see that the correct subplots are selected for all
# combinations of selectors
fig = make_subplots(2, 2)
fig.add_trace(go.Scatter(x=[1, 2], y=[3, 4]), row=1, col=1)
fig.add_shape(dict(type="rect", x0=1, x1=2, y0=3, y1=4), row=1, col=2)
fig.add_annotation(dict(text="A", x=1, y=2), row=2, col=1)
fig.add_layout_image(
dict(source="test", x=1, y=2, sizex=0.5, sizey=0.5), row=2, col=2
)
all_subplots = get_non_empty_subplots(fig, "all")
selectors = all_combos(["traces", "shapes", "annotations", "images"])
subplot_combos = all_combos(all_subplots)
for s, spc in zip(selectors, subplot_combos):
sps = tuple(get_non_empty_subplots(fig, s))
assert sps == spc
|
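For reference, all_combos enumerates every non-empty proper subset of its input, ordered by size, which is what lets selectors and subplot_combos be zipped positionally above. An equivalent sketch using itertools.chain instead of reduce:

from itertools import chain, combinations

def all_combos_alt(it):
    # Sizes 1 .. len(it)-1; the full-length combination is deliberately excluded.
    return list(chain.from_iterable(combinations(it, r) for r in range(1, len(it))))

print(all_combos_alt(['a', 'b', 'c']))
# [('a',), ('b',), ('c',), ('a', 'b'), ('a', 'c'), ('b', 'c')]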
|
04c692ca4b6045a49b76dab46847bc058adbdacd
|
dmrs_preprocess/handle_ltop.py
|
dmrs_preprocess/handle_ltop.py
|
def handle_ltop_links(dmrs_xml):
'''
Remove LTOP links from DMRS and add LTOP attribute to the DMRS entity
:param dmrs_xml: Input DMRS XML
:return: Modified DMRS XML
'''
ltop = '-1'
links_to_remove = list()
for entity in dmrs_xml:
if entity.tag == 'link':
link = entity
# Remove LTOP (ghost) link
if link.attrib['from'] == '0':
links_to_remove.append(link)
ltop = link.attrib['to']
for link in links_to_remove:
dmrs_xml.remove(link)
dmrs_xml.attrib['LTOP'] = ltop
del dmrs_xml.attrib['index']
return dmrs_xml
|
Attach LTOP as the attribute and remove index
|
Attach LTOP as the attribute and remove index
|
Python
|
mit
|
matichorvat/pydmrs
|
Attach LTOP as the attribute and remove index
|
def handle_ltop_links(dmrs_xml):
'''
Remove LTOP links from DMRS and add LTOP attribute to the DMRS entity
:param dmrs_xml: Input DMRS XML
:return: Modified DMRS XML
'''
ltop = '-1'
links_to_remove = list()
for entity in dmrs_xml:
if entity.tag == 'link':
link = entity
# Remove LTOP (ghost) link
if link.attrib['from'] == '0':
links_to_remove.append(link)
ltop = link.attrib['to']
for link in links_to_remove:
dmrs_xml.remove(link)
dmrs_xml.attrib['LTOP'] = ltop
del dmrs_xml.attrib['index']
return dmrs_xml
|
<commit_before><commit_msg>Attach LTOP as the attribute and remove index<commit_after>
|
def handle_ltop_links(dmrs_xml):
'''
Remove LTOP links from DMRS and add LTOP attribute to the DMRS entity
:param dmrs_xml: Input DMRS XML
:return: Modified DMRS XML
'''
ltop = '-1'
links_to_remove = list()
for entity in dmrs_xml:
if entity.tag == 'link':
link = entity
# Remove LTOP (ghost) link
if link.attrib['from'] == '0':
links_to_remove.append(link)
ltop = link.attrib['to']
for link in links_to_remove:
dmrs_xml.remove(link)
dmrs_xml.attrib['LTOP'] = ltop
del dmrs_xml.attrib['index']
return dmrs_xml
|
Attach LTOP as the attribute and remove index
def handle_ltop_links(dmrs_xml):
'''
Remove LTOP links from DMRS and add LTOP attribute to the DMRS entity
:param dmrs_xml: Input DMRS XML
:return: Modified DMRS XML
'''
ltop = '-1'
links_to_remove = list()
for entity in dmrs_xml:
if entity.tag == 'link':
link = entity
# Remove LTOP (ghost) link
if link.attrib['from'] == '0':
links_to_remove.append(link)
ltop = link.attrib['to']
for link in links_to_remove:
dmrs_xml.remove(link)
dmrs_xml.attrib['LTOP'] = ltop
del dmrs_xml.attrib['index']
return dmrs_xml
|
<commit_before><commit_msg>Attach LTOP as the attribute and remove index<commit_after>
def handle_ltop_links(dmrs_xml):
'''
Remove LTOP links from DMRS and add LTOP attribute to the DMRS entity
:param dmrs_xml: Input DMRS XML
:return: Modified DMRS XML
'''
ltop = '-1'
links_to_remove = list()
for entity in dmrs_xml:
if entity.tag == 'link':
link = entity
# Remove LTOP (ghost) link
if link.attrib['from'] == '0':
links_to_remove.append(link)
ltop = link.attrib['to']
for link in links_to_remove:
dmrs_xml.remove(link)
dmrs_xml.attrib['LTOP'] = ltop
del dmrs_xml.attrib['index']
return dmrs_xml
|
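A minimal round trip through handle_ltop_links, using a hand-written DMRS fragment; the node ids are illustrative. Note the function assumes the root element carries an index attribute, since it deletes it unconditionally:

import xml.etree.ElementTree as ET

dmrs_in = ET.fromstring(
    '<dmrs index="10002">'
    '<link from="0" to="10002"/>'      # ghost LTOP link
    '<link from="10001" to="10002"/>'
    '</dmrs>')

dmrs_out = handle_ltop_links(dmrs_in)
print(dmrs_out.attrib['LTOP'])        # '10002', taken from the removed ghost link
print(len(dmrs_out.findall('link')))  # 1, only the real link remains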
|
23f8cee3b11c839216f9ef7621fbcc039a4e57be
|
py/longest-harmonious-subsequence.py
|
py/longest-harmonious-subsequence.py
|
from collections import Counter
class Solution(object):
def findLHS(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
c = Counter(nums)
m = 0
for k in c:
if k + 1 in c:
m = max(m, c[k] + c[k + 1])
return m
|
Add py solution for 594. Longest Harmonious Subsequence
|
Add py solution for 594. Longest Harmonious Subsequence
594. Longest Harmonious Subsequence: https://leetcode.com/problems/longest-harmonious-subsequence/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 594. Longest Harmonious Subsequence
594. Longest Harmonious Subsequence: https://leetcode.com/problems/longest-harmonious-subsequence/
|
from collections import Counter
class Solution(object):
def findLHS(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
c = Counter(nums)
m = 0
for k in c:
if k + 1 in c:
m = max(m, c[k] + c[k + 1])
return m
|
<commit_before><commit_msg>Add py solution for 594. Longest Harmonious Subsequence
594. Longest Harmonious Subsequence: https://leetcode.com/problems/longest-harmonious-subsequence/<commit_after>
|
from collections import Counter
class Solution(object):
def findLHS(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
c = Counter(nums)
m = 0
for k in c:
if k + 1 in c:
m = max(m, c[k] + c[k + 1])
return m
|
Add py solution for 594. Longest Harmonious Subsequence
594. Longest Harmonious Subsequence: https://leetcode.com/problems/longest-harmonious-subsequence/from collections import Counter
class Solution(object):
def findLHS(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
c = Counter(nums)
m = 0
for k in c:
if k + 1 in c:
m = max(m, c[k] + c[k + 1])
return m
|
<commit_before><commit_msg>Add py solution for 594. Longest Harmonious Subsequence
594. Longest Harmonious Subsequence: https://leetcode.com/problems/longest-harmonious-subsequence/<commit_after>from collections import Counter
class Solution(object):
def findLHS(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
c = Counter(nums)
m = 0
for k in c:
if k + 1 in c:
m = max(m, c[k] + c[k + 1])
return m
|
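A quick check against the canonical example for this problem; the longest harmonious subsequence of the first input is [3,2,2,2,3], and the second input has no pair of values differing by exactly 1:

print(Solution().findLHS([1, 3, 2, 2, 5, 2, 3, 7]))  # 5
print(Solution().findLHS([1, 1, 1, 1]))              # 0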
|
555c5ce53f454d538701c56628749b3e9f363034
|
py/longest-uncommon-subsequence-i.py
|
py/longest-uncommon-subsequence-i.py
|
class Solution(object):
def findLUSlength(self, a, b):
"""
:type a: str
:type b: str
:rtype: int
"""
if a == b:
return -1
else:
return max(len(a), len(b))
|
Add py solution for 521. Longest Uncommon Subsequence I
|
Add py solution for 521. Longest Uncommon Subsequence I
521. Longest Uncommon Subsequence I: https://leetcode.com/problems/longest-uncommon-subsequence-i/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 521. Longest Uncommon Subsequence I
521. Longest Uncommon Subsequence I: https://leetcode.com/problems/longest-uncommon-subsequence-i/
|
class Solution(object):
def findLUSlength(self, a, b):
"""
:type a: str
:type b: str
:rtype: int
"""
if a == b:
return -1
else:
return max(len(a), len(b))
|
<commit_before><commit_msg>Add py solution for 521. Longest Uncommon Subsequence I
521. Longest Uncommon Subsequence I: https://leetcode.com/problems/longest-uncommon-subsequence-i/<commit_after>
|
class Solution(object):
def findLUSlength(self, a, b):
"""
:type a: str
:type b: str
:rtype: int
"""
if a == b:
return -1
else:
return max(len(a), len(b))
|
Add py solution for 521. Longest Uncommon Subsequence I
521. Longest Uncommon Subsequence I: https://leetcode.com/problems/longest-uncommon-subsequence-i/class Solution(object):
def findLUSlength(self, a, b):
"""
:type a: str
:type b: str
:rtype: int
"""
if a == b:
return -1
else:
return max(len(a), len(b))
|
<commit_before><commit_msg>Add py solution for 521. Longest Uncommon Subsequence I
521. Longest Uncommon Subsequence I: https://leetcode.com/problems/longest-uncommon-subsequence-i/<commit_after>class Solution(object):
def findLUSlength(self, a, b):
"""
:type a: str
:type b: str
:rtype: int
"""
if a == b:
return -1
else:
return max(len(a), len(b))
|
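Two spot checks: when the strings differ, the longer one cannot be a subsequence of the other, so its full length is the answer; identical strings have no uncommon subsequence at all:

print(Solution().findLUSlength('aba', 'cdc'))  # 3
print(Solution().findLUSlength('aaa', 'aaa'))  # -1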
|
b8bb7aef7371f52f53f5871b9e27375c39aced97
|
tests/unit/docs/test_subresource.py
|
tests/unit/docs/test_subresource.py
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests.unit.docs import BaseDocsTest
from boto3.docs.subresource import SubResourceDocumenter
class TestSubResourceDocumenter(BaseDocsTest):
def test_document_sub_resources(self):
sub_resource_documentor = SubResourceDocumenter(self.resource)
sub_resource_documentor.document_sub_resources(self.doc_structure)
self.assert_contains_lines_in_order([
'.. py:method:: Sample(name)',
' Creates a Sample resource.::',
" sample = myservice.Sample('name')",
' :type name: string',
" :param name: The Sample's name identifier.",
' :rtype: :py:class:`MyService.Sample`',
' :returns: A Sample resource',
])
|
Add test for subresource documentation
|
Add test for subresource documentation
|
Python
|
apache-2.0
|
boto/boto3
|
Add test for subresource documentation
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests.unit.docs import BaseDocsTest
from boto3.docs.subresource import SubResourceDocumenter
class TestSubResourceDocumenter(BaseDocsTest):
def test_document_sub_resources(self):
sub_resource_documentor = SubResourceDocumenter(self.resource)
sub_resource_documentor.document_sub_resources(self.doc_structure)
self.assert_contains_lines_in_order([
'.. py:method:: Sample(name)',
' Creates a Sample resource.::',
" sample = myservice.Sample('name')",
' :type name: string',
" :param name: The Sample's name identifier.",
' :rtype: :py:class:`MyService.Sample`',
' :returns: A Sample resource',
])
|
<commit_before><commit_msg>Add test for subresource documentation<commit_after>
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests.unit.docs import BaseDocsTest
from boto3.docs.subresource import SubResourceDocumenter
class TestSubResourceDocumenter(BaseDocsTest):
def test_document_sub_resources(self):
sub_resource_documentor = SubResourceDocumenter(self.resource)
sub_resource_documentor.document_sub_resources(self.doc_structure)
self.assert_contains_lines_in_order([
'.. py:method:: Sample(name)',
' Creates a Sample resource.::',
" sample = myservice.Sample('name')",
' :type name: string',
" :param name: The Sample's name identifier.",
' :rtype: :py:class:`MyService.Sample`',
' :returns: A Sample resource',
])
|
Add test for subresource documentation# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests.unit.docs import BaseDocsTest
from boto3.docs.subresource import SubResourceDocumenter
class TestSubResourceDocumenter(BaseDocsTest):
def test_document_sub_resources(self):
sub_resource_documentor = SubResourceDocumenter(self.resource)
sub_resource_documentor.document_sub_resources(self.doc_structure)
self.assert_contains_lines_in_order([
'.. py:method:: Sample(name)',
' Creates a Sample resource.::',
" sample = myservice.Sample('name')",
' :type name: string',
" :param name: The Sample's name identifier.",
' :rtype: :py:class:`MyService.Sample`',
' :returns: A Sample resource',
])
|
<commit_before><commit_msg>Add test for subresource documentation<commit_after># Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests.unit.docs import BaseDocsTest
from boto3.docs.subresource import SubResourceDocumenter
class TestSubResourceDocumenter(BaseDocsTest):
def test_document_sub_resources(self):
sub_resource_documentor = SubResourceDocumenter(self.resource)
sub_resource_documentor.document_sub_resources(self.doc_structure)
self.assert_contains_lines_in_order([
'.. py:method:: Sample(name)',
' Creates a Sample resource.::',
" sample = myservice.Sample('name')",
' :type name: string',
" :param name: The Sample's name identifier.",
' :rtype: :py:class:`MyService.Sample`',
' :returns: A Sample resource',
])
|
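The RST asserted above documents a sub-resource factory, so the runtime call it describes would look roughly like the sketch below; MyService is a test fixture, not a real AWS service, and the resource name here is hypothetical:

import boto3

myservice = boto3.resource('myservice')  # hypothetical, mirrors the test fixture
sample = myservice.Sample('name')        # returns a MyService.Sample resource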
|
c29ec289d8fe252aa8fd8d913abb3f1bd263eab1
|
scripts/web-server/alexa-pi.py
|
scripts/web-server/alexa-pi.py
|
from flask import Flask
from flask_ask import Ask, statement, convert_errors
import logging
from rfsend import rf_send
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
app = Flask(__name__)
ask = Ask(app, '/')
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
@ask.intent('LocationControlIntent', mapping={'status': 'status', 'location': 'location'})
def location_control(status, location):
render_message(location, status)
return statement('Turning {} {}!'.format(location, status))
def render_message(location, status):
loc = location.lower()
status = status.lower()
if loc == 'lights':
rf_send('4', status)
rf_send('5', status)
rf_send('2', status)
elif loc == 'christmas':
rf_send('2', status)
elif loc == 'kettle':
rf_send('1', status)
if __name__ == '__main__':
port = 5500 #the custom port you want
app.run(host='0.0.0.0', port=port)
|
Add Raspberry PI Alexa control server
|
Add Raspberry PI Alexa control server
|
Python
|
bsd-3-clause
|
kbsezginel/raspberry-pi,kbsezginel/raspberry-pi,kbsezginel/raspberry-pi,kbsezginel/raspberry-pi
|
Add Raspberry PI Alexa control server
|
from flask import Flask
from flask_ask import Ask, statement, convert_errors
import logging
from rfsend import rf_send
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
app = Flask(__name__)
ask = Ask(app, '/')
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
@ask.intent('LocationControlIntent', mapping={'status': 'status', 'location': 'location'})
def location_control(status, location):
render_message(location, status)
return statement('Turning {} {}!'.format(location, status))
def render_message(location, status):
loc = location.lower()
status = status.lower()
if loc == 'lights':
rf_send('4', status)
rf_send('5', status)
rf_send('2', status)
elif loc == 'christmas':
rf_send('2', status)
elif loc == 'kettle':
rf_send('1', status)
if __name__ == '__main__':
port = 5500 #the custom port you want
app.run(host='0.0.0.0', port=port)
|
<commit_before><commit_msg>Add Raspberry PI Alexa control server<commit_after>
|
from flask import Flask
from flask_ask import Ask, statement, convert_errors
import logging
from rfsend import rf_send
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
app = Flask(__name__)
ask = Ask(app, '/')
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
@ask.intent('LocationControlIntent', mapping={'status': 'status', 'location': 'location'})
def location_control(status, location):
render_message(location, status)
return statement('Turning {} {}!'.format(location, status))
def render_message(location, status):
loc = location.lower()
status = status.lower()
if loc == 'lights':
rf_send('4', status)
rf_send('5', status)
rf_send('2', status)
elif loc == 'christmas':
rf_send('2', status)
elif loc == 'kettle':
rf_send('1', status)
if __name__ == '__main__':
port = 5500 #the custom port you want
app.run(host='0.0.0.0', port=port)
|
Add Raspberry PI Alexa control serverfrom flask import Flask
from flask_ask import Ask, statement, convert_errors
import logging
from rfsend import rf_send
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
app = Flask(__name__)
ask = Ask(app, '/')
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
@ask.intent('LocationControlIntent', mapping={'status': 'status', 'location': 'location'})
def location_control(status, location):
render_message(location, status)
return statement('Turning {} {}!'.format(location, status))
def render_message(location, status):
loc = location.lower()
status = status.lower()
if loc == 'lights':
rf_send('4', status)
rf_send('5', status)
rf_send('2', status)
elif loc == 'christmas':
rf_send('2', status)
elif loc == 'kettle':
rf_send('1', status)
if __name__ == '__main__':
port = 5500 #the custom port you want
app.run(host='0.0.0.0', port=port)
|
<commit_before><commit_msg>Add Raspberry PI Alexa control server<commit_after>from flask import Flask
from flask_ask import Ask, statement, convert_errors
import logging
from rfsend import rf_send
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
app = Flask(__name__)
ask = Ask(app, '/')
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
@ask.intent('LocationControlIntent', mapping={'status': 'status', 'location': 'location'})
def location_control(status, location):
render_message(location, status)
return statement('Turning {} {}!'.format(location, status))
def render_message(location, status):
loc = location.lower()
status = status.lower()
if loc == 'lights':
rf_send('4', status)
rf_send('5', status)
rf_send('2', status)
elif loc == 'christmas':
rf_send('2', status)
elif loc == 'kettle':
rf_send('1', status)
if __name__ == '__main__':
port = 5500 #the custom port you want
app.run(host='0.0.0.0', port=port)
|
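rf_send comes from a local rfsend module that is not part of this record; to exercise the Flask-Ask routes without RF hardware attached, a stub with the same two-argument signature is enough. A sketch:

# rfsend.py - hypothetical stand-in for the real RF transmitter helper
def rf_send(channel, status):
    # The real helper keys an RF-controlled outlet; this stub only logs the call.
    print('rf_send: channel={} status={}'.format(channel, status))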
|
29ad205b01a45b87d939c8bebb3c2d92ef98f6cb
|
zephyr/management/commands/send_stats.py
|
zephyr/management/commands/send_stats.py
|
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
help = """Send some stats to statsd.
Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"""
def handle(self, *args, **options):
if len(args) != 3:
print "Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"
exit(1)
operation = args[0]
name = args[1]
val = args[2]
if settings.USING_STATSD:
from statsd import statsd
func = getattr(statsd, operation)
func(name, val)
|
Add a management command to send a statsd message
|
Add a management command to send a statsd message
(imported from commit 25ead4a0dee185a139eb5401e109c9cbe937c7bf)
|
Python
|
apache-2.0
|
hayderimran7/zulip,Drooids/zulip,Gabriel0402/zulip,levixie/zulip,dxq-git/zulip,PhilSk/zulip,yocome/zulip,esander91/zulip,aliceriot/zulip,eeshangarg/zulip,codeKonami/zulip,tommyip/zulip,jeffcao/zulip,mohsenSy/zulip,hafeez3000/zulip,jonesgithub/zulip,hengqujushi/zulip,JPJPJPOPOP/zulip,synicalsyntax/zulip,adnanh/zulip,tommyip/zulip,blaze225/zulip,zofuthan/zulip,babbage/zulip,willingc/zulip,deer-hope/zulip,Qgap/zulip,jimmy54/zulip,dnmfarrell/zulip,vikas-parashar/zulip,developerfm/zulip,peiwei/zulip,Suninus/zulip,samatdav/zulip,thomasboyt/zulip,levixie/zulip,hustlzp/zulip,qq1012803704/zulip,PhilSk/zulip,yocome/zulip,m1ssou/zulip,hafeez3000/zulip,bluesea/zulip,sharmaeklavya2/zulip,dattatreya303/zulip,johnnygaddarr/zulip,wweiradio/zulip,kokoar/zulip,wdaher/zulip,littledogboy/zulip,shubhamdhama/zulip,luyifan/zulip,KingxBanana/zulip,thomasboyt/zulip,cosmicAsymmetry/zulip,souravbadami/zulip,joyhchen/zulip,ryanbackman/zulip,dnmfarrell/zulip,andersk/zulip,DazWorrall/zulip,wangdeshui/zulip,amanharitsh123/zulip,vabs22/zulip,wweiradio/zulip,DazWorrall/zulip,gigawhitlocks/zulip,jessedhillon/zulip,ashwinirudrappa/zulip,bitemyapp/zulip,technicalpickles/zulip,bssrdf/zulip,zofuthan/zulip,hayderimran7/zulip,jackrzhang/zulip,akuseru/zulip,eastlhu/zulip,ericzhou2008/zulip,LeeRisk/zulip,voidException/zulip,reyha/zulip,showell/zulip,sharmaeklavya2/zulip,aakash-cr7/zulip,dhcrzf/zulip,technicalpickles/zulip,huangkebo/zulip,johnnygaddarr/zulip,MayB/zulip,andersk/zulip,armooo/zulip,itnihao/zulip,bitemyapp/zulip,peguin40/zulip,rht/zulip,qq1012803704/zulip,easyfmxu/zulip,noroot/zulip,AZtheAsian/zulip,joshisa/zulip,jainayush975/zulip,pradiptad/zulip,aakash-cr7/zulip,KingxBanana/zulip,jonesgithub/zulip,Drooids/zulip,samatdav/zulip,JanzTam/zulip,Qgap/zulip,firstblade/zulip,tbutter/zulip,Batterfii/zulip,krtkmj/zulip,andersk/zulip,proliming/zulip,zacps/zulip,nicholasbs/zulip,kou/zulip,ericzhou2008/zulip,peiwei/zulip,Frouk/zulip,willingc/zulip,jimmy54/zulip,m1ssou/zulip,verma-varsha/zulip,bowlofstew/zulip,Drooids/zulip,karamcnair/zulip,calvinleenyc/zulip,kaiyuanheshang/zulip,akuseru/zulip,andersk/zulip,reyha/zulip,kaiyuanheshang/zulip,kou/zulip,jphilipsen05/zulip,amallia/zulip,jainayush975/zulip,technicalpickles/zulip,rishig/zulip,AZtheAsian/zulip,developerfm/zulip,bitemyapp/zulip,amanharitsh123/zulip,johnny9/zulip,JPJPJPOPOP/zulip,peguin40/zulip,hafeez3000/zulip,wangdeshui/zulip,bluesea/zulip,praveenaki/zulip,KJin99/zulip,thomasboyt/zulip,gigawhitlocks/zulip,shrikrishnaholla/zulip,schatt/zulip,karamcnair/zulip,jimmy54/zulip,guiquanz/zulip,zulip/zulip,armooo/zulip,so0k/zulip,mdavid/zulip,praveenaki/zulip,tommyip/zulip,MayB/zulip,ikasumiwt/zulip,willingc/zulip,KJin99/zulip,gkotian/zulip,deer-hope/zulip,stamhe/zulip,mahim97/zulip,showell/zulip,ericzhou2008/zulip,aps-sids/zulip,shaunstanislaus/zulip,amallia/zulip,shaunstanislaus/zulip,cosmicAsymmetry/zulip,arpith/zulip,avastu/zulip,ahmadassaf/zulip,Batterfii/zulip,jphilipsen05/zulip,themass/zulip,gigawhitlocks/zulip,zofuthan/zulip,wangdeshui/zulip,seapasulli/zulip,amyliu345/zulip,DazWorrall/zulip,shubhamdhama/zulip,dwrpayne/zulip,j831/zulip,firstblade/zulip,yuvipanda/zulip,isht3/zulip,johnny9/zulip,DazWorrall/zulip,synicalsyntax/zulip,zorojean/zulip,Drooids/zulip,Suninus/zulip,JanzTam/zulip,brockwhittaker/zulip,moria/zulip,technicalpickles/zulip,Qgap/zulip,JPJPJPOPOP/zulip,umkay/zulip,he15his/zulip,Frouk/zulip,avastu/zulip,calvinleenyc/zulip,developerfm/zulip,zhaoweigg/zulip,tiansiyuan/zulip,sup95/zulip,synicalsyntax/zulip,natanovia/zulip,eeshangarg/zulip,seapasulli/zulip,proliming/zulip,arpith/zulip,mdavid/zulip,firstblade/zulip,brockwhittaker/zulip,xuxiao/zulip,rht/zulip,pradiptad/zulip,isht3/zulip,peiwei/zulip,dotcool/zulip,jrowan/zulip,huangkebo/zulip,gkotian/zulip,umkay/zulip,aps-sids/zulip,bastianh/zulip,Vallher/zulip,codeKonami/zulip,tiansiyuan/zulip,synicalsyntax/zulip,adnanh/zulip,Vallher/zulip,voidException/zulip,arpitpanwar/zulip,Frouk/zulip,jerryge/zulip,amyliu345/zulip,jphilipsen05/zulip,xuanhan863/zulip,reyha/zulip,LAndreas/zulip,seapasulli/zulip,huangkebo/zulip,mansilladev/zulip,amallia/zulip,levixie/zulip,levixie/zulip,jrowan/zulip,esander91/zulip,samatdav/zulip,samatdav/zulip,JanzTam/zulip,swinghu/zulip,bastianh/zulip,babbage/zulip,arpitpanwar/zulip,vikas-parashar/zulip,dawran6/zulip,qq1012803704/zulip,andersk/zulip,xuanhan863/zulip,atomic-labs/zulip,gigawhitlocks/zulip,susansls/zulip,Jianchun1/zulip,christi3k/zulip,so0k/zulip,developerfm/zulip,PhilSk/zulip,Gabriel0402/zulip,seapasulli/zulip,brockwhittaker/zulip,wavelets/zulip,bowlofstew/zulip,jerryge/zulip,yocome/zulip,guiquanz/zulip,aps-sids/zulip,luyifan/zulip,dhcrzf/zulip,PhilSk/zulip,dhcrzf/zulip,developerfm/zulip,ahmadassaf/zulip,vakila/zulip,susansls/zulip,voidException/zulip,rht/zulip,Drooids/zulip,KJin99/zulip,tommyip/zulip,verma-varsha/zulip,jessedhillon/zulip,krtkmj/zulip,dxq-git/zulip,Diptanshu8/zulip,jimmy54/zulip,alliejones/zulip,dnmfarrell/zulip,huangkebo/zulip,shrikrishnaholla/zulip,dattatreya303/zulip,dnmfarrell/zulip,tdr130/zulip,deer-hope/zulip,jonesgithub/zulip,thomasboyt/zulip,itnihao/zulip,SmartPeople/zulip,Frouk/zulip,vaidap/zulip,vabs22/zulip,tdr130/zulip,KJin99/zulip,Qgap/zulip,ipernet/zulip,atomic-labs/zulip,suxinde2009/zulip,timabbott/zulip,Batterfii/zulip,themass/zulip,Gabriel0402/zulip,vakila/zulip,dotcool/zulip,lfranchi/zulip,mahim97/zulip,ApsOps/zulip,LAndreas/zulip,kaiyuanheshang/zulip,amallia/zulip,eastlhu/zulip,ashwinirudrappa/zulip,timabbott/zulip,tbutter/zulip,DazWorrall/zulip,rishig/zulip,RobotCaleb/zulip,bluesea/zulip,kokoar/zulip,stamhe/zulip,susansls/zulip,bssrdf/zulip,hayderimran7/zulip,dwrpayne/zulip,bastianh/zulip,hustlzp/zulip,punchagan/zulip,praveenaki/zulip,zhaoweigg/zulip,aps-sids/zulip,jessedhillon/zulip,hustlzp/zulip,paxapy/zulip,voidException/zulip,gkotian/zulip,dawran6/zulip,gkotian/zulip,dotcool/zulip,PhilSk/zulip,kokoar/zulip,Qgap/zulip,jerryge/zulip,moria/zulip,hj3938/zulip,MariaFaBella85/zulip,AZtheAsian/zulip,praveenaki/zulip,zorojean/zulip,PhilSk/zulip,joshisa/zulip,hustlzp/zulip,johnny9/zulip,dhcrzf/zulip,xuxiao/zulip,shaunstanislaus/zulip,shubhamdhama/zulip,bluesea/zulip,esander91/zulip,kaiyuanheshang/zulip,aps-sids/zulip,zwily/zulip,susansls/zulip,deer-hope/zulip,arpith/zulip,jessedhillon/zulip,umkay/zulip,EasonYi/zulip,dawran6/zulip,JPJPJPOPOP/zulip,rishig/zulip,krtkmj/zulip,Suninus/zulip,blaze225/zulip,jonesgithub/zulip,tbutter/zulip,bastianh/zulip,dxq-git/zulip,amyliu345/zulip,shaunstanislaus/zulip,itnihao/zulip,MayB/zulip,verma-varsha/zulip,amanharitsh123/zulip,wangdeshui/zulip,eastlhu/zulip,udxxabp/zulip,EasonYi/zulip,ericzhou2008/zulip,codeKonami/zulip,wavelets/zulip,esander91/zulip,dattatreya303/zulip,johnnygaddarr/zulip,guiquanz/zulip,zhaoweigg/zulip,atomic-labs/zulip,wweiradio/zulip,showell/zulip,atomic-labs/zulip,dwrpayne/zulip,hengqujushi/zulip,tiansiyuan/zulip,swinghu/zulip,JPJPJPOPOP/zulip,umkay/zulip,peiwei/zulip,tbutter/zulip,dnmfarrell/zulip,codeKonami/zulip,dotcool/zulip,qq1012803704/zulip,zhaoweigg/zulip,andersk/zulip,stamhe/zulip,noroot/zulip,SmartPeople/zulip,shaunstanislaus/zulip,wavelets/zulip,vaidap/zulip,mdavid/zulip,m1ssou/zulip,Batterfii/zulip,vakila/zulip,Jianchun1/zulip,ufosky-server/zulip,eeshangarg/zulip,zachallaun/zulip,moria/zulip,DazWorrall/zulip,willingc/zulip,xuanhan863/zulip,peiwei/zulip,atomic-labs/zulip,peguin40/zulip,zulip/zulip,hj3938/zulip,tdr130/zulip,moria/zulip,jonesgithub/zulip,vabs22/zulip,saitodisse/zulip,luyifan/zulip,zorojean/zulip,xuxiao/zulip,he15his/zulip,zacps/zulip,Jianchun1/zulip,dhcrzf/zulip,vaidap/zulip,niftynei/zulip,joyhchen/zulip,Qgap/zulip,timabbott/zulip,firstblade/zulip,fw1121/zulip,Galexrt/zulip,qq1012803704/zulip,Cheppers/zulip,bssrdf/zulip,PhilSk/zulip,hackerkid/zulip,avastu/zulip,kou/zulip,zacps/zulip,MariaFaBella85/zulip,fw1121/zulip,avastu/zulip,showell/zulip,zachallaun/zulip,zacps/zulip,nicholasbs/zulip,hj3938/zulip,rht/zulip,swinghu/zulip,natanovia/zulip,mahim97/zulip,adnanh/zulip,bluesea/zulip,krtkmj/zulip,vaidap/zulip,wdaher/zulip,shubhamdhama/zulip,hayderimran7/zulip,grave-w-grave/zulip,amanharitsh123/zulip,noroot/zulip,luyifan/zulip,swinghu/zulip,Diptanshu8/zulip,EasonYi/zulip,Galexrt/zulip,wavelets/zulip,ikasumiwt/zulip,udxxabp/zulip,blaze225/zulip,he15his/zulip,bastianh/zulip,peiwei/zulip,hj3938/zulip,sharmaeklavya2/zulip,KingxBanana/zulip,arpith/zulip,sonali0901/zulip,punchagan/zulip,ericzhou2008/zulip,tdr130/zulip,wavelets/zulip,calvinleenyc/zulip,hengqujushi/zulip,kokoar/zulip,hackerkid/zulip,swinghu/zulip,vabs22/zulip,aliceriot/zulip,yuvipanda/zulip,RobotCaleb/zulip,zachallaun/zulip,akuseru/zulip,ashwinirudrappa/zulip,timabbott/zulip,jimmy54/zulip,sup95/zulip,thomasboyt/zulip,bitemyapp/zulip,huangkebo/zulip,armooo/zulip,wangdeshui/zulip,proliming/zulip,JPJPJPOPOP/zulip,cosmicAsymmetry/zulip,moria/zulip,levixie/zulip,dotcool/zulip,jeffcao/zulip,Juanvulcano/zulip,paxapy/zulip,jainayush975/zulip,ashwinirudrappa/zulip,nicholasbs/zulip,m1ssou/zulip,technicalpickles/zulip,babbage/zulip,calvinleenyc/zulip,huangkebo/zulip,johnnygaddarr/zulip,KingxBanana/zulip,yocome/zulip,amallia/zulip,luyifan/zulip,he15his/zulip,jackrzhang/zulip,zulip/zulip,wweiradio/zulip,so0k/zulip,jeffcao/zulip,j831/zulip,Galexrt/zulip,punchagan/zulip,udxxabp/zulip,tommyip/zulip,bowlofstew/zulip,Jianchun1/zulip,jphilipsen05/zulip,niftynei/zulip,stamhe/zulip,sonali0901/zulip,karamcnair/zulip,itnihao/zulip,showell/zulip,ApsOps/zulip,aakash-cr7/zulip,amanharitsh123/zulip,shubhamdhama/zulip,easyfmxu/zulip,pradiptad/zulip,LAndreas/zulip,calvinleenyc/zulip,eastlhu/zulip,praveenaki/zulip,Batterfii/zulip,suxinde2009/zulip,eeshangarg/zulip,ApsOps/zulip,sharmaeklavya2/zulip,swinghu/zulip,Juanvulcano/zulip,ahmadassaf/zulip,j831/zulip,brainwane/zulip,easyfmxu/zulip,themass/zulip,shrikrishnaholla/zulip,umkay/zulip,sup95/zulip,zulip/zulip,mdavid/zulip,esander91/zulip,johnny9/zulip,zorojean/zulip,joshisa/zulip,littledogboy/zulip,paxapy/zulip,ufosky-server/zulip,adnanh/zulip,esander91/zulip,sonali0901/zulip,rishig/zulip,codeKonami/zulip,armooo/zulip,ryanbackman/zulip,mohsenSy/zulip,PaulPetring/zulip,jrowan/zulip,glovebx/zulip,Suninus/zulip,Drooids/zulip,zofuthan/zulip,ashwinirudrappa/zulip,aliceriot/zulip,xuxiao/zulip,zofuthan/zulip,jackrzhang/zulip,samatdav/zulip,jeffcao/zulip,dattatreya303/zulip,aliceriot/zulip,tommyip/zulip,suxinde2009/zulip,Gabriel0402/zulip,Cheppers/zulip,LeeRisk/zulip,mohsenSy/zulip,themass/zulip,EasonYi/zulip,hj3938/zulip,MayB/zulip,zulip/zulip,dxq-git/zulip,suxinde2009/zulip,seapasulli/zulip,ufosky-server/zulip,jackrzhang/zulip,dotcool/zulip,littledogboy/zulip,joyhchen/zulip,hackerkid/zulip,kokoar/zulip,showell/zulip,lfranchi/zulip,bowlofstew/zulip,technicalpickles/zulip,johnny9/zulip,saitodisse/zulip,wdaher/zulip,Cheppers/zulip,vikas-parashar/zulip,tiansiyuan/zulip,rht/zulip,ashwinirudrappa/zulip,bssrdf/zulip,voidException/zulip,KJin99/zulip,mansilladev/zulip,brockwhittaker/zulip,zwily/zulip,xuxiao/zulip,aliceriot/zulip,PaulPetring/zulip,Suninus/zulip,MayB/zulip,Diptanshu8/zulip,JanzTam/zulip,amanharitsh123/zulip,dhcrzf/zulip,gkotian/zulip,zachallaun/zulip,sup95/zulip,vaidap/zulip,dwrpayne/zulip,RobotCaleb/zulip,bssrdf/zulip,xuanhan863/zulip,gkotian/zulip,susansls/zulip,dhcrzf/zulip,ikasumiwt/zulip,wdaher/zulip,proliming/zulip,MayB/zulip,Batterfii/zulip,zorojean/zulip,Cheppers/zulip,yuvipanda/zulip,pradiptad/zulip,joyhchen/zulip,Diptanshu8/zulip,shrikrishnaholla/zulip,vakila/zulip,Frouk/zulip,littledogboy/zulip,dwrpayne/zulip,rishig/zulip,wdaher/zulip,LeeRisk/zulip,udxxabp/zulip,adnanh/zulip,tiansiyuan/zulip,developerfm/zulip,SmartPeople/zulip,timabbott/zulip,brainwane/zulip,j831/zulip,LAndreas/zulip,tbutter/zulip,amyliu345/zulip,ahmadassaf/zulip,avastu/zulip,vaidap/zulip,synicalsyntax/zulip,gigawhitlocks/zulip,firstblade/zulip,zhaoweigg/zulip,niftynei/zulip,aps-sids/zulip,praveenaki/zulip,jerryge/zulip,willingc/zulip,sonali0901/zulip,mansilladev/zulip,zhaoweigg/zulip,hayderimran7/zulip,bitemyapp/zulip,ahmadassaf/zulip,LeeRisk/zulip,Vallher/zulip,LAndreas/zulip,glovebx/zulip,alliejones/zulip,jerryge/zulip,zorojean/zulip,praveenaki/zulip,kokoar/zulip,Juanvulcano/zulip,ashwinirudrappa/zulip,karamcnair/zulip,joshisa/zulip,arpitpanwar/zulip,hengqujushi/zulip,rishig/zulip,grave-w-grave/zulip,suxinde2009/zulip,proliming/zulip,ufosky-server/zulip,ipernet/zulip,wangdeshui/zulip,armooo/zulip,dxq-git/zulip,punchagan/zulip,jonesgithub/zulip,synicalsyntax/zulip,voidException/zulip,avastu/zulip,JanzTam/zulip,swinghu/zulip,codeKonami/zulip,levixie/zulip,voidException/zulip,saitodisse/zulip,mansilladev/zulip,zulip/zulip,PaulPetring/zulip,luyifan/zulip,so0k/zulip,jerryge/zulip,hj3938/zulip,schatt/zulip,jessedhillon/zulip,ipernet/zulip,zacps/zulip,j831/zulip,kou/zulip,rht/zulip,jerryge/zulip,brockwhittaker/zulip,alliejones/zulip,vakila/zulip,jimmy54/zulip,souravbadami/zulip,glovebx/zulip,SmartPeople/zulip,zwily/zulip,cosmicAsymmetry/zulip,sonali0901/zulip,calvinleenyc/zulip,Frouk/zulip,brainwane/zulip,nicholasbs/zulip,deer-hope/zulip,jeffcao/zulip,hayderimran7/zulip,akuseru/zulip,mdavid/zulip,deer-hope/zulip,Suninus/zulip,umkay/zulip,souravbadami/zulip,ryanbackman/zulip,dxq-git/zulip,akuseru/zulip,punchagan/zulip,itnihao/zulip,Frouk/zulip,hafeez3000/zulip,guiquanz/zulip,RobotCaleb/zulip,xuxiao/zulip,natanovia/zulip,levixie/zulip,pradiptad/zulip,EasonYi/zulip,ipernet/zulip,LeeRisk/zulip,developerfm/zulip,tommyip/zulip,amallia/zulip,punchagan/zulip,Gabriel0402/zulip,glovebx/zulip,udxxabp/zulip,lfranchi/zulip,dawran6/zulip,hafeez3000/zulip,ikasumiwt/zulip,KJin99/zulip,peiwei/zulip,alliejones/zulip,amyliu345/zulip,ryanbackman/zulip,bowlofstew/zulip,ryansnowboarder/zulip,tdr130/zulip,mansilladev/zulip,Gabriel0402/zulip,gkotian/zulip,mahim97/zulip,cosmicAsymmetry/zulip,niftynei/zulip,hengqujushi/zulip,ryanbackman/zulip,dotcool/zulip,johnny9/zulip,arpitpanwar/zulip,aps-sids/zulip,TigorC/zulip,LeeRisk/zulip,fw1121/zulip,kaiyuanheshang/zulip,noroot/zulip,amyliu345/zulip,PaulPetring/zulip,littledogboy/zulip,tiansiyuan/zulip,MayB/zulip,yuvipanda/zulip,zwily/zulip,brainwane/zulip,vikas-parashar/zulip,punchagan/zulip,dattatreya303/zulip,Cheppers/zulip,xuanhan863/zulip,kokoar/zulip,johnnygaddarr/zulip,eeshangarg/zulip,hackerkid/zulip,thomasboyt/zulip,arpitpanwar/zulip,littledogboy/zulip,johnnygaddarr/zulip,adnanh/zulip,ikasumiwt/zulip,lfranchi/zulip,Galexrt/zulip,aakash-cr7/zulip,sonali0901/zulip,brainwane/zulip,yuvipanda/zulip,kou/zulip,jackrzhang/zulip,karamcnair/zulip,zorojean/zulip,arpith/zulip,peguin40/zulip,ryansnowboarder/zulip,wdaher/zulip,showell/zulip,KingxBanana/zulip,mohsenSy/zulip,MariaFaBella85/zulip,aliceriot/zulip,alliejones/zulip,shaunstanislaus/zulip,Juanvulcano/zulip,itnihao/zulip,Galexrt/zulip,jphilipsen05/zulip,aakash-cr7/zulip,mahim97/zulip,j831/zulip,ryansnowboarder/zulip,fw1121/zulip,Vallher/zulip,hafeez3000/zulip,KJin99/zulip,natanovia/zulip,jrowan/zulip,MariaFaBella85/zulip,ericzhou2008/zulip,stamhe/zulip,zachallaun/zulip,fw1121/zulip,babbage/zulip,joyhchen/zulip,huangkebo/zulip,Diptanshu8/zulip,deer-hope/zulip,zwily/zulip,ryansnowboarder/zulip,itnihao/zulip,ahmadassaf/zulip,PaulPetring/zulip,KingxBanana/zulip,arpitpanwar/zulip,RobotCaleb/zulip,wangdeshui/zulip,vabs22/zulip,verma-varsha/zulip,jainayush975/zulip,fw1121/zulip,MariaFaBella85/zulip,hackerkid/zulip,kou/zulip,glovebx/zulip,andersk/zulip,krtkmj/zulip,yuvipanda/zulip,christi3k/zulip,MariaFaBella85/zulip,peguin40/zulip,mdavid/zulip,gigawhitlocks/zulip,m1ssou/zulip,sup95/zulip,zulip/zulip,eeshangarg/zulip,jonesgithub/zulip,schatt/zulip,he15his/zulip,yocome/zulip,ryansnowboarder/zulip,reyha/zulip,qq1012803704/zulip,schatt/zulip,mohsenSy/zulip,shaunstanislaus/zulip,zachallaun/zulip,JanzTam/zulip,RobotCaleb/zulip,SmartPeople/zulip,aakash-cr7/zulip,jeffcao/zulip,ryansnowboarder/zulip,stamhe/zulip,zacps/zulip,mansilladev/zulip,kaiyuanheshang/zulip,easyfmxu/zulip,jessedhillon/zulip,reyha/zulip,proliming/zulip,wavelets/zulip,Diptanshu8/zulip,brainwane/zulip,nicholasbs/zulip,themass/zulip,PaulPetring/zulip,noroot/zulip,tiansiyuan/zulip,amallia/zulip,ikasumiwt/zulip,isht3/zulip,Gabriel0402/zulip,natanovia/zulip,suxinde2009/zulip,eastlhu/zulip,niftynei/zulip,zwily/zulip,ahmadassaf/zulip,LeeRisk/zulip,paxapy/zulip,dxq-git/zulip,suxinde2009/zulip,babbage/zulip,timabbott/zulip,eeshangarg/zulip,shrikrishnaholla/zulip,Juanvulcano/zulip,souravbadami/zulip,EasonYi/zulip,bowlofstew/zulip,saitodisse/zulip,guiquanz/zulip,stamhe/zulip,proliming/zulip,zhaoweigg/zulip,schatt/zulip,dwrpayne/zulip,shrikrishnaholla/zulip,joshisa/zulip,ufosky-server/zulip,jeffcao/zulip,sharmaeklavya2/zulip,dnmfarrell/zulip,JanzTam/zulip,grave-w-grave/zulip,ufosky-server/zulip,Vallher/zulip,hj3938/zulip,hayderimran7/zulip,AZtheAsian/zulip,jackrzhang/zulip,AZtheAsian/zulip,mahim97/zulip,natanovia/zulip,schatt/zulip,zofuthan/zulip,guiquanz/zulip,m1ssou/zulip,Vallher/zulip,niftynei/zulip,easyfmxu/zulip,dwrpayne/zulip,grave-w-grave/zulip,SmartPeople/zulip,nicholasbs/zulip,paxapy/zulip,babbage/zulip,sharmaeklavya2/zulip,wweiradio/zulip,willingc/zulip,mohsenSy/zulip,xuxiao/zulip,AZtheAsian/zulip,bastianh/zulip,TigorC/zulip,easyfmxu/zulip,christi3k/zulip,kou/zulip,firstblade/zulip,firstblade/zulip,krtkmj/zulip,hustlzp/zulip,willingc/zulip,rishig/zulip,pradiptad/zulip,so0k/zulip,armooo/zulip,brockwhittaker/zulip,christi3k/zulip,synicalsyntax/zulip,ipernet/zulip,jrowan/zulip,Batterfii/zulip,atomic-labs/zulip,johnny9/zulip,yocome/zulip,aliceriot/zulip,easyfmxu/zulip,littledogboy/zulip
|
Add a management command to send a statsd message
(imported from commit 25ead4a0dee185a139eb5401e109c9cbe937c7bf)
|
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
help = """Send some stats to statsd.
Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"""
def handle(self, *args, **options):
if len(args) != 3:
print "Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"
exit(1)
operation = args[0]
name = args[1]
val = args[2]
if settings.USING_STATSD:
from statsd import statsd
func = getattr(statsd, operation)
func(name, val)
|
<commit_before><commit_msg>Add a management command to send a statsd message
(imported from commit 25ead4a0dee185a139eb5401e109c9cbe937c7bf)<commit_after>
|
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
help = """Send some stats to statsd.
Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"""
def handle(self, *args, **options):
if len(args) != 3:
print "Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"
exit(1)
operation = args[0]
name = args[1]
val = args[2]
if settings.USING_STATSD:
from statsd import statsd
func = getattr(statsd, operation)
func(name, val)
|
Add a management command to send a statsd message
(imported from commit 25ead4a0dee185a139eb5401e109c9cbe937c7bf)from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
help = """Send some stats to statsd.
Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"""
def handle(self, *args, **options):
if len(args) != 3:
print "Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"
exit(1)
operation = args[0]
name = args[1]
val = args[2]
if settings.USING_STATSD:
from statsd import statsd
func = getattr(statsd, operation)
func(name, val)
|
<commit_before><commit_msg>Add a management command to send a statsd message
(imported from commit 25ead4a0dee185a139eb5401e109c9cbe937c7bf)<commit_after>from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
help = """Send some stats to statsd.
Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"""
def handle(self, *args, **options):
if len(args) != 3:
print "Usage: python manage.py send_stats [incr|decr|timing|timer|gauge] name val"
exit(1)
operation = args[0]
name = args[1]
val = args[2]
if settings.USING_STATSD:
from statsd import statsd
func = getattr(statsd, operation)
func(name, val)
|
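As a concrete example, the invocation below increments a counter; the command simply dispatches to the statsd client method named by its first argument, so with USING_STATSD enabled it is equivalent to the Python call shown after it (the stat name is illustrative):

# python manage.py send_stats incr zulip.test 1
from statsd import statsd
statsd.incr('zulip.test', '1')  # note: val is forwarded as the raw string '1'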
|
0f855b84ccd7e0c1b068b56540f36ac02dca4b66
|
zerver/migrations/0074_fix_duplicate_attachments.py
|
zerver/migrations/0074_fix_duplicate_attachments.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-13 22:12
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Count
def fix_duplicate_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
"""Migration 0041 had a bug, where if multiple messages referenced the
same attachment, rather than creating a single attachment object
for all of them, we would incorrectly create one for each message.
This results in exceptions looking up the Attachment object
corresponding to a file that was used in multiple messages that
predate migration 0041.
This migration fixes this by removing the duplicates, moving their
messages onto a single canonical Attachment object (per path_id).
"""
Attachment = apps.get_model('zerver', 'Attachment')
# Loop through all groups of Attachment objects with the same `path_id`
for group in Attachment.objects.values('path_id').annotate(Count('id')).order_by().filter(id__count__gt=1):
# Sort by the minimum message ID, to find the first attachment
attachments = sorted(list(Attachment.objects.filter(path_id=group['path_id']).order_by("id")),
key = lambda x: min(x.messages.all().values_list('id')[0]))
surviving = attachments[0]
to_cleanup = attachments[1:]
for a in to_cleanup:
# For each duplicate attachment, we transfer its messages
# to the canonical attachment object for that path, and
# then delete the original attachment.
for msg in a.messages.all():
surviving.messages.add(msg)
surviving.is_realm_public = surviving.is_realm_public or a.is_realm_public
surviving.save()
a.delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0073_custom_profile_fields'),
]
operations = [
migrations.RunPython(fix_duplicate_attachments)
]
|
Add migration to fix duplicate attachment objects.
|
attachment: Add migration to fix duplicate attachment objects.
The comment in the migration explains this change in detail.
|
Python
|
apache-2.0
|
eeshangarg/zulip,ryanbackman/zulip,synicalsyntax/zulip,SmartPeople/zulip,dhcrzf/zulip,shubhamdhama/zulip,zulip/zulip,rishig/zulip,shubhamdhama/zulip,rht/zulip,punchagan/zulip,jrowan/zulip,timabbott/zulip,brainwane/zulip,jackrzhang/zulip,vaidap/zulip,rishig/zulip,rishig/zulip,rht/zulip,jackrzhang/zulip,rishig/zulip,punchagan/zulip,synicalsyntax/zulip,amanharitsh123/zulip,j831/zulip,eeshangarg/zulip,andersk/zulip,tommyip/zulip,brockwhittaker/zulip,showell/zulip,hackerkid/zulip,brainwane/zulip,verma-varsha/zulip,rht/zulip,andersk/zulip,amanharitsh123/zulip,vabs22/zulip,hackerkid/zulip,brainwane/zulip,mahim97/zulip,zulip/zulip,zulip/zulip,zulip/zulip,zulip/zulip,tommyip/zulip,vaidap/zulip,tommyip/zulip,dhcrzf/zulip,andersk/zulip,andersk/zulip,christi3k/zulip,rishig/zulip,zulip/zulip,eeshangarg/zulip,j831/zulip,rht/zulip,andersk/zulip,dhcrzf/zulip,hackerkid/zulip,mahim97/zulip,showell/zulip,dhcrzf/zulip,shubhamdhama/zulip,j831/zulip,tommyip/zulip,synicalsyntax/zulip,amanharitsh123/zulip,christi3k/zulip,j831/zulip,vaidap/zulip,rishig/zulip,timabbott/zulip,ryanbackman/zulip,Galexrt/zulip,brainwane/zulip,andersk/zulip,christi3k/zulip,dhcrzf/zulip,zulip/zulip,timabbott/zulip,shubhamdhama/zulip,Galexrt/zulip,kou/zulip,eeshangarg/zulip,showell/zulip,tommyip/zulip,vabs22/zulip,vaidap/zulip,jrowan/zulip,tommyip/zulip,mahim97/zulip,verma-varsha/zulip,SmartPeople/zulip,tommyip/zulip,timabbott/zulip,shubhamdhama/zulip,timabbott/zulip,punchagan/zulip,ryanbackman/zulip,kou/zulip,SmartPeople/zulip,rishig/zulip,SmartPeople/zulip,eeshangarg/zulip,Galexrt/zulip,punchagan/zulip,synicalsyntax/zulip,christi3k/zulip,Galexrt/zulip,jphilipsen05/zulip,jrowan/zulip,ryanbackman/zulip,brainwane/zulip,Galexrt/zulip,synicalsyntax/zulip,punchagan/zulip,mahim97/zulip,SmartPeople/zulip,jphilipsen05/zulip,kou/zulip,mahim97/zulip,amanharitsh123/zulip,jphilipsen05/zulip,dhcrzf/zulip,jackrzhang/zulip,mahim97/zulip,christi3k/zulip,jackrzhang/zulip,dhcrzf/zulip,hackerkid/zulip,hackerkid/zulip,punchagan/zulip,brockwhittaker/zulip,andersk/zulip,brockwhittaker/zulip,shubhamdhama/zulip,brockwhittaker/zulip,Galexrt/zulip,showell/zulip,jackrzhang/zulip,showell/zulip,brainwane/zulip,jrowan/zulip,showell/zulip,jrowan/zulip,brockwhittaker/zulip,jackrzhang/zulip,verma-varsha/zulip,synicalsyntax/zulip,verma-varsha/zulip,eeshangarg/zulip,jrowan/zulip,vaidap/zulip,SmartPeople/zulip,christi3k/zulip,brainwane/zulip,timabbott/zulip,rht/zulip,jackrzhang/zulip,eeshangarg/zulip,jphilipsen05/zulip,vabs22/zulip,amanharitsh123/zulip,synicalsyntax/zulip,j831/zulip,kou/zulip,hackerkid/zulip,ryanbackman/zulip,vaidap/zulip,hackerkid/zulip,j831/zulip,ryanbackman/zulip,brockwhittaker/zulip,shubhamdhama/zulip,amanharitsh123/zulip,jphilipsen05/zulip,vabs22/zulip,verma-varsha/zulip,timabbott/zulip,rht/zulip,Galexrt/zulip,kou/zulip,kou/zulip,verma-varsha/zulip,showell/zulip,kou/zulip,vabs22/zulip,punchagan/zulip,vabs22/zulip,jphilipsen05/zulip,rht/zulip
|
attachment: Add migration to fix duplicate attachment objects.
The comment in the migration explains this change in detail.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-13 22:12
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Count
def fix_duplicate_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
"""Migration 0041 had a bug, where if multiple messages referenced the
same attachment, rather than creating a single attachment object
for all of them, we would incorrectly create one for each message.
This results in exceptions looking up the Attachment object
corresponding to a file that was used in multiple messages that
predate migration 0041.
This migration fixes this by removing the duplicates, moving their
messages onto a single canonical Attachment object (per path_id).
"""
Attachment = apps.get_model('zerver', 'Attachment')
# Loop through all groups of Attachment objects with the same `path_id`
for group in Attachment.objects.values('path_id').annotate(Count('id')).order_by().filter(id__count__gt=1):
# Sort by the minimum message ID, to find the first attachment
attachments = sorted(list(Attachment.objects.filter(path_id=group['path_id']).order_by("id")),
key = lambda x: min(x.messages.all().values_list('id')[0]))
surviving = attachments[0]
to_cleanup = attachments[1:]
for a in to_cleanup:
# For each duplicate attachment, we transfer its messages
# to the canonical attachment object for that path, and
# then delete the original attachment.
for msg in a.messages.all():
surviving.messages.add(msg)
surviving.is_realm_public = surviving.is_realm_public or a.is_realm_public
surviving.save()
a.delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0073_custom_profile_fields'),
]
operations = [
migrations.RunPython(fix_duplicate_attachments)
]
|
<commit_before><commit_msg>attachment: Add migration to fix duplicate attachment objects.
The comment in the migration explains this change in detail.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-13 22:12
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Count
def fix_duplicate_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
"""Migration 0041 had a bug, where if multiple messages referenced the
same attachment, rather than creating a single attachment object
for all of them, we would incorrectly create one for each message.
This results in exceptions looking up the Attachment object
corresponding to a file that was used in multiple messages that
predate migration 0041.
This migration fixes this by removing the duplicates, moving their
messages onto a single canonical Attachment object (per path_id).
"""
Attachment = apps.get_model('zerver', 'Attachment')
# Loop through all groups of Attachment objects with the same `path_id`
for group in Attachment.objects.values('path_id').annotate(Count('id')).order_by().filter(id__count__gt=1):
# Sort by the minimum message ID, to find the first attachment
attachments = sorted(list(Attachment.objects.filter(path_id=group['path_id']).order_by("id")),
key = lambda x: min(x.messages.all().values_list('id')[0]))
surviving = attachments[0]
to_cleanup = attachments[1:]
for a in to_cleanup:
# For each duplicate attachment, we transfer its messages
# to the canonical attachment object for that path, and
# then delete the original attachment.
for msg in a.messages.all():
surviving.messages.add(msg)
surviving.is_realm_public = surviving.is_realm_public or a.is_realm_public
surviving.save()
a.delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0073_custom_profile_fields'),
]
operations = [
migrations.RunPython(fix_duplicate_attachments)
]
|
attachment: Add migration to fix duplicate attachment objects.
The comment in the migration explains this change in detail.# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-13 22:12
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Count
def fix_duplicate_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
"""Migration 0041 had a bug, where if multiple messages referenced the
same attachment, rather than creating a single attachment object
for all of them, we would incorrectly create one for each message.
This results in exceptions looking up the Attachment object
corresponding to a file that was used in multiple messages that
predate migration 0041.
This migration fixes this by removing the duplicates, moving their
messages onto a single canonical Attachment object (per path_id).
"""
Attachment = apps.get_model('zerver', 'Attachment')
# Loop through all groups of Attachment objects with the same `path_id`
for group in Attachment.objects.values('path_id').annotate(Count('id')).order_by().filter(id__count__gt=1):
# Sort by the minimum message ID, to find the first attachment
attachments = sorted(list(Attachment.objects.filter(path_id=group['path_id']).order_by("id")),
key = lambda x: min(x.messages.all().values_list('id')[0]))
surviving = attachments[0]
to_cleanup = attachments[1:]
for a in to_cleanup:
# For each duplicate attachment, we transfer its messages
# to the canonical attachment object for that path, and
# then delete the original attachment.
for msg in a.messages.all():
surviving.messages.add(msg)
surviving.is_realm_public = surviving.is_realm_public or a.is_realm_public
surviving.save()
a.delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0073_custom_profile_fields'),
]
operations = [
migrations.RunPython(fix_duplicate_attachments)
]
|
<commit_before><commit_msg>attachment: Add migration to fix duplicate attachment objects.
The comment in the migration explains this change in detail.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-13 22:12
from __future__ import unicode_literals
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Count
def fix_duplicate_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
"""Migration 0041 had a bug, where if multiple messages referenced the
same attachment, rather than creating a single attachment object
for all of them, we would incorrectly create one for each message.
This results in exceptions looking up the Attachment object
corresponding to a file that was used in multiple messages that
predate migration 0041.
This migration fixes this by removing the duplicates, moving their
messages onto a single canonical Attachment object (per path_id).
"""
Attachment = apps.get_model('zerver', 'Attachment')
# Loop through all groups of Attachment objects with the same `path_id`
for group in Attachment.objects.values('path_id').annotate(Count('id')).order_by().filter(id__count__gt=1):
# Sort by the minimum message ID, to find the first attachment
attachments = sorted(list(Attachment.objects.filter(path_id=group['path_id']).order_by("id")),
key = lambda x: min(x.messages.all().values_list('id')[0]))
surviving = attachments[0]
to_cleanup = attachments[1:]
for a in to_cleanup:
# For each duplicate attachment, we transfer its messages
# to the canonical attachment object for that path, and
# then delete the original attachment.
for msg in a.messages.all():
surviving.messages.add(msg)
surviving.is_realm_public = surviving.is_realm_public or a.is_realm_public
surviving.save()
a.delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0073_custom_profile_fields'),
]
operations = [
migrations.RunPython(fix_duplicate_attachments)
]
|
|
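The group-then-merge idea in this migration is reusable outside Django. Here is a minimal pure-Python sketch of the same logic, with plain dicts standing in for Attachment rows (the field names mirror the migration; the sample data is invented):
from collections import defaultdict
attachments = [
    {'id': 1, 'path_id': 'a/b', 'message_ids': {11}, 'is_realm_public': False},
    {'id': 2, 'path_id': 'a/b', 'message_ids': {10}, 'is_realm_public': True},
    {'id': 3, 'path_id': 'c/d', 'message_ids': {12}, 'is_realm_public': False},
]
groups = defaultdict(list)
for a in attachments:
    groups[a['path_id']].append(a)
canonical = []
for path_id, dupes in groups.items():
    # Keep the attachment whose earliest message is oldest, as the migration does.
    dupes.sort(key=lambda a: min(a['message_ids']))
    keeper = dupes[0]
    for extra in dupes[1:]:
        keeper['message_ids'] |= extra['message_ids']          # transfer messages
        keeper['is_realm_public'] |= extra['is_realm_public']  # widen visibility
    canonical.append(keeper)
print(canonical)  # one surviving record per path_id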
086351fb56e2c777bcd6b332c1550acb46fc6fa6
|
src/ggrc_basic_permissions/roles/Auditor.py
|
src/ggrc_basic_permissions/roles/Auditor.py
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Comment",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Comment",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
|
Allow auditors to CR comments in context
|
Allow auditors to CR comments in context
|
Python
|
apache-2.0
|
edofic/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
Allow auditors to CR comments in context
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Comment",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Comment",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
|
<commit_before># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
<commit_msg>Allow auditors to CR comments in context<commit_after>
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Comment",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Comment",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
Allow auditors to CR comments in context# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Comment",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Comment",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
|
<commit_before># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
<commit_msg>Allow auditors to CR comments in context<commit_after># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "Audit"
description = """
The permissions required by an auditor to access relevant resources for the
program being audited.
"""
permissions = {
"read": [
"Audit",
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Meeting",
"ObjectDocument",
"ObjectPerson",
"Relationship",
"Document",
"Meeting",
"UserRole",
"Comment",
"Context",
],
"create": [
"Request",
"ControlAssessment",
"Issue",
"Relationship",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Comment",
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse"
],
"delete": [
"Request",
"ControlAssessment",
"Issue",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
],
}
|
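A role module like this is pure data; enforcement lives elsewhere in ggrc_basic_permissions. A hypothetical checker over the same dictionary shape (not GGRC's actual API, just an illustration of how the lists are read, assuming '__GGRC_ALL__' means "every resource") could look like:
def is_allowed(permissions, action, resource):
    """True if this role grants `action` on `resource`."""
    allowed = permissions.get(action, [])
    return '__GGRC_ALL__' in allowed or resource in allowed
role = {'read': ['Audit', 'Comment'], 'view_object_page': ['__GGRC_ALL__']}
print(is_allowed(role, 'read', 'Comment'))        # True after this change
print(is_allowed(role, 'delete', 'Comment'))      # False: auditors cannot delete comments
print(is_allowed(role, 'view_object_page', 'X'))  # True via the wildcard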
244068d0689973ff462b455ac8db849b0839e694
|
democracy/tests/test_hearing_admin.py
|
democracy/tests/test_hearing_admin.py
|
# -*- coding: utf-8 -*-
import pytest
from django.contrib import admin
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hearing_admin_renders(admin_client):
url = reverse('admin:democracy_hearing_add')
response = admin_client.get(url)
|
Add rudimentary smoke test for hearing admin
|
Add rudimentary smoke test for hearing admin
|
Python
|
mit
|
vikoivun/kerrokantasi,stephawe/kerrokantasi,vikoivun/kerrokantasi,stephawe/kerrokantasi,stephawe/kerrokantasi,vikoivun/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi
|
Add rudimentary smoke test for hearing admin
|
# -*- coding: utf-8 -*-
import pytest
from django.contrib import admin
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hearing_admin_renders(admin_client):
url = reverse('admin:democracy_hearing_add')
response = admin_client.get(url)
|
<commit_before><commit_msg>Add rudimentary smoke test for hearing admin<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
from django.contrib import admin
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hearing_admin_renders(admin_client):
url = reverse('admin:democracy_hearing_add')
response = admin_client.get(url)
|
Add rudimentary smoke test for hearing admin# -*- coding: utf-8 -*-
import pytest
from django.contrib import admin
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hearing_admin_renders(admin_client):
url = reverse('admin:democracy_hearing_add')
response = admin_client.get(url)
|
<commit_before><commit_msg>Add rudimentary smoke test for hearing admin<commit_after># -*- coding: utf-8 -*-
import pytest
from django.contrib import admin
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hearing_admin_renders(admin_client):
url = reverse('admin:democracy_hearing_add')
response = admin_client.get(url)
|
|
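The committed test issues the GET but never inspects `response`, so it only fails if rendering raises. A slightly stronger variant with one added assertion (assuming the admin add view returns 200 for pytest-django's logged-in `admin_client`) would be:
# -*- coding: utf-8 -*-
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_hearing_admin_renders(admin_client):
    url = reverse('admin:democracy_hearing_add')
    response = admin_client.get(url)
    assert response.status_code == 200  # the add form rendered; no redirect or server error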
6a767b028647220199bd90c1b26226802a089a6f
|
models/official/detection/export_tflite_model.py
|
models/official/detection/export_tflite_model.py
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""A binary to export the tflite model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
flags.DEFINE_string('saved_model_dir', None, 'The saved model directory.')
flags.DEFINE_string('output_dir', None, 'The export tflite model directory.')
flags.mark_flag_as_required('saved_model_dir')
flags.mark_flag_as_required('output_dir')
def main(argv):
del argv # Unused.
converter = tf.lite.TFLiteConverter.from_saved_model(FLAGS.saved_model_dir)
converter.experimental_new_converter = True
tflite_model = converter.convert()
tflite_model_path = os.path.join(FLAGS.output_dir, 'model.tflite')
with tf.gfile.GFile(tflite_model_path, 'wb') as f:
f.write(tflite_model)
if __name__ == '__main__':
tf.app.run(main)
|
Add a binary to convert model to tflite supported format.
|
Add a binary to convert model to tflite supported format.
PiperOrigin-RevId: 302497553
|
Python
|
apache-2.0
|
tensorflow/tpu,tensorflow/tpu,tensorflow/tpu,tensorflow/tpu
|
Add a binary to convert model to tflite supported format.
PiperOrigin-RevId: 302497553
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""A binary to export the tflite model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
flags.DEFINE_string('saved_model_dir', None, 'The saved model directory.')
flags.DEFINE_string('output_dir', None, 'The export tflite model directory.')
flags.mark_flag_as_required('saved_model_dir')
flags.mark_flag_as_required('output_dir')
def main(argv):
del argv # Unused.
converter = tf.lite.TFLiteConverter.from_saved_model(FLAGS.saved_model_dir)
converter.experimental_new_converter = True
tflite_model = converter.convert()
tflite_model_path = os.path.join(FLAGS.output_dir, 'model.tflite')
with tf.gfile.GFile(tflite_model_path, 'wb') as f:
f.write(tflite_model)
if __name__ == '__main__':
tf.app.run(main)
|
<commit_before><commit_msg>Add a binary to convert model to tflite supported format.
PiperOrigin-RevId: 302497553<commit_after>
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""A binary to export the tflite model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
flags.DEFINE_string('saved_model_dir', None, 'The saved model directory.')
flags.DEFINE_string('output_dir', None, 'The export tflite model directory.')
flags.mark_flag_as_required('saved_model_dir')
flags.mark_flag_as_required('output_dir')
def main(argv):
del argv # Unused.
converter = tf.lite.TFLiteConverter.from_saved_model(FLAGS.saved_model_dir)
converter.experimental_new_converter = True
tflite_model = converter.convert()
tflite_model_path = os.path.join(FLAGS.output_dir, 'model.tflite')
with tf.gfile.GFile(tflite_model_path, 'wb') as f:
f.write(tflite_model)
if __name__ == '__main__':
tf.app.run(main)
|
Add a binary to convert model to tflite supported format.
PiperOrigin-RevId: 302497553# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""A binary to export the tflite model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
flags.DEFINE_string('saved_model_dir', None, 'The saved model directory.')
flags.DEFINE_string('output_dir', None, 'The export tflite model directory.')
flags.mark_flag_as_required('saved_model_dir')
flags.mark_flag_as_required('output_dir')
def main(argv):
del argv # Unused.
converter = tf.lite.TFLiteConverter.from_saved_model(FLAGS.saved_model_dir)
converter.experimental_new_converter = True
tflite_model = converter.convert()
tflite_model_path = os.path.join(FLAGS.output_dir, 'model.tflite')
with tf.gfile.GFile(tflite_model_path, 'wb') as f:
f.write(tflite_model)
if __name__ == '__main__':
tf.app.run(main)
|
<commit_before><commit_msg>Add a binary to convert model to tflite supported format.
PiperOrigin-RevId: 302497553<commit_after># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""A binary to export the tflite model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
flags.DEFINE_string('saved_model_dir', None, 'The saved model directory.')
flags.DEFINE_string('output_dir', None, 'The export tflite model directory.')
flags.mark_flag_as_required('saved_model_dir')
flags.mark_flag_as_required('output_dir')
def main(argv):
del argv # Unused.
converter = tf.lite.TFLiteConverter.from_saved_model(FLAGS.saved_model_dir)
converter.experimental_new_converter = True
tflite_model = converter.convert()
tflite_model_path = os.path.join(FLAGS.output_dir, 'model.tflite')
with tf.gfile.GFile(tflite_model_path, 'wb') as f:
f.write(tflite_model)
if __name__ == '__main__':
tf.app.run(main)
|
|
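Assuming placeholder paths, the binary is driven from the shell, and the exported file can then be sanity-checked by loading it back through the standard TFLite interpreter API:
# python export_tflite_model.py --saved_model_dir=/tmp/saved_model --output_dir=/tmp/out
import tensorflow.compat.v1 as tf
interpreter = tf.lite.Interpreter(model_path='/tmp/out/model.tflite')
interpreter.allocate_tensors()
print(interpreter.get_input_details())   # input tensors of the converted graph
print(interpreter.get_output_details())  # output tensors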
739cca47c700a3a5965465c958297eee2de8f8b6
|
lib/bridgedb/test/test_util.py
|
lib/bridgedb/test/test_util.py
|
# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <isis@torproject.org>
# please also see AUTHORS file
# :copyright: (c) 2014, Isis Lovecruft
# (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.util` module."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
from twisted.trial import unittest
from bridgedb import util
class MiscLoggingUtilTests(unittest.TestCase):
"""Unittests for miscellaneous logging functions in :mod:`bridgedb.util`."""
def test_getLogHandlers(self):
"""util._getLogHandlers() should return ['rotating', 'console'] if
both stderr and logfile logging are enabled.
"""
logHandlers = util._getLogHandlers()
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 2)
def test_getLogHandlers_disableStderr(self):
"""util._getLogHandlers() should return ['rotating'] if stderr logging
is disabled.
"""
logHandlers = util._getLogHandlers(logToStderr=False)
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 1)
self.assertTrue('console' not in logHandlers)
def test_getRotatingFileHandler(self):
"""_getRotatingFileHandler() should create a file with 0600
permissions (os.ST_WRITE | os.ST_APPEND).
"""
filename = str(self.id()) + '.log'
logHandler = util._getRotatingFileHandler(filename)
self.assertTrue(os.path.isfile(filename))
self.assertEqual(os.stat_result(os.stat(filename)).st_mode, 33152)
self.assertIsInstance(logHandler,
util.logging.handlers.RotatingFileHandler)
def test_configureLogging(self):
"""Configure logging should be callable without borking anything."""
from bridgedb.persistent import Conf
util.configureLogging(Conf())
util.logging.info("BridgeDB's email address: bridges@torproject.org")
|
Add unittests for bridgedb.util module.
|
Add unittests for bridgedb.util module.
|
Python
|
bsd-3-clause
|
mmaker/bridgedb,pagea/bridgedb,mmaker/bridgedb,pagea/bridgedb
|
Add unittests for bridgedb.util module.
|
# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <isis@torproject.org>
# please also see AUTHORS file
# :copyright: (c) 2014, Isis Lovecruft
# (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.util` module."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
from twisted.trial import unittest
from bridgedb import util
class MiscLoggingUtilTests(unittest.TestCase):
"""Unittests for miscellaneous logging functions in :mod:`bridgedb.util`."""
def test_getLogHandlers(self):
"""util._getLogHandlers() should return ['rotating', 'console'] if
both stderr and logfile logging are enabled.
"""
logHandlers = util._getLogHandlers()
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 2)
def test_getLogHandlers_disableStderr(self):
"""util._getLogHandlers() should return ['rotating'] if stderr logging
is disabled.
"""
logHandlers = util._getLogHandlers(logToStderr=False)
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 1)
self.assertTrue('console' not in logHandlers)
def test_getRotatingFileHandler(self):
"""_getRotatingFileHandler() should create a file with 0600
permissions (os.ST_WRITE | os.ST_APPEND).
"""
filename = str(self.id()) + '.log'
logHandler = util._getRotatingFileHandler(filename)
self.assertTrue(os.path.isfile(filename))
self.assertEqual(os.stat_result(os.stat(filename)).st_mode, 33152)
self.assertIsInstance(logHandler,
util.logging.handlers.RotatingFileHandler)
def test_configureLogging(self):
"""Configure logging should be callable without borking anything."""
from bridgedb.persistent import Conf
util.configureLogging(Conf())
util.logging.info("BridgeDB's email address: bridges@torproject.org")
|
<commit_before><commit_msg>Add unittests for bridgedb.util module.<commit_after>
|
# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <isis@torproject.org>
# please also see AUTHORS file
# :copyright: (c) 2014, Isis Lovecruft
# (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.util` module."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
from twisted.trial import unittest
from bridgedb import util
class MiscLoggingUtilTests(unittest.TestCase):
"""Unittests for miscellaneous logging functions in :mod:`bridgedb.util`."""
def test_getLogHandlers(self):
"""util._getLogHandlers() should return ['rotating', 'console'] if
both stderr and logfile logging are enabled.
"""
logHandlers = util._getLogHandlers()
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 2)
def test_getLogHandlers_disableStderr(self):
"""util._getLogHandlers() should return ['rotating'] if stderr logging
is disabled.
"""
logHandlers = util._getLogHandlers(logToStderr=False)
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 1)
self.assertTrue('console' not in logHandlers)
def test_getRotatingFileHandler(self):
"""_getRotatingFileHandler() should create a file with 0600
permissions (os.ST_WRITE | os.ST_APPEND).
"""
filename = str(self.id()) + '.log'
logHandler = util._getRotatingFileHandler(filename)
self.assertTrue(os.path.isfile(filename))
self.assertEqual(os.stat_result(os.stat(filename)).st_mode, 33152)
self.assertIsInstance(logHandler,
util.logging.handlers.RotatingFileHandler)
def test_configureLogging(self):
"""Configure logging should be callable without borking anything."""
from bridgedb.persistent import Conf
util.configureLogging(Conf())
util.logging.info("BridgeDB's email address: bridges@torproject.org")
|
Add unittests for bridgedb.util module.# -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <isis@torproject.org>
# please also see AUTHORS file
# :copyright: (c) 2014, Isis Lovecruft
# (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.util` module."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
from twisted.trial import unittest
from bridgedb import util
class MiscLoggingUtilTests(unittest.TestCase):
"""Unittests for miscellaneous logging functions in :mod:`bridgedb.util`."""
def test_getLogHandlers(self):
"""util._getLogHandlers() should return ['rotating', 'console'] if
both stderr and logfile logging are enabled.
"""
logHandlers = util._getLogHandlers()
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 2)
def test_getLogHandlers_disableStderr(self):
"""util._getLogHandlers() should return ['rotating'] if stderr logging
is disabled.
"""
logHandlers = util._getLogHandlers(logToStderr=False)
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 1)
self.assertTrue('console' not in logHandlers)
def test_getRotatingFileHandler(self):
"""_getRotatingFileHandler() should create a file with 0600
permissions (os.ST_WRITE | os.ST_APPEND).
"""
filename = str(self.id()) + '.log'
logHandler = util._getRotatingFileHandler(filename)
self.assertTrue(os.path.isfile(filename))
self.assertEqual(os.stat_result(os.stat(filename)).st_mode, 33152)
self.assertIsInstance(logHandler,
util.logging.handlers.RotatingFileHandler)
def test_configureLogging(self):
"""Configure logging should be callable without borking anything."""
from bridgedb.persistent import Conf
util.configureLogging(Conf())
util.logging.info("BridgeDB's email address: bridges@torproject.org")
|
<commit_before><commit_msg>Add unittests for bridgedb.util module.<commit_after># -*- coding: utf-8 -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <isis@torproject.org>
# please also see AUTHORS file
# :copyright: (c) 2014, Isis Lovecruft
# (c) 2007-2014, The Tor Project, Inc.
# (c) 2007-2014, all entities within the AUTHORS file
# :license: see LICENSE for licensing information
"""Unittests for the :mod:`bridgedb.util` module."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
from twisted.trial import unittest
from bridgedb import util
class MiscLoggingUtilTests(unittest.TestCase):
"""Unittests for miscellaneous logging functions in :mod:`bridgedb.util`."""
def test_getLogHandlers(self):
"""util._getLogHandlers() should return ['rotating', 'console'] if
both stderr and logfile logging are enabled.
"""
logHandlers = util._getLogHandlers()
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 2)
def test_getLogHandlers_disableStderr(self):
"""util._getLogHandlers() should return ['rotating'] if stderr logging
is disabled.
"""
logHandlers = util._getLogHandlers(logToStderr=False)
self.assertIsInstance(logHandlers, list)
self.assertEqual(len(logHandlers), 1)
self.assertTrue('console' not in logHandlers)
def test_getRotatingFileHandler(self):
"""_getRotatingFileHandler() should create a file with 0600
permissions (os.ST_WRITE | os.ST_APPEND).
"""
filename = str(self.id()) + '.log'
logHandler = util._getRotatingFileHandler(filename)
self.assertTrue(os.path.isfile(filename))
self.assertEqual(os.stat_result(os.stat(filename)).st_mode, 33152)
self.assertIsInstance(logHandler,
util.logging.handlers.RotatingFileHandler)
def test_configureLogging(self):
"""Configure logging should be callable without borking anything."""
from bridgedb.persistent import Conf
util.configureLogging(Conf())
util.logging.info("BridgeDB's email address: bridges@torproject.org")
|
|
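The magic number 33152 in `test_getRotatingFileHandler` is simply the regular-file type bit combined with 0600 permissions (the docstring's `os.ST_WRITE | os.ST_APPEND` aside notwithstanding); the `stat` module makes the arithmetic explicit:
import stat
mode = stat.S_IFREG | stat.S_IRUSR | stat.S_IWUSR  # regular file, rw-------
print(mode, oct(mode))  # 33152 0o100600
assert mode == 33152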
f6572a125d8129cdf22ea8a1f5e60416d10b3f5b
|
lintcode/Medium/182_Delete_Digits.py
|
lintcode/Medium/182_Delete_Digits.py
|
class Solution:
"""
@param A: A positive integer which has N digits, A is a string.
@param k: Remove k digits.
@return: A string
"""
def DeleteDigits(self, A, k):
        # write your code here
c = 0
res = A
while (c < k):
init = res[0]
break_out = False
for i in range(1, len(res)):
if (int(res[i]) < int(res[i - 1])):
res = res[:i - 1] + res[i:]
break_out = True
break
if (not break_out):
res = res[:-1]
c += 1
i = 0
c = 0
while (c < len(res)):
if (res[c] == "0" and c != len(res) - 1):
i += 1
if (res[c] != "0"):
break
c += 1
return res[i:]
|
Add solution to lintcode question 182
|
Add solution to lintcode question 182
|
Python
|
mit
|
Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode
|
Add solution to lintcode question 182
|
class Solution:
"""
@param A: A positive integer which has N digits, A is a string.
@param k: Remove k digits.
@return: A string
"""
def DeleteDigits(self, A, k):
        # write your code here
c = 0
res = A
while (c < k):
init = res[0]
break_out = False
for i in range(1, len(res)):
if (int(res[i]) < int(res[i - 1])):
res = res[:i - 1] + res[i:]
break_out = True
break
if (not break_out):
res = res[:-1]
c += 1
i = 0
c = 0
while (c < len(res)):
if (res[c] == "0" and c != len(res) - 1):
i += 1
if (res[c] != "0"):
break
c += 1
return res[i:]
|
<commit_before><commit_msg>Add solution to lintcode question 182<commit_after>
|
class Solution:
"""
@param A: A positive integer which has N digits, A is a string.
@param k: Remove k digits.
@return: A string
"""
def DeleteDigits(self, A, k):
        # write your code here
c = 0
res = A
while (c < k):
init = res[0]
break_out = False
for i in range(1, len(res)):
if (int(res[i]) < int(res[i - 1])):
res = res[:i - 1] + res[i:]
break_out = True
break
if (not break_out):
res = res[:-1]
c += 1
i = 0
c = 0
while (c < len(res)):
if (res[c] == "0" and c != len(res) - 1):
i += 1
if (res[c] != "0"):
break
c += 1
return res[i:]
|
Add solution to lintcode question 182class Solution:
"""
@param A: A positive integer which has N digits, A is a string.
@param k: Remove k digits.
@return: A string
"""
def DeleteDigits(self, A, k):
        # write your code here
c = 0
res = A
while (c < k):
init = res[0]
break_out = False
for i in range(1, len(res)):
if (int(res[i]) < int(res[i - 1])):
res = res[:i - 1] + res[i:]
break_out = True
break
if (not break_out):
res = res[:-1]
c += 1
i = 0
c = 0
while (c < len(res)):
if (res[c] == "0" and c != len(res) - 1):
i += 1
if (res[c] != "0"):
break
c += 1
return res[i:]
|
<commit_before><commit_msg>Add solution to lintcode question 182<commit_after>class Solution:
"""
@param A: A positive integer which has N digits, A is a string.
@param k: Remove k digits.
@return: A string
"""
def DeleteDigits(self, A, k):
        # write your code here
c = 0
res = A
while (c < k):
init = res[0]
break_out = False
for i in range(1, len(res)):
if (int(res[i]) < int(res[i - 1])):
res = res[:i - 1] + res[i:]
break_out = True
break
if (not break_out):
res = res[:-1]
c += 1
i = 0
c = 0
while (c < len(res)):
if (res[c] == "0" and c != len(res) - 1):
i += 1
if (res[c] != "0"):
break
c += 1
return res[i:]
|
|
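This accepted solution rescans the string once per removal, so it is O(n*k). The classic greedy alternative keeps a non-decreasing stack of digits and runs in O(n); a sketch with the same input/output contract (returning '0' if every digit is removed):
def delete_digits(A, k):
    stack = []
    for d in A:
        # Pop a larger previous digit while removals remain: a smaller digit
        # in an earlier position always produces a smaller number.
        while k > 0 and stack and stack[-1] > d:
            stack.pop()
            k -= 1
        stack.append(d)
    if k:  # digits are now non-decreasing, so trim the largest from the end
        stack = stack[:-k]
    return ''.join(stack).lstrip('0') or '0'
print(delete_digits('178542', 4))  # '12'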
e875b89f9370da901d72d4bb7b0cc581cde541de
|
teuthology/task/rbd_fsx.py
|
teuthology/task/rbd_fsx.py
|
import contextlib
import logging
from ..orchestra import run
from teuthology.parallel import parallel
log = logging.getLogger(__name__)
@contextlib.contextmanager
def task(ctx, config):
"""
Run fsx on an rbd image.
Currently this requires running as client.admin
to create a pool.
Specify which clients to run on as a list::
tasks:
ceph:
rbd_fsx:
clients: [client.0, client.1]
You can optionally change some properties of fsx:
tasks:
ceph:
rbd_fsx:
clients: <list of clients>
seed: <random seed number, or 0 to use the time>
ops: <number of operations to do>
size: <maximum image size in bytes>
"""
log.info('starting rbd_fsx...')
with parallel() as p:
for role in config['clients']:
p.spawn(_run_one_client, ctx, config, role)
yield
def _run_one_client(ctx, config, role):
(remote,) = ctx.cluster.only(role).remotes.iterkeys()
remote.run(
args=[
'CEPH_CONF=/tmp/cephtest/ceph.conf',
'LD_LIBRARY_PATH=/tmp/cephtest/binary/usr/local/lib',
'/tmp/cephtest/enable-coredump',
'/tmp/cephtest/binary/usr/local/bin/ceph-coverage',
'/tmp/cephtest/archive/coverage',
'/tmp/cephtest/binary/usr/local/bin/test_librbd_fsx',
'-d',
'-W', '-R', # mmap doesn't work with rbd
'-P', '/tmp/cephtest/archive',
'-t', '4194304',
'-l', str(config.get('size', 1073741824)),
'-S', str(config.get('seed', 0)),
'-N', str(config.get('ops', 1000)),
'pool_{pool}'.format(pool=role),
'image_{image}'.format(image=role),
],
)
|
Add task for running fsx on an rbd image.
|
Add task for running fsx on an rbd image.
|
Python
|
mit
|
robbat2/teuthology,zhouyuan/teuthology,dreamhost/teuthology,SUSE/teuthology,SUSE/teuthology,ivotron/teuthology,tchaikov/teuthology,tchaikov/teuthology,t-miyamae/teuthology,ceph/teuthology,ceph/teuthology,zhouyuan/teuthology,ktdreyer/teuthology,yghannam/teuthology,ktdreyer/teuthology,robbat2/teuthology,dmick/teuthology,caibo2014/teuthology,dreamhost/teuthology,michaelsevilla/teuthology,SUSE/teuthology,michaelsevilla/teuthology,caibo2014/teuthology,yghannam/teuthology,dmick/teuthology,dmick/teuthology,ivotron/teuthology,t-miyamae/teuthology
|
Add task for running fsx on an rbd image.
|
import contextlib
import logging
from ..orchestra import run
from teuthology.parallel import parallel
log = logging.getLogger(__name__)
@contextlib.contextmanager
def task(ctx, config):
"""
Run fsx on an rbd image.
Currently this requires running as client.admin
to create a pool.
Specify which clients to run on as a list::
tasks:
ceph:
rbd_fsx:
clients: [client.0, client.1]
You can optionally change some properties of fsx:
tasks:
ceph:
rbd_fsx:
clients: <list of clients>
seed: <random seed number, or 0 to use the time>
ops: <number of operations to do>
size: <maximum image size in bytes>
"""
log.info('starting rbd_fsx...')
with parallel() as p:
for role in config['clients']:
p.spawn(_run_one_client, ctx, config, role)
yield
def _run_one_client(ctx, config, role):
(remote,) = ctx.cluster.only(role).remotes.iterkeys()
remote.run(
args=[
'CEPH_CONF=/tmp/cephtest/ceph.conf',
'LD_LIBRARY_PATH=/tmp/cephtest/binary/usr/local/lib',
'/tmp/cephtest/enable-coredump',
'/tmp/cephtest/binary/usr/local/bin/ceph-coverage',
'/tmp/cephtest/archive/coverage',
'/tmp/cephtest/binary/usr/local/bin/test_librbd_fsx',
'-d',
'-W', '-R', # mmap doesn't work with rbd
'-P', '/tmp/cephtest/archive',
'-t', '4194304',
'-l', str(config.get('size', 1073741824)),
'-S', str(config.get('seed', 0)),
'-N', str(config.get('ops', 1000)),
'pool_{pool}'.format(pool=role),
'image_{image}'.format(image=role),
],
)
|
<commit_before><commit_msg>Add task for running fsx on an rbd image.<commit_after>
|
import contextlib
import logging
from ..orchestra import run
from teuthology.parallel import parallel
log = logging.getLogger(__name__)
@contextlib.contextmanager
def task(ctx, config):
"""
Run fsx on an rbd image.
Currently this requires running as client.admin
to create a pool.
Specify which clients to run on as a list::
tasks:
ceph:
rbd_fsx:
clients: [client.0, client.1]
You can optionally change some properties of fsx:
tasks:
ceph:
rbd_fsx:
clients: <list of clients>
seed: <random seed number, or 0 to use the time>
ops: <number of operations to do>
size: <maximum image size in bytes>
"""
log.info('starting rbd_fsx...')
with parallel() as p:
for role in config['clients']:
p.spawn(_run_one_client, ctx, config, role)
yield
def _run_one_client(ctx, config, role):
(remote,) = ctx.cluster.only(role).remotes.iterkeys()
remote.run(
args=[
'CEPH_CONF=/tmp/cephtest/ceph.conf',
'LD_LIBRARY_PATH=/tmp/cephtest/binary/usr/local/lib',
'/tmp/cephtest/enable-coredump',
'/tmp/cephtest/binary/usr/local/bin/ceph-coverage',
'/tmp/cephtest/archive/coverage',
'/tmp/cephtest/binary/usr/local/bin/test_librbd_fsx',
'-d',
'-W', '-R', # mmap doesn't work with rbd
'-P', '/tmp/cephtest/archive',
'-t', '4194304',
'-l', str(config.get('size', 1073741824)),
'-S', str(config.get('seed', 0)),
'-N', str(config.get('ops', 1000)),
'pool_{pool}'.format(pool=role),
'image_{image}'.format(image=role),
],
)
|
Add task for running fsx on an rbd image.import contextlib
import logging
from ..orchestra import run
from teuthology.parallel import parallel
log = logging.getLogger(__name__)
@contextlib.contextmanager
def task(ctx, config):
"""
Run fsx on an rbd image.
Currently this requires running as client.admin
to create a pool.
Specify which clients to run on as a list::
tasks:
ceph:
rbd_fsx:
clients: [client.0, client.1]
You can optionally change some properties of fsx:
tasks:
ceph:
rbd_fsx:
clients: <list of clients>
seed: <random seed number, or 0 to use the time>
ops: <number of operations to do>
size: <maximum image size in bytes>
"""
log.info('starting rbd_fsx...')
with parallel() as p:
for role in config['clients']:
p.spawn(_run_one_client, ctx, config, role)
yield
def _run_one_client(ctx, config, role):
(remote,) = ctx.cluster.only(role).remotes.iterkeys()
remote.run(
args=[
'CEPH_CONF=/tmp/cephtest/ceph.conf',
'LD_LIBRARY_PATH=/tmp/cephtest/binary/usr/local/lib',
'/tmp/cephtest/enable-coredump',
'/tmp/cephtest/binary/usr/local/bin/ceph-coverage',
'/tmp/cephtest/archive/coverage',
'/tmp/cephtest/binary/usr/local/bin/test_librbd_fsx',
'-d',
'-W', '-R', # mmap doesn't work with rbd
'-P', '/tmp/cephtest/archive',
'-t', '4194304',
'-l', str(config.get('size', 1073741824)),
'-S', str(config.get('seed', 0)),
'-N', str(config.get('ops', 1000)),
'pool_{pool}'.format(pool=role),
'image_{image}'.format(image=role),
],
)
|
<commit_before><commit_msg>Add task for running fsx on an rbd image.<commit_after>import contextlib
import logging
from ..orchestra import run
from teuthology.parallel import parallel
log = logging.getLogger(__name__)
@contextlib.contextmanager
def task(ctx, config):
"""
Run fsx on an rbd image.
Currently this requires running as client.admin
to create a pool.
Specify which clients to run on as a list::
tasks:
ceph:
rbd_fsx:
clients: [client.0, client.1]
You can optionally change some properties of fsx:
tasks:
ceph:
rbd_fsx:
clients: <list of clients>
seed: <random seed number, or 0 to use the time>
ops: <number of operations to do>
size: <maximum image size in bytes>
"""
log.info('starting rbd_fsx...')
with parallel() as p:
for role in config['clients']:
p.spawn(_run_one_client, ctx, config, role)
yield
def _run_one_client(ctx, config, role):
(remote,) = ctx.cluster.only(role).remotes.iterkeys()
remote.run(
args=[
'CEPH_CONF=/tmp/cephtest/ceph.conf',
'LD_LIBRARY_PATH=/tmp/cephtest/binary/usr/local/lib',
'/tmp/cephtest/enable-coredump',
'/tmp/cephtest/binary/usr/local/bin/ceph-coverage',
'/tmp/cephtest/archive/coverage',
'/tmp/cephtest/binary/usr/local/bin/test_librbd_fsx',
'-d',
'-W', '-R', # mmap doesn't work with rbd
'-P', '/tmp/cephtest/archive',
'-t', '4194304',
'-l', str(config.get('size', 1073741824)),
'-S', str(config.get('seed', 0)),
'-N', str(config.get('ops', 1000)),
'pool_{pool}'.format(pool=role),
'image_{image}'.format(image=role),
],
)
|
|
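For readers without the teuthology tree handy: `parallel()` fans one task out per client and joins them all, propagating failures. A rough stdlib stand-in for that spawn/join shape (purely illustrative, not teuthology's implementation):
from concurrent.futures import ThreadPoolExecutor
def run_one_client(role):
    print('running fsx for', role)
clients = ['client.0', 'client.1']
with ThreadPoolExecutor() as pool:
    futures = [pool.submit(run_one_client, role) for role in clients]
    for f in futures:
        f.result()  # join; re-raises the first worker exception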
b732ea4bd64a05c7d75ddbce0925d5402142c67d
|
scripts/add_pos.py
|
scripts/add_pos.py
|
#!/usr/bin/env python
import csv
from itertools import tee
import nltk
MATCH_FILE = 'data/6705bigrams-PopewDryden.txt'
OUTPUT_FILE = 'data/6705bigrams-Output.txt'
def tokenize(text):
"""This handles tokenizing and normalizing everything."""
return [
token.lower()
for token in nltk.wordpunct_tokenize(text)
if token.isalnum()
]
# This is from the Python docs
# https://docs.python.org/3/library/itertools.html
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = tee(iterable)
next(b, None)
return zip(a, b)
def main():
with open(MATCH_FILE) as f:
with open(OUTPUT_FILE, 'w') as fout:
reader = csv.reader(f, 'excel-tab')
writer = csv.writer(fout, 'excel-tab')
for row in reader:
bigram = tuple(tokenize(row[3]))
line = tokenize(row[5])
row.insert(5, '-1')
for i, pair in enumerate(pairwise(line)):
if pair == bigram:
row[5] = i + 1
break
writer.writerow(row)
if __name__ == '__main__':
main()
|
Add line position to TSV file.
|
Add line position to TSV file.
|
Python
|
apache-2.0
|
erochest/c18sgml
|
Add line position to TSV file.
|
#!/usr/bin/env python
import csv
from itertools import tee
import nltk
MATCH_FILE = 'data/6705bigrams-PopewDryden.txt'
OUTPUT_FILE = 'data/6705bigrams-Output.txt'
def tokenize(text):
"""This handles tokenizing and normalizing everything."""
return [
token.lower()
for token in nltk.wordpunct_tokenize(text)
if token.isalnum()
]
# This is from the Python docs
# https://docs.python.org/3/library/itertools.html
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = tee(iterable)
next(b, None)
return zip(a, b)
def main():
with open(MATCH_FILE) as f:
with open(OUTPUT_FILE, 'w') as fout:
reader = csv.reader(f, 'excel-tab')
writer = csv.writer(fout, 'excel-tab')
for row in reader:
bigram = tuple(tokenize(row[3]))
line = tokenize(row[5])
row.insert(5, '-1')
for i, pair in enumerate(pairwise(line)):
if pair == bigram:
row[5] = i + 1
break
writer.writerow(row)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add line position to TSV file.<commit_after>
|
#!/usr/bin/env python
import csv
from itertools import tee
import nltk
MATCH_FILE = 'data/6705bigrams-PopewDryden.txt'
OUTPUT_FILE = 'data/6705bigrams-Output.txt'
def tokenize(text):
"""This handles tokenizing and normalizing everything."""
return [
token.lower()
for token in nltk.wordpunct_tokenize(text)
if token.isalnum()
]
# This is from the Python docs
# https://docs.python.org/3/library/itertools.html
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = tee(iterable)
next(b, None)
return zip(a, b)
def main():
with open(MATCH_FILE) as f:
with open(OUTPUT_FILE, 'w') as fout:
reader = csv.reader(f, 'excel-tab')
writer = csv.writer(fout, 'excel-tab')
for row in reader:
bigram = tuple(tokenize(row[3]))
line = tokenize(row[5])
row.insert(5, '-1')
for i, pair in enumerate(pairwise(line)):
if pair == bigram:
row[5] = i + 1
break
writer.writerow(row)
if __name__ == '__main__':
main()
|
Add line position to TSV file.#!/usr/bin/env python
import csv
from itertools import tee
import nltk
MATCH_FILE = 'data/6705bigrams-PopewDryden.txt'
OUTPUT_FILE = 'data/6705bigrams-Output.txt'
def tokenize(text):
"""This handles tokenizing and normalizing everything."""
return [
token.lower()
for token in nltk.wordpunct_tokenize(text)
if token.isalnum()
]
# This is from the Python docs
# https://docs.python.org/3/library/itertools.html
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = tee(iterable)
next(b, None)
return zip(a, b)
def main():
with open(MATCH_FILE) as f:
with open(OUTPUT_FILE, 'w') as fout:
reader = csv.reader(f, 'excel-tab')
writer = csv.writer(fout, 'excel-tab')
for row in reader:
bigram = tuple(tokenize(row[3]))
line = tokenize(row[5])
row.insert(5, '-1')
for i, pair in enumerate(pairwise(line)):
if pair == bigram:
row[5] = i + 1
break
writer.writerow(row)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add line position to TSV file.<commit_after>#!/usr/bin/env python
import csv
from itertools import tee
import nltk
MATCH_FILE = 'data/6705bigrams-PopewDryden.txt'
OUTPUT_FILE = 'data/6705bigrams-Output.txt'
def tokenize(text):
"""This handles tokenizing and normalizing everything."""
return [
token.lower()
for token in nltk.wordpunct_tokenize(text)
if token.isalnum()
]
# This is from the Python docs
# https://docs.python.org/3/library/itertools.html
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = tee(iterable)
next(b, None)
return zip(a, b)
def main():
with open(MATCH_FILE) as f:
with open(OUTPUT_FILE, 'w') as fout:
reader = csv.reader(f, 'excel-tab')
writer = csv.writer(fout, 'excel-tab')
for row in reader:
bigram = tuple(tokenize(row[3]))
line = tokenize(row[5])
row.insert(5, '-1')
for i, pair in enumerate(pairwise(line)):
if pair == bigram:
row[5] = i + 1
break
writer.writerow(row)
if __name__ == '__main__':
main()
|
|
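A minimal, self-contained sketch of the bigram-position technique the record above implements: the itertools tee/pairwise recipe plus a 1-based scan. find_bigram and the plain str.split tokenizer are illustrative stand-ins (the commit itself tokenizes with nltk.wordpunct_tokenize), not part of the original code.
from itertools import tee
def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2,s3), ..."
    a, b = tee(iterable)
    next(b, None)  # advance the second iterator one step
    return zip(a, b)
def find_bigram(tokens, bigram):
    """Return the 1-based position of `bigram` in `tokens`, or -1 if absent."""
    for i, pair in enumerate(pairwise(tokens), start=1):
        if tuple(pair) == tuple(bigram):
            return i
    return -1
if __name__ == '__main__':
    line = "to err is human to forgive divine".split()
    print(find_bigram(line, ('to', 'forgive')))  # -> 5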
429cf2018a5480e7a2ab083b95bfe74d3cc8bfcb
|
scripts/exports.py
|
scripts/exports.py
|
"""
Some really simple export scripts.
"""
from __future__ import unicode_literals
import io
import iterutils
import pymongo
import twitterproj
def grouper(n, iterable, fillvalue=None):
"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
args = [iter(iterable)] * n
return iterutils.izip_longest(fillvalue=fillvalue, *args)
def by_count(filename, collection):
tweets, db, client = twitterproj.connect()
chunksize = int(1e6)
lines = []
with io.open(filename, 'w', encoding='utf-8') as fobj:
c = collection.find().sort('count', pymongo.DESCENDING)
for chunk in grouper(chunksize, c, None):
# Filler is None, so we make sure it is something.
lines = ["{},{}".format(doc['_id'], int(doc['count'])) for doc in chunk if doc]
            fobj.write('\n'.join(lines) + '\n')  # newline keeps successive chunks on separate lines
def users_by_count(filename, hashtags=True):
"""
Write users and the number of tweets to file.
If hashtags is True, then only count the tweets that have hashtags.
"""
tweets, db, client = twitterproj.connect()
if hashtags:
collection = db.userHashtagTweetCount
else:
collection = db.userTweetCount
by_count(filename, collection)
def hashtags_by_count(filename):
tweets, db, client = twitterproj.connect()
by_count(filename, db.hashtags)
if __name__ == '__main__':
hashtags_by_count('hashtags.csv')
users_by_count('userCounts_hashtag_tweets.csv', hashtags=True)
users_by_count('userCounts_all_tweets.csv', hashtags=False)
|
Add script to export hashtags counts, user tweet counts, etc.
|
Add script to export hashtags counts, user tweet counts, etc.
|
Python
|
unlicense
|
chebee7i/twitter,chebee7i/twitter,chebee7i/twitter
|
Add script to export hashtags counts, user tweet counts, etc.
|
"""
Some really simple export scripts.
"""
from __future__ import unicode_literals
import io
import iterutils
import pymongo
import twitterproj
def grouper(n, iterable, fillvalue=None):
"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
args = [iter(iterable)] * n
return iterutils.izip_longest(fillvalue=fillvalue, *args)
def by_count(filename, collection):
tweets, db, client = twitterproj.connect()
chunksize = int(1e6)
lines = []
with io.open(filename, 'w', encoding='utf-8') as fobj:
c = collection.find().sort('count', pymongo.DESCENDING)
for chunk in grouper(chunksize, c, None):
# Filler is None, so we make sure it is something.
lines = ["{},{}".format(doc['_id'], int(doc['count'])) for doc in chunk if doc]
            fobj.write('\n'.join(lines) + '\n')  # newline keeps successive chunks on separate lines
def users_by_count(filename, hashtags=True):
"""
Write users and the number of tweets to file.
If hashtags is True, then only count the tweets that have hashtags.
"""
tweets, db, client = twitterproj.connect()
if hashtags:
collection = db.userHashtagTweetCount
else:
collection = db.userTweetCount
by_count(filename, collection)
def hashtags_by_count(filename):
tweets, db, client = twitterproj.connect()
by_count(filename, db.hashtags)
if __name__ == '__main__':
hashtags_by_count('hashtags.csv')
users_by_count('userCounts_hashtag_tweets.csv', hashtags=True)
users_by_count('userCounts_all_tweets.csv', hashtags=False)
|
<commit_before><commit_msg>Add script to export hashtags counts, user tweet counts, etc.<commit_after>
|
"""
Some really simple export scripts.
"""
from __future__ import unicode_literals
import io
import iterutils
import pymongo
import twitterproj
def grouper(n, iterable, fillvalue=None):
"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
args = [iter(iterable)] * n
return iterutils.izip_longest(fillvalue=fillvalue, *args)
def by_count(filename, collection):
tweets, db, client = twitterproj.connect()
chunksize = int(1e6)
lines = []
with io.open(filename, 'w', encoding='utf-8') as fobj:
c = collection.find().sort('count', pymongo.DESCENDING)
for chunk in grouper(chunksize, c, None):
# Filler is None, so we make sure it is something.
lines = ["{},{}".format(doc['_id'], int(doc['count'])) for doc in chunk if doc]
            fobj.write('\n'.join(lines) + '\n')  # newline keeps successive chunks on separate lines
def users_by_count(filename, hashtags=True):
"""
Write users and the number of tweets to file.
If hashtags is True, then only count the tweets that have hashtags.
"""
tweets, db, client = twitterproj.connect()
if hashtags:
collection = db.userHashtagTweetCount
else:
collection = db.userTweetCount
by_count(filename, collection)
def hashtags_by_count(filename):
tweets, db, client = twitterproj.connect()
by_count(filename, db.hashtags)
if __name__ == '__main__':
hashtags_by_count('hashtags.csv')
users_by_count('userCounts_hashtag_tweets.csv', hashtags=True)
users_by_count('userCounts_all_tweets.csv', hashtags=False)
|
Add script to export hashtags counts, user tweet counts, etc."""
Some really simple export scripts.
"""
from __future__ import unicode_literals
import io
import iterutils
import pymongo
import twitterproj
def grouper(n, iterable, fillvalue=None):
"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
args = [iter(iterable)] * n
return iterutils.izip_longest(fillvalue=fillvalue, *args)
def by_count(filename, collection):
tweets, db, client = twitterproj.connect()
chunksize = int(1e6)
lines = []
with io.open(filename, 'w', encoding='utf-8') as fobj:
c = collection.find().sort('count', pymongo.DESCENDING)
for chunk in grouper(chunksize, c, None):
# Filler is None, so we make sure it is something.
lines = ["{},{}".format(doc['_id'], int(doc['count'])) for doc in chunk if doc]
            fobj.write('\n'.join(lines) + '\n')  # newline keeps successive chunks on separate lines
def users_by_count(filename, hashtags=True):
"""
Write users and the number of tweets to file.
If hashtags is True, then only count the tweets that have hashtags.
"""
tweets, db, client = twitterproj.connect()
if hashtags:
collection = db.userHashtagTweetCount
else:
collection = db.userTweetCount
by_count(filename, collection)
def hashtags_by_count(filename):
tweets, db, client = twitterproj.connect()
by_count(filename, db.hashtags)
if __name__ == '__main__':
hashtags_by_count('hashtags.csv')
users_by_count('userCounts_hashtag_tweets.csv', hashtags=True)
users_by_count('userCounts_all_tweets.csv', hashtags=False)
|
<commit_before><commit_msg>Add script to export hashtags counts, user tweet counts, etc.<commit_after>"""
Some really simple export scripts.
"""
from __future__ import unicode_literals
import io
import iterutils
import pymongo
import twitterproj
def grouper(n, iterable, fillvalue=None):
"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
args = [iter(iterable)] * n
return iterutils.izip_longest(fillvalue=fillvalue, *args)
def by_count(filename, collection):
tweets, db, client = twitterproj.connect()
chunksize = int(1e6)
lines = []
with io.open(filename, 'w', encoding='utf-8') as fobj:
c = collection.find().sort('count', pymongo.DESCENDING)
for chunk in grouper(chunksize, c, None):
# Filler is None, so we make sure it is something.
lines = ["{},{}".format(doc['_id'], int(doc['count'])) for doc in chunk if doc]
            fobj.write('\n'.join(lines) + '\n')  # newline keeps successive chunks on separate lines
def users_by_count(filename, hashtags=True):
"""
Write users and the number of tweets to file.
If hashtags is True, then only count the tweets that have hashtags.
"""
tweets, db, client = twitterproj.connect()
if hashtags:
collection = db.userHashtagTweetCount
else:
collection = db.userTweetCount
by_count(filename, collection)
def hashtags_by_count(filename):
tweets, db, client = twitterproj.connect()
by_count(filename, db.hashtags)
if __name__ == '__main__':
hashtags_by_count('hashtags.csv')
users_by_count('userCounts_hashtag_tweets.csv', hashtags=True)
users_by_count('userCounts_all_tweets.csv', hashtags=False)
|
|
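The grouper above is the classic itertools chunking recipe, written against a Python 2-era izip_longest. A Python 3 sketch of the same chunked-export pattern using itertools.zip_longest; write_counts, the chunk size, and the fake documents are illustrative assumptions, and each chunk ends with a newline so consecutive chunks do not run together.
from itertools import zip_longest
def grouper(n, iterable, fillvalue=None):
    "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
    args = [iter(iterable)] * n
    return zip_longest(fillvalue=fillvalue, *args)
def write_counts(path, docs, chunksize=1000):
    # Stream (_id, count) pairs to disk one fixed-size chunk at a time.
    with open(path, 'w', encoding='utf-8') as fobj:
        for chunk in grouper(chunksize, docs):
            lines = ["{},{}".format(d['_id'], int(d['count'])) for d in chunk if d]
            fobj.write('\n'.join(lines) + '\n')  # filler entries are None and get skipped
if __name__ == '__main__':
    fake = [{'_id': 'tag{}'.format(i), 'count': 10 - i} for i in range(7)]
    write_counts('counts.csv', fake, chunksize=3)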
5babccca12e1cbef655957b038594eadb1fe63bc
|
nose2/tests/unit/test_prof_plugin.py
|
nose2/tests/unit/test_prof_plugin.py
|
import unittest2
from ..plugins import prof
from ._common import Stub, FakeStartTestRunEvent
class TestProfPlugin(unittest2.TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = FakeStartTestRunEvent()
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
|
from nose2.plugins import prof
from nose2.events import StartTestRunEvent
from nose2.tests._common import Stub, TestCase
class TestProfPlugin(TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = StartTestRunEvent(runner=None, suite=None, result=None,
startTime=None, executeTests=None)
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
|
Use real events and proper TestCase
|
Use real events and proper TestCase
|
Python
|
bsd-2-clause
|
ojengwa/nose2,ezigman/nose2,leth/nose2,ptthiem/nose2,leth/nose2,little-dude/nose2,ptthiem/nose2,ezigman/nose2,little-dude/nose2,ojengwa/nose2
|
import unittest2
from ..plugins import prof
from ._common import Stub, FakeStartTestRunEvent
class TestProfPlugin(unittest2.TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = FakeStartTestRunEvent()
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
Use real events and proper TestCase
|
from nose2.plugins import prof
from nose2.events import StartTestRunEvent
from nose2.tests._common import Stub, TestCase
class TestProfPlugin(TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = StartTestRunEvent(runner=None, suite=None, result=None,
startTime=None, executeTests=None)
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
|
<commit_before>import unittest2
from ..plugins import prof
from ._common import Stub, FakeStartTestRunEvent
class TestProfPlugin(unittest2.TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = FakeStartTestRunEvent()
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
<commit_msg>Use real events and proper TestCase<commit_after>
|
from nose2.plugins import prof
from nose2.events import StartTestRunEvent
from nose2.tests._common import Stub, TestCase
class TestProfPlugin(TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = StartTestRunEvent(runner=None, suite=None, result=None,
startTime=None, executeTests=None)
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
|
import unittest2
from ..plugins import prof
from ._common import Stub, FakeStartTestRunEvent
class TestProfPlugin(unittest2.TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = FakeStartTestRunEvent()
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
Use real events and proper TestCasefrom nose2.plugins import prof
from nose2.events import StartTestRunEvent
from nose2.tests._common import Stub, TestCase
class TestProfPlugin(TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = StartTestRunEvent(runner=None, suite=None, result=None,
startTime=None, executeTests=None)
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
|
<commit_before>import unittest2
from ..plugins import prof
from ._common import Stub, FakeStartTestRunEvent
class TestProfPlugin(unittest2.TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = FakeStartTestRunEvent()
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
<commit_msg>Use real events and proper TestCase<commit_after>from nose2.plugins import prof
from nose2.events import StartTestRunEvent
from nose2.tests._common import Stub, TestCase
class TestProfPlugin(TestCase):
tags = ['unit']
def setUp(self):
self.plugin = prof.Profiler()
self.hotshot = prof.hotshot
self.stats = prof.stats
prof.hotshot = Stub()
prof.stats = Stub()
def tearDown(self):
prof.hotshot = self.hotshot
prof.stats = self.stats
def test_startTestRun_sets_executeTests(self):
_prof = Stub()
_prof.runcall = object()
prof.hotshot.Profile = lambda filename: _prof
event = StartTestRunEvent(runner=None, suite=None, result=None,
startTime=None, executeTests=None)
self.plugin.startTestRun(event)
assert event.executeTests is _prof.runcall, \
"executeTests was not replaced"
|
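The diff above keeps one pattern intact while swapping the fake event for a real one: save module-level collaborators in setUp, stub them, and restore them in tearDown so state never leaks between tests. A self-contained sketch of that pattern, assuming nothing from nose2 (profmod and Stub are toy stand-ins):
import types
import unittest
# Toy stand-in for a module whose globals the code under test touches.
profmod = types.SimpleNamespace(hotshot=None)
class Stub(object):
    """Inert placeholder that absorbs attribute assignments."""
class PluginTest(unittest.TestCase):
    def setUp(self):
        self._hotshot = profmod.hotshot  # save the real collaborator
        profmod.hotshot = Stub()         # swap in a stub for this test
    def tearDown(self):
        profmod.hotshot = self._hotshot  # restore whether the test passed or failed
    def test_stub_in_place(self):
        self.assertIsInstance(profmod.hotshot, Stub)
if __name__ == '__main__':
    unittest.main()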
55a17865393f8c4d489f41dccbcc656670c81f2b
|
bika/lims/fields.py
|
bika/lims/fields.py
|
"""Generic field extensions
"""
from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import Implicit
from Acquisition import ImplicitAcquisitionWrapper
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.interfaces import IExtensionField
from Products.Archetypes.public import *
from Products.ATExtensions.ateapi import DateTimeField
from Products.ATExtensions.ateapi import RecordField, RecordsField
from zope.interface import implements
class ExtBooleanField(ExtensionField, BooleanField):
"Field extender"
class ExtComputedField(ExtensionField, ComputedField):
"Field extender"
class ExtDateTimeField(ExtensionField, DateTimeField):
"Field extender"
class ExtIntegerField(ExtensionField, IntegerField):
"Field extender"
class ExtLinesField(ExtensionField, LinesField):
"Field extender"
class ExtRecordField(ExtensionField, RecordField):
"Field extender"
class ExtRecordsField(ExtensionField, RecordsField):
"Field extender"
class ExtReferenceField(ExtensionField, ReferenceField):
"Field extender"
class ExtStringField(ExtensionField, StringField):
"Field extender"
class ExtTextField(ExtensionField, TextField):
"Field extender"
#
# Method Initialization
# apply default getters and setters to schemaextender fields.
#
def apply_default_methods(context):
for field in context.schema.fields():
fieldname = field.getName()
if getattr(context, 'get'+fieldname, None) is None:
setattr(context, 'get'+fieldname, field_getter(context, fieldname))
if getattr(context, 'set'+fieldname, None) is None:
setattr(context, 'set'+fieldname, field_setter(context, fieldname))
class field_getter:
    def __init__(self, context, fieldname, **kwargs):
        self.context = context
        self.fieldname = fieldname
        self.kwargs = kwargs  # keep accessor kwargs for use in __call__
    def __call__(self):
        return self.context.Schema()[self.fieldname].getAccessor(self.context)(**self.kwargs)
class field_setter:
def __init__(self, context, fieldname):
self.context = context
self.fieldname = fieldname
def __call__(self, value, **kwargs):
return self.context.Schema()[self.fieldname].getMutator(self.context)(value, **kwargs)
|
Move schemaextender automatic getter and setter methods
|
Move schemaextender automatic getter and setter methods
|
Python
|
agpl-3.0
|
anneline/Bika-LIMS,labsanmartin/Bika-LIMS,rockfruit/bika.lims,labsanmartin/Bika-LIMS,DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS,rockfruit/bika.lims,veroc/Bika-LIMS,veroc/Bika-LIMS,labsanmartin/Bika-LIMS,anneline/Bika-LIMS,DeBortoliWines/Bika-LIMS,anneline/Bika-LIMS,veroc/Bika-LIMS
|
Move schemaextender automatic getter and setter methods
|
"""Generic field extensions
"""
from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import Implicit
from Acquisition import ImplicitAcquisitionWrapper
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.interfaces import IExtensionField
from Products.Archetypes.public import *
from Products.ATExtensions.ateapi import DateTimeField
from Products.ATExtensions.ateapi import RecordField, RecordsField
from zope.interface import implements
class ExtBooleanField(ExtensionField, BooleanField):
"Field extender"
class ExtComputedField(ExtensionField, ComputedField):
"Field extender"
class ExtDateTimeField(ExtensionField, DateTimeField):
"Field extender"
class ExtIntegerField(ExtensionField, IntegerField):
"Field extender"
class ExtLinesField(ExtensionField, LinesField):
"Field extender"
class ExtRecordField(ExtensionField, RecordField):
"Field extender"
class ExtRecordsField(ExtensionField, RecordsField):
"Field extender"
class ExtReferenceField(ExtensionField, ReferenceField):
"Field extender"
class ExtStringField(ExtensionField, StringField):
"Field extender"
class ExtTextField(ExtensionField, TextField):
"Field extender"
#
# Method Initialization
# apply default getters and setters to schemaextender fields.
#
def apply_default_methods(context):
for field in context.schema.fields():
fieldname = field.getName()
if getattr(context, 'get'+fieldname, None) is None:
setattr(context, 'get'+fieldname, field_getter(context, fieldname))
if getattr(context, 'set'+fieldname, None) is None:
setattr(context, 'set'+fieldname, field_setter(context, fieldname))
class field_getter:
    def __init__(self, context, fieldname, **kwargs):
        self.context = context
        self.fieldname = fieldname
        self.kwargs = kwargs  # keep accessor kwargs for use in __call__
    def __call__(self):
        return self.context.Schema()[self.fieldname].getAccessor(self.context)(**self.kwargs)
class field_setter:
def __init__(self, context, fieldname):
self.context = context
self.fieldname = fieldname
def __call__(self, value, **kwargs):
return self.context.Schema()[self.fieldname].getMutator(self.context)(value, **kwargs)
|
<commit_before><commit_msg>Move schemaextender automatic getter and setter methods<commit_after>
|
"""Generic field extensions
"""
from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import Implicit
from Acquisition import ImplicitAcquisitionWrapper
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.interfaces import IExtensionField
from Products.Archetypes.public import *
from Products.ATExtensions.ateapi import DateTimeField
from Products.ATExtensions.ateapi import RecordField, RecordsField
from zope.interface import implements
class ExtBooleanField(ExtensionField, BooleanField):
"Field extender"
class ExtComputedField(ExtensionField, ComputedField):
"Field extender"
class ExtDateTimeField(ExtensionField, DateTimeField):
"Field extender"
class ExtIntegerField(ExtensionField, IntegerField):
"Field extender"
class ExtLinesField(ExtensionField, LinesField):
"Field extender"
class ExtRecordField(ExtensionField, RecordField):
"Field extender"
class ExtRecordsField(ExtensionField, RecordsField):
"Field extender"
class ExtReferenceField(ExtensionField, ReferenceField):
"Field extender"
class ExtStringField(ExtensionField, StringField):
"Field extender"
class ExtTextField(ExtensionField, TextField):
"Field extender"
#
# Method Initialization
# apply default getters and setters to schemaextender fields.
#
def apply_default_methods(context):
for field in context.schema.fields():
fieldname = field.getName()
if getattr(context, 'get'+fieldname, None) is None:
setattr(context, 'get'+fieldname, field_getter(context, fieldname))
if getattr(context, 'set'+fieldname, None) is None:
setattr(context, 'set'+fieldname, field_setter(context, fieldname))
class field_getter:
    def __init__(self, context, fieldname, **kwargs):
        self.context = context
        self.fieldname = fieldname
        self.kwargs = kwargs  # keep accessor kwargs for use in __call__
    def __call__(self):
        return self.context.Schema()[self.fieldname].getAccessor(self.context)(**self.kwargs)
class field_setter:
def __init__(self, context, fieldname):
self.context = context
self.fieldname = fieldname
def __call__(self, value, **kwargs):
return self.context.Schema()[self.fieldname].getMutator(self.context)(value, **kwargs)
|
Move schemaextender automatic getter and setter methods"""Generic field extensions
"""
from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import Implicit
from Acquisition import ImplicitAcquisitionWrapper
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.interfaces import IExtensionField
from Products.Archetypes.public import *
from Products.ATExtensions.ateapi import DateTimeField
from Products.ATExtensions.ateapi import RecordField, RecordsField
from zope.interface import implements
class ExtBooleanField(ExtensionField, BooleanField):
"Field extender"
class ExtComputedField(ExtensionField, ComputedField):
"Field extender"
class ExtDateTimeField(ExtensionField, DateTimeField):
"Field extender"
class ExtIntegerField(ExtensionField, IntegerField):
"Field extender"
class ExtLinesField(ExtensionField, LinesField):
"Field extender"
class ExtRecordField(ExtensionField, RecordField):
"Field extender"
class ExtRecordsField(ExtensionField, RecordsField):
"Field extender"
class ExtReferenceField(ExtensionField, ReferenceField):
"Field extender"
class ExtStringField(ExtensionField, StringField):
"Field extender"
class ExtTextField(ExtensionField, TextField):
"Field extender"
#
# Method Initialization
# apply default getters and setters to schemaextender fields.
#
def apply_default_methods(context):
for field in context.schema.fields():
fieldname = field.getName()
if getattr(context, 'get'+fieldname, None) is None:
setattr(context, 'get'+fieldname, field_getter(context, fieldname))
if getattr(context, 'set'+fieldname, None) is None:
setattr(context, 'set'+fieldname, field_setter(context, fieldname))
class field_getter:
    def __init__(self, context, fieldname, **kwargs):
        self.context = context
        self.fieldname = fieldname
        self.kwargs = kwargs  # keep accessor kwargs for use in __call__
    def __call__(self):
        return self.context.Schema()[self.fieldname].getAccessor(self.context)(**self.kwargs)
class field_setter:
def __init__(self, context, fieldname):
self.context = context
self.fieldname = fieldname
def __call__(self, value, **kwargs):
return self.context.Schema()[self.fieldname].getMutator(self.context)(value, **kwargs)
|
<commit_before><commit_msg>Move schemaextender automatic getter and setter methods<commit_after>"""Generic field extensions
"""
from Acquisition import aq_inner
from Acquisition import aq_parent
from Acquisition import Implicit
from Acquisition import ImplicitAcquisitionWrapper
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.field import ExtensionField
from archetypes.schemaextender.interfaces import IExtensionField
from Products.Archetypes.public import *
from Products.ATExtensions.ateapi import DateTimeField
from Products.ATExtensions.ateapi import RecordField, RecordsField
from zope.interface import implements
class ExtBooleanField(ExtensionField, BooleanField):
"Field extender"
class ExtComputedField(ExtensionField, ComputedField):
"Field extender"
class ExtDateTimeField(ExtensionField, DateTimeField):
"Field extender"
class ExtIntegerField(ExtensionField, IntegerField):
"Field extender"
class ExtLinesField(ExtensionField, LinesField):
"Field extender"
class ExtRecordField(ExtensionField, RecordField):
"Field extender"
class ExtRecordsField(ExtensionField, RecordsField):
"Field extender"
class ExtReferenceField(ExtensionField, ReferenceField):
"Field extender"
class ExtStringField(ExtensionField, StringField):
"Field extender"
class ExtTextField(ExtensionField, TextField):
"Field extender"
#
# Method Initialization
# apply default getters and setters to schemaextender fields.
#
def apply_default_methods(context):
for field in context.schema.fields():
fieldname = field.getName()
if getattr(context, 'get'+fieldname, None) is None:
setattr(context, 'get'+fieldname, field_getter(context, fieldname))
if getattr(context, 'set'+fieldname, None) is None:
setattr(context, 'set'+fieldname, field_setter(context, fieldname))
class field_getter:
    def __init__(self, context, fieldname, **kwargs):
        self.context = context
        self.fieldname = fieldname
        self.kwargs = kwargs  # keep accessor kwargs for use in __call__
    def __call__(self):
        return self.context.Schema()[self.fieldname].getAccessor(self.context)(**self.kwargs)
class field_setter:
def __init__(self, context, fieldname):
self.context = context
self.fieldname = fieldname
def __call__(self, value, **kwargs):
return self.context.Schema()[self.fieldname].getMutator(self.context)(value, **kwargs)
|
|
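apply_default_methods above attaches one callable object per field instead of a lambda defined in the loop, so every getter and setter binds its own field name at construction time. A runnable sketch of that pattern outside Archetypes; Field, _Getter, _Setter and the underscore-prefixed storage slot are illustrative assumptions, not the Plone API.
class Field(object):
    def __init__(self, name):
        self.name = name
class _Getter(object):
    # One instance per field: `name` is fixed in __init__, avoiding the
    # late-binding trap a `lambda: read(name)` defined in the loop would hit.
    def __init__(self, obj, name):
        self.obj, self.name = obj, name
    def __call__(self):
        return self.obj.__dict__.get('_' + self.name)
class _Setter(object):
    def __init__(self, obj, name):
        self.obj, self.name = obj, name
    def __call__(self, value):
        self.obj.__dict__['_' + self.name] = value
def apply_default_methods(obj, fields):
    """Attach get<Name>/set<Name> for every field that lacks them."""
    for field in fields:
        name = field.name
        if getattr(obj, 'get' + name, None) is None:
            setattr(obj, 'get' + name, _Getter(obj, name))
        if getattr(obj, 'set' + name, None) is None:
            setattr(obj, 'set' + name, _Setter(obj, name))
class Thing(object):
    pass
if __name__ == '__main__':
    t = Thing()
    apply_default_methods(t, [Field('Title'), Field('Body')])
    t.setTitle('hello')
    print(t.getTitle())  # -> hello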
a1531197784cce0222720581a3bc47cd7b83e0ca
|
bluebottle/utils/template_loaders.py
|
bluebottle/utils/template_loaders.py
|
from django.template.loader import BaseLoader
from django.db import connection
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils._os import safe_join
from django.template.base import TemplateDoesNotExist
class TenantTemplateLoader(BaseLoader):
is_usable = True
def get_template_sources(self, template_name, template_dirs=None):
"""
Returns the absolute paths to "template_name", when appended to each
directory in "template_dirs". Any paths that don't lie inside one of the
template dirs are excluded from the result set, for security reasons.
"""
if not connection.tenant:
return
if not template_dirs:
try:
template_dirs = settings.MULTITENANT_TEMPLATE_DIRS
except AttributeError:
raise ImproperlyConfigured('To use %s.%s you must define the MULTITENANT_TEMPLATE_DIRS' %
(__name__, TenantTemplateLoader.__name__))
for template_dir in template_dirs:
try:
if '%s' in template_dir:
yield safe_join(template_dir % connection.tenant.client_name, 'templates', template_name)
else:
yield safe_join(template_dir, connection.tenant.client_name, 'templates', template_name)
except UnicodeDecodeError:
# The template dir name was a bytestring that wasn't valid UTF-8.
raise
except ValueError:
# The joined path was located outside of this particular
# template_dir (it might be inside another one, so this isn't
# fatal).
pass
def load_template_source(self, template_name, template_dirs=None):
tried = []
for filepath in self.get_template_sources(template_name, template_dirs):
try:
with open(filepath, 'rb') as fp:
return (fp.read().decode(settings.FILE_CHARSET), filepath)
except IOError:
tried.append(filepath)
if tried:
error_msg = "Tried %s" % tried
else:
error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."
raise TemplateDoesNotExist(error_msg)
load_template_source.is_usable = True
|
Add template loader for clients
|
Add template loader for clients
|
Python
|
bsd-3-clause
|
jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Add template loader for clients
|
from django.template.loader import BaseLoader
from django.db import connection
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils._os import safe_join
from django.template.base import TemplateDoesNotExist
class TenantTemplateLoader(BaseLoader):
is_usable = True
def get_template_sources(self, template_name, template_dirs=None):
"""
Returns the absolute paths to "template_name", when appended to each
directory in "template_dirs". Any paths that don't lie inside one of the
template dirs are excluded from the result set, for security reasons.
"""
if not connection.tenant:
return
if not template_dirs:
try:
template_dirs = settings.MULTITENANT_TEMPLATE_DIRS
except AttributeError:
raise ImproperlyConfigured('To use %s.%s you must define the MULTITENANT_TEMPLATE_DIRS' %
(__name__, TenantTemplateLoader.__name__))
for template_dir in template_dirs:
try:
if '%s' in template_dir:
yield safe_join(template_dir % connection.tenant.client_name, 'templates', template_name)
else:
yield safe_join(template_dir, connection.tenant.client_name, 'templates', template_name)
except UnicodeDecodeError:
# The template dir name was a bytestring that wasn't valid UTF-8.
raise
except ValueError:
# The joined path was located outside of this particular
# template_dir (it might be inside another one, so this isn't
# fatal).
pass
def load_template_source(self, template_name, template_dirs=None):
tried = []
for filepath in self.get_template_sources(template_name, template_dirs):
try:
with open(filepath, 'rb') as fp:
return (fp.read().decode(settings.FILE_CHARSET), filepath)
except IOError:
tried.append(filepath)
if tried:
error_msg = "Tried %s" % tried
else:
error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."
raise TemplateDoesNotExist(error_msg)
load_template_source.is_usable = True
|
<commit_before><commit_msg>Add template loader for clients<commit_after>
|
from django.template.loader import BaseLoader
from django.db import connection
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils._os import safe_join
from django.template.base import TemplateDoesNotExist
class TenantTemplateLoader(BaseLoader):
is_usable = True
def get_template_sources(self, template_name, template_dirs=None):
"""
Returns the absolute paths to "template_name", when appended to each
directory in "template_dirs". Any paths that don't lie inside one of the
template dirs are excluded from the result set, for security reasons.
"""
if not connection.tenant:
return
if not template_dirs:
try:
template_dirs = settings.MULTITENANT_TEMPLATE_DIRS
except AttributeError:
raise ImproperlyConfigured('To use %s.%s you must define the MULTITENANT_TEMPLATE_DIRS' %
(__name__, TenantTemplateLoader.__name__))
for template_dir in template_dirs:
try:
if '%s' in template_dir:
yield safe_join(template_dir % connection.tenant.client_name, 'templates', template_name)
else:
yield safe_join(template_dir, connection.tenant.client_name, 'templates', template_name)
except UnicodeDecodeError:
# The template dir name was a bytestring that wasn't valid UTF-8.
raise
except ValueError:
# The joined path was located outside of this particular
# template_dir (it might be inside another one, so this isn't
# fatal).
pass
def load_template_source(self, template_name, template_dirs=None):
tried = []
for filepath in self.get_template_sources(template_name, template_dirs):
try:
with open(filepath, 'rb') as fp:
return (fp.read().decode(settings.FILE_CHARSET), filepath)
except IOError:
tried.append(filepath)
if tried:
error_msg = "Tried %s" % tried
else:
error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."
raise TemplateDoesNotExist(error_msg)
load_template_source.is_usable = True
|
Add template loader for clientsfrom django.template.loader import BaseLoader
from django.db import connection
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils._os import safe_join
from django.template.base import TemplateDoesNotExist
class TenantTemplateLoader(BaseLoader):
is_usable = True
def get_template_sources(self, template_name, template_dirs=None):
"""
Returns the absolute paths to "template_name", when appended to each
directory in "template_dirs". Any paths that don't lie inside one of the
template dirs are excluded from the result set, for security reasons.
"""
if not connection.tenant:
return
if not template_dirs:
try:
template_dirs = settings.MULTITENANT_TEMPLATE_DIRS
except AttributeError:
raise ImproperlyConfigured('To use %s.%s you must define the MULTITENANT_TEMPLATE_DIRS' %
(__name__, TenantTemplateLoader.__name__))
for template_dir in template_dirs:
try:
if '%s' in template_dir:
yield safe_join(template_dir % connection.tenant.client_name, 'templates', template_name)
else:
yield safe_join(template_dir, connection.tenant.client_name, 'templates', template_name)
except UnicodeDecodeError:
# The template dir name was a bytestring that wasn't valid UTF-8.
raise
except ValueError:
# The joined path was located outside of this particular
# template_dir (it might be inside another one, so this isn't
# fatal).
pass
def load_template_source(self, template_name, template_dirs=None):
tried = []
for filepath in self.get_template_sources(template_name, template_dirs):
try:
with open(filepath, 'rb') as fp:
return (fp.read().decode(settings.FILE_CHARSET), filepath)
except IOError:
tried.append(filepath)
if tried:
error_msg = "Tried %s" % tried
else:
error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."
raise TemplateDoesNotExist(error_msg)
load_template_source.is_usable = True
|
<commit_before><commit_msg>Add template loader for clients<commit_after>from django.template.loader import BaseLoader
from django.db import connection
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils._os import safe_join
from django.template.base import TemplateDoesNotExist
class TenantTemplateLoader(BaseLoader):
is_usable = True
def get_template_sources(self, template_name, template_dirs=None):
"""
Returns the absolute paths to "template_name", when appended to each
directory in "template_dirs". Any paths that don't lie inside one of the
template dirs are excluded from the result set, for security reasons.
"""
if not connection.tenant:
return
if not template_dirs:
try:
template_dirs = settings.MULTITENANT_TEMPLATE_DIRS
except AttributeError:
raise ImproperlyConfigured('To use %s.%s you must define the MULTITENANT_TEMPLATE_DIRS' %
(__name__, TenantTemplateLoader.__name__))
for template_dir in template_dirs:
try:
if '%s' in template_dir:
yield safe_join(template_dir % connection.tenant.client_name, 'templates', template_name)
else:
yield safe_join(template_dir, connection.tenant.client_name, 'templates', template_name)
except UnicodeDecodeError:
# The template dir name was a bytestring that wasn't valid UTF-8.
raise
except ValueError:
# The joined path was located outside of this particular
# template_dir (it might be inside another one, so this isn't
# fatal).
pass
def load_template_source(self, template_name, template_dirs=None):
tried = []
for filepath in self.get_template_sources(template_name, template_dirs):
try:
with open(filepath, 'rb') as fp:
return (fp.read().decode(settings.FILE_CHARSET), filepath)
except IOError:
tried.append(filepath)
if tried:
error_msg = "Tried %s" % tried
else:
error_msg = "Your TEMPLATE_DIRS setting is empty. Change it to point to at least one template directory."
raise TemplateDoesNotExist(error_msg)
load_template_source.is_usable = True
|
|
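The loader above relies on safe_join to discard candidate paths that would escape a template directory. A sketch of that per-tenant resolution logic with a simplified, hand-rolled containment check (not Django's safe_join implementation); the /srv directories and tenant name are made up.
import os
def contained_join(base, *paths):
    """Join, then verify the result stays inside `base` (a simplified
    stand-in for django.utils._os.safe_join)."""
    final = os.path.abspath(os.path.join(base, *paths))
    if not final.startswith(os.path.abspath(base) + os.sep):
        raise ValueError("path escapes template dir: %r" % final)
    return final
def template_sources(template_name, template_dirs, tenant):
    # Dirs containing '%s' interpolate the tenant name; the rest get it
    # appended as a subdirectory, mirroring the loader above.
    for template_dir in template_dirs:
        try:
            if '%s' in template_dir:
                yield contained_join(template_dir % tenant, 'templates', template_name)
            else:
                yield contained_join(template_dir, tenant, 'templates', template_name)
        except ValueError:
            pass  # outside this dir; it may still resolve inside another one
if __name__ == '__main__':
    for p in template_sources('base.html', ['/srv/themes/%s', '/srv/shared'], 'acme'):
        print(p)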
6d42f4c26550486af6caa1ee74475d051af18940
|
python/qisrc/test/test_qisrc_reset.py
|
python/qisrc/test/test_qisrc_reset.py
|
# this is only used on buildfarm, so only test this
import pytest
import qisrc.snapshot
def test_reset_dash_f(qisrc_action, git_server):
git_server.create_repo("foo")
manifest_url = git_server.manifest_url
git_worktree = qisrc_action.git_worktree
tmpdir = qisrc_action.tmpdir
git_worktree.configure_manifest("default", manifest_url)
snapshot = tmpdir.join("snapshot").strpath
qisrc.snapshot.generate_snapshot(git_worktree,
snapshot,
deprecated_format=False)
qisrc_action("reset", "--snapshot", snapshot, "--force")
|
Add a test for qisrc reset
|
Add a test for qisrc reset
Change-Id: I6d009b46aefc53c7eca18d77225796614d200d10
|
Python
|
bsd-3-clause
|
dmerejkowsky/qibuild,aldebaran/qibuild,dmerejkowsky/qibuild,dmerejkowsky/qibuild,dmerejkowsky/qibuild,aldebaran/qibuild,aldebaran/qibuild,dmerejkowsky/qibuild,aldebaran/qibuild
|
Add a test for qisrc reset
Change-Id: I6d009b46aefc53c7eca18d77225796614d200d10
|
# this is only used on buildfarm, so only test this
import pytest
import qisrc.snapshot
def test_reset_dash_f(qisrc_action, git_server):
git_server.create_repo("foo")
manifest_url = git_server.manifest_url
git_worktree = qisrc_action.git_worktree
tmpdir = qisrc_action.tmpdir
git_worktree.configure_manifest("default", manifest_url)
snapshot = tmpdir.join("snapshot").strpath
qisrc.snapshot.generate_snapshot(git_worktree,
snapshot,
deprecated_format=False)
qisrc_action("reset", "--snapshot", snapshot, "--force")
|
<commit_before><commit_msg>Add a test for qisrc reset
Change-Id: I6d009b46aefc53c7eca18d77225796614d200d10<commit_after>
|
# this is only used on buildfarm, so only test this
import pytest
import qisrc.snapshot
def test_reset_dash_f(qisrc_action, git_server):
git_server.create_repo("foo")
manifest_url = git_server.manifest_url
git_worktree = qisrc_action.git_worktree
tmpdir = qisrc_action.tmpdir
git_worktree.configure_manifest("default", manifest_url)
snapshot = tmpdir.join("snapshot").strpath
qisrc.snapshot.generate_snapshot(git_worktree,
snapshot,
deprecated_format=False)
qisrc_action("reset", "--snapshot", snapshot, "--force")
|
Add a test for qisrc reset
Change-Id: I6d009b46aefc53c7eca18d77225796614d200d10# this is only used on buildfarm, so only test this
import pytest
import qisrc.snapshot
def test_reset_dash_f(qisrc_action, git_server):
git_server.create_repo("foo")
manifest_url = git_server.manifest_url
git_worktree = qisrc_action.git_worktree
tmpdir = qisrc_action.tmpdir
git_worktree.configure_manifest("default", manifest_url)
snapshot = tmpdir.join("snapshot").strpath
qisrc.snapshot.generate_snapshot(git_worktree,
snapshot,
deprecated_format=False)
qisrc_action("reset", "--snapshot", snapshot, "--force")
|
<commit_before><commit_msg>Add a test for qisrc reset
Change-Id: I6d009b46aefc53c7eca18d77225796614d200d10<commit_after># this is only used on buildfarm, so only test this
import pytest
import qisrc.snapshot
def test_reset_dash_f(qisrc_action, git_server):
git_server.create_repo("foo")
manifest_url = git_server.manifest_url
git_worktree = qisrc_action.git_worktree
tmpdir = qisrc_action.tmpdir
git_worktree.configure_manifest("default", manifest_url)
snapshot = tmpdir.join("snapshot").strpath
qisrc.snapshot.generate_snapshot(git_worktree,
snapshot,
deprecated_format=False)
qisrc_action("reset", "--snapshot", snapshot, "--force")
|
|
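A sketch of the same test shape with stock pytest fixtures: build an artifact under a temporary directory, then hand its path to the action under test. fake_action simply echoes its argv and stands in for the qisrc_action fixture; the snapshot body is invented.
def fake_action(*args):
    return list(args)  # a real fixture would invoke the CLI entry point here
def test_reset_with_snapshot(tmp_path):
    snapshot = tmp_path / "snapshot"
    snapshot.write_text("manifest:\n  default\n")  # stand-in snapshot contents
    argv = fake_action("reset", "--snapshot", str(snapshot), "--force")
    assert argv == ["reset", "--snapshot", str(snapshot), "--force"]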
7847b0dab326110ffa834fc35e98f8b9e3fef3e3
|
PID_determine_coef.py
|
PID_determine_coef.py
|
from LLD import *
import csv
SAMPLING_PERIOD = 15
MAX_DUTY = 1800/3200
lld = LLD()
class Fake_Tank(object):
def __init__(self, name):
self.resistor_duty = 0
self.tank_name = name
hot = Fake_Tank("Hot")
mash = Fake_Tank("Mash")
boil = Fake_Tank("Boil")
tanks = (mash, boil, hot)
tut = boil # for tank under test
lld.set_resistors_duty(tanks, (0,MAX_DUTY,0))
time.sleep(3)
with open('pid_calib.csv', 'w', newline='') as csvfile:
csvwriter = csv.writer(csvfile, delimiter=' ',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
csvwriter.writerow(["Setting","Temperature"])
t = lld.get_temperature(tut)
while(t < 90):
csvwriter.writerow(["1",t])
print(t)
time.sleep(SAMPLING_PERIOD)
t = lld.get_temperature(tut)
lld.set_resistors_duty(tanks, (0, 0, 0))
i = 0
while (i < SAMPLING_PERIOD * 4 * 15):
t = lld.get_temperature(tut)
csvwriter.writerow(["1",t])
print(t)
        time.sleep(SAMPLING_PERIOD)
        i += SAMPLING_PERIOD  # count elapsed seconds so the cooldown loop terminates
print("calib test is done, take care of hot water")
|
Add a script to calibrate PID
|
Add a script to calibrate PID
|
Python
|
mit
|
flagos/pico-brewUI,flagos/pico-brewUI,flagos/pico-brewUI,flagos/pico-brewUI,flagos/pico-brewUI
|
Add a script to calibrate PID
|
from LLD import *
import csv
SAMPLING_PERIOD = 15
MAX_DUTY = 1800/3200
lld = LLD()
class Fake_Tank(object):
def __init__(self, name):
self.resistor_duty = 0
self.tank_name = name
hot = Fake_Tank("Hot")
mash = Fake_Tank("Mash")
boil = Fake_Tank("Boil")
tanks = (mash, boil, hot)
tut = boil # for tank under test
lld.set_resistors_duty(tanks, (0,MAX_DUTY,0))
time.sleep(3)
with open('pid_calib.csv', 'w', newline='') as csvfile:
csvwriter = csv.writer(csvfile, delimiter=' ',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
csvwriter.writerow(["Setting","Temperature"])
t = lld.get_temperature(tut)
while(t < 90):
csvwriter.writerow(["1",t])
print(t)
time.sleep(SAMPLING_PERIOD)
t = lld.get_temperature(tut)
lld.set_resistors_duty(tanks, (0, 0, 0))
i = 0
while (i < SAMPLING_PERIOD * 4 * 15):
t = lld.get_temperature(tut)
csvwriter.writerow(["1",t])
print(t)
        time.sleep(SAMPLING_PERIOD)
        i += SAMPLING_PERIOD  # count elapsed seconds so the cooldown loop terminates
print("calib test is done, take care of hot water")
|
<commit_before><commit_msg>Add a script to calibrate PID<commit_after>
|
from LLD import *
import csv
SAMPLING_PERIOD = 15
MAX_DUTY = 1800/3200
lld = LLD()
class Fake_Tank(object):
def __init__(self, name):
self.resistor_duty = 0
self.tank_name = name
hot = Fake_Tank("Hot")
mash = Fake_Tank("Mash")
boil = Fake_Tank("Boil")
tanks = (mash, boil, hot)
tut = boil # for tank under test
lld.set_resistors_duty(tanks, (0,MAX_DUTY,0))
time.sleep(3)
with open('pid_calib.csv', 'w', newline='') as csvfile:
csvwriter = csv.writer(csvfile, delimiter=' ',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
csvwriter.writerow(["Setting","Temperature"])
t = lld.get_temperature(tut)
while(t < 90):
csvwriter.writerow(["1",t])
print(t)
time.sleep(SAMPLING_PERIOD)
t = lld.get_temperature(tut)
lld.set_resistors_duty(tanks, (0, 0, 0))
i = 0
while (i < SAMPLING_PERIOD * 4 * 15):
t = lld.get_temperature(tut)
csvwriter.writerow(["1",t])
print(t)
        time.sleep(SAMPLING_PERIOD)
        i += SAMPLING_PERIOD  # count elapsed seconds so the cooldown loop terminates
print("calib test is done, take care of hot water")
|
Add a script to calibrate PIDfrom LLD import *
import csv
SAMPLING_PERIOD = 15
MAX_DUTY = 1800/3200
lld = LLD()
class Fake_Tank(object):
def __init__(self, name):
self.resistor_duty = 0
self.tank_name = name
hot = Fake_Tank("Hot")
mash = Fake_Tank("Mash")
boil = Fake_Tank("Boil")
tanks = (mash, boil, hot)
tut = boil # for tank under test
lld.set_resistors_duty(tanks, (0,MAX_DUTY,0))
time.sleep(3)
with open('pid_calib.csv', 'w', newline='') as csvfile:
csvwriter = csv.writer(csvfile, delimiter=' ',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
csvwriter.writerow(["Setting","Temperature"])
t = lld.get_temperature(tut)
while(t < 90):
csvwriter.writerow(["1",t])
print(t)
time.sleep(SAMPLING_PERIOD)
t = lld.get_temperature(tut)
lld.set_resistors_duty(tanks, (0, 0, 0))
i = 0
while (i < SAMPLING_PERIOD * 4 * 15):
t = lld.get_temperature(tut)
csvwriter.writerow(["1",t])
print(t)
        time.sleep(SAMPLING_PERIOD)
        i += SAMPLING_PERIOD  # count elapsed seconds so the cooldown loop terminates
print("calib test is done, take care of hot water")
|
<commit_before><commit_msg>Add a script to calibrate PID<commit_after>from LLD import *
import csv
SAMPLING_PERIOD = 15
MAX_DUTY = 1800/3200
lld = LLD()
class Fake_Tank(object):
def __init__(self, name):
self.resistor_duty = 0
self.tank_name = name
hot = Fake_Tank("Hot")
mash = Fake_Tank("Mash")
boil = Fake_Tank("Boil")
tanks = (mash, boil, hot)
tut = boil # for tank under test
lld.set_resistors_duty(tanks, (0,MAX_DUTY,0))
time.sleep(3)
with open('pid_calib.csv', 'w', newline='') as csvfile:
csvwriter = csv.writer(csvfile, delimiter=' ',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
csvwriter.writerow(["Setting","Temperature"])
t = lld.get_temperature(tut)
while(t < 90):
csvwriter.writerow(["1",t])
print(t)
time.sleep(SAMPLING_PERIOD)
t = lld.get_temperature(tut)
lld.set_resistors_duty(tanks, (0, 0, 0))
i = 0
while (i < SAMPLING_PERIOD * 4 * 15):
t = lld.get_temperature(tut)
csvwriter.writerow(["1",t])
print(t)
        time.sleep(SAMPLING_PERIOD)
        i += SAMPLING_PERIOD  # count elapsed seconds so the cooldown loop terminates
print("calib test is done, take care of hot water")
|
|
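One conventional way to turn the step response logged above into PID coefficients is Ziegler-Nichols open-loop (reaction-curve) tuning: read the process gain, dead time and time constant off the CSV, then apply the classic table. A sketch under that assumption; every number below is illustrative, not measured from any tank.
def zn_open_loop_pid(delta_y, delta_u, dead_time, time_constant):
    """Classic Ziegler-Nichols reaction-curve PID gains.
    delta_y / delta_u is the steady-state output change per input step;
    dead_time (L) and time_constant (T) come from the logged response."""
    K = delta_y / delta_u                       # process gain
    Kp = 1.2 * time_constant / (K * dead_time)  # proportional gain
    Ti = 2.0 * dead_time                        # integral time [s]
    Td = 0.5 * dead_time                        # derivative time [s]
    return Kp, Ti, Td
if __name__ == '__main__':
    # e.g. a 0.5625 duty step that heats the tank 45 degC, with 60 s of
    # dead time and a 900 s time constant:
    Kp, Ti, Td = zn_open_loop_pid(45.0, 0.5625, 60.0, 900.0)
    print("Kp={:.3f} Ti={:.0f}s Td={:.0f}s".format(Kp, Ti, Td))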
08e892ce9ee62518a5a79890e7af4bbdcdb41aed
|
lms/djangoapps/django_comment_client/management/commands/create_roles_for_existing.py
|
lms/djangoapps/django_comment_client/management/commands/create_roles_for_existing.py
|
"""
This must be run only after seed_permissions_roles.py!
Creates default roles for all users currently in the database. Just runs through
Enrollments.
"""
from django.core.management.base import BaseCommand, CommandError
from student.models import CourseEnrollment
from django_comment_client.permissions import assign_default_role
class Command(BaseCommand):
args = 'course_id'
    help = 'Seed default permissions and roles'
def handle(self, *args, **options):
if len(args) != 0:
raise CommandError("This Command takes no arguments")
print "Updated roles for ",
for i, enrollment in enumerate(CourseEnrollment.objects.all(), start=1):
assign_default_role(None, enrollment)
if i % 1000 == 0:
print "{0}...".format(i),
print
|
Add command to seed forum permissions to existing users
|
Add command to seed forum permissions to existing users
|
Python
|
agpl-3.0
|
IITBinterns13/edx-platform-dev,cpennington/edx-platform,nagyistoce/edx-platform,halvertoluke/edx-platform,kmoocdev2/edx-platform,ovnicraft/edx-platform,ampax/edx-platform,arifsetiawan/edx-platform,edx-solutions/edx-platform,andyzsf/edx,mbareta/edx-platform-ft,Edraak/edraak-platform,sudheerchintala/LearnEraPlatForm,alexthered/kienhoc-platform,peterm-itr/edx-platform,eduNEXT/edx-platform,zofuthan/edx-platform,rismalrv/edx-platform,JioEducation/edx-platform,valtech-mooc/edx-platform,pku9104038/edx-platform,kmoocdev2/edx-platform,ak2703/edx-platform,nagyistoce/edx-platform,AkA84/edx-platform,shurihell/testasia,pomegranited/edx-platform,EduPepperPD/pepper2013,hkawasaki/kawasaki-aio8-1,devs1991/test_edx_docmode,tiagochiavericosta/edx-platform,caesar2164/edx-platform,MakeHer/edx-platform,jolyonb/edx-platform,SravanthiSinha/edx-platform,shurihell/testasia,pelikanchik/edx-platform,bigdatauniversity/edx-platform,mtlchun/edx,BehavioralInsightsTeam/edx-platform,mushtaqak/edx-platform,miptliot/edx-platform,bdero/edx-platform,unicri/edx-platform,WatanabeYasumasa/edx-platform,tiagochiavericosta/edx-platform,jelugbo/tundex,pku9104038/edx-platform,kmoocdev2/edx-platform,kamalx/edx-platform,ahmadiga/min_edx,IndonesiaX/edx-platform,eemirtekin/edx-platform,mtlchun/edx,BehavioralInsightsTeam/edx-platform,mushtaqak/edx-platform,vismartltd/edx-platform,peterm-itr/edx-platform,mjirayu/sit_academy,UOMx/edx-platform,kmoocdev2/edx-platform,jamesblunt/edx-platform,morenopc/edx-platform,nanolearning/edx-platform,utecuy/edx-platform,bitifirefly/edx-platform,proversity-org/edx-platform,auferack08/edx-platform,DefyVentures/edx-platform,openfun/edx-platform,shubhdev/edxOnBaadal,martynovp/edx-platform,arifsetiawan/edx-platform,OmarIthawi/edx-platform,simbs/edx-platform,yokose-ks/edx-platform,cecep-edu/edx-platform,EDUlib/edx-platform,ferabra/edx-platform,proversity-org/edx-platform,jswope00/GAI,zadgroup/edx-platform,hamzehd/edx-platform,nanolearningllc/edx-platform-cypress-2,dcosentino/edx-platform,chudaol/edx-platform,SivilTaram/edx-platform,rue89-tech/edx-platform,jamiefolsom/edx-platform,auferack08/edx-platform,motion2015/edx-platform,edx-solutions/edx-platform,PepperPD/edx-pepper-platform,cpennington/edx-platform,valtech-mooc/edx-platform,playm2mboy/edx-platform,jazztpt/edx-platform,pomegranited/edx-platform,tanmaykm/edx-platform,TeachAtTUM/edx-platform,mcgachey/edx-platform,doganov/edx-platform,rationalAgent/edx-platform-custom,zhenzhai/edx-platform,jjmiranda/edx-platform,don-github/edx-platform,ESOedX/edx-platform,martynovp/edx-platform,kalebhartje/schoolboost,abdoosh00/edraak,tanmaykm/edx-platform,eduNEXT/edunext-platform,syjeon/new_edx,CourseTalk/edx-platform,inares/edx-platform,mushtaqak/edx-platform,vikas1885/test1,cyanna/edx-platform,abdoosh00/edx-rtl-final,kursitet/edx-platform,dsajkl/123,ahmadio/edx-platform,motion2015/edx-platform,IONISx/edx-platform,chand3040/cloud_that,Livit/Livit.Learn.EdX,jolyonb/edx-platform,shubhdev/edxOnBaadal,fly19890211/edx-platform,jruiperezv/ANALYSE,hmcmooc/muddx-platform,don-github/edx-platform,tiagochiavericosta/edx-platform,cyanna/edx-platform,amir-qayyum-khan/edx-platform,UOMx/edx-platform,eduNEXT/edx-platform,BehavioralInsightsTeam/edx-platform,SivilTaram/edx-platform,mitocw/edx-platform,sameetb-cuelogic/edx-platform-test,praveen-pal/edx-platform,appsembler/edx-platform,utecuy/edx-platform,UXE/local-edx,jswope00/GAI,hkawasaki/kawasaki-aio8-0,jamiefolsom/edx-platform,kamalx/edx-platform,antonve/s4-project-mooc,eemirtekin/edx-platform,auferack08/edx-platform,torchingloom/edx-platform,jelugbo/tundex,y12uc231/edx-platform,appsembler/edx-platform,utecuy/edx-platform,jswope00/griffinx,a-parhom/edx-platform,atsolakid/edx-platform,gymnasium/edx-platform,chrisndodge/edx-platform,mjg2203/edx-platform-seas,mcgachey/edx-platform,hkawasaki/kawasaki-aio8-2,RPI-OPENEDX/edx-platform,Softmotions/edx-platform,rue89-tech/edx-platform,simbs/edx-platform,etzhou/edx-platform,jswope00/GAI,ubc/edx-platform,motion2015/edx-platform,Shrhawk/edx-platform,EDUlib/edx-platform,Ayub-Khan/edx-platform,jjmiranda/edx-platform,kamalx/edx-platform,wwj718/edx-platform,dkarakats/edx-platform,devs1991/test_edx_docmode,xuxiao19910803/edx,raccoongang/edx-platform,shabab12/edx-platform,nttks/jenkins-test,xinjiguaike/edx-platform,Livit/Livit.Learn.EdX,eemirtekin/edx-platform,apigee/edx-platform,CredoReference/edx-platform,beacloudgenius/edx-platform,teltek/edx-platform,appsembler/edx-platform,MSOpenTech/edx-platform,ampax/edx-platform-backup,polimediaupv/edx-platform,mjg2203/edx-platform-seas,eduNEXT/edx-platform,TsinghuaX/edx-platform,kxliugang/edx-platform,beni55/edx-platform,kmoocdev/edx-platform,cpennington/edx-platform,10clouds/edx-platform,pdehaye/theming-edx-platform,cselis86/edx-platform,nanolearningllc/edx-platform-cypress,EduPepperPD/pepper2013,edx/edx-platform,hkawasaki/kawasaki-aio8-1,msegado/edx-platform,antoviaque/edx-platform,antonve/s4-project-mooc,mbareta/edx-platform-ft,kmoocdev/edx-platform,morenopc/edx-platform,carsongee/edx-platform,cecep-edu/edx-platform,LICEF/edx-platform,pepeportela/edx-platform,EDUlib/edx-platform,simbs/edx-platform,ferabra/edx-platform,IONISx/edx-platform,vikas1885/test1,MSOpenTech/edx-platform,Edraak/circleci-edx-platform,TsinghuaX/edx-platform,xuxiao19910803/edx,mcgachey/edx-platform,benpatterson/edx-platform,J861449197/edx-platform,devs1991/test_edx_docmode,Softmotions/edx-platform,jamesblunt/edx-platform,jzoldak/edx-platform,antonve/s4-project-mooc,pelikanchik/edx-platform,J861449197/edx-platform,J861449197/edx-platform,pelikanchik/edx-platform,chauhanhardik/populo_2,amir-qayyum-khan/edx-platform,LICEF/edx-platform,jonathan-beard/edx-platform,tanmaykm/edx-platform,rationalAgent/edx-platform-custom,arifsetiawan/edx-platform,cognitiveclass/edx-platform,dsajkl/123,appliedx/edx-platform,kalebhartje/schoolboost,shashank971/edx-platform,vasyarv/edx-platform,pdehaye/theming-edx-platform,ZLLab-Mooc/edx-platform,hamzehd/edx-platform,antonve/s4-project-mooc,wwj718/ANALYSE,TeachAtTUM/edx-platform,dcosentino/edx-platform,atsolakid/edx-platform,synergeticsedx/deployment-wipro,Edraak/edraak-platform,leansoft/edx-platform,angelapper/edx-platform,jazztpt/edx-platform,caesar2164/edx-platform,4eek/edx-platform,chauhanhardik/populo,vikas1885/test1,chudaol/edx-platform,MakeHer/edx-platform,don-github/edx-platform,torchingloom/edx-platform,hmcmooc/muddx-platform,pku9104038/edx-platform,dkarakats/edx-platform,zhenzhai/edx-platform,andyzsf/edx,adoosii/edx-platform,martynovp/edx-platform,adoosii/edx-platform,nttks/edx-platform,hkawasaki/kawasaki-aio8-0,jazkarta/edx-platform-for-isc,Edraak/circleci-edx-platform,jbassen/edx-platform,shurihell/testasia,edx/edx-platform,jelugbo/tundex,dsajkl/reqiop,JioEducation/edx-platform,solashirai/edx-platform,romain-li/edx-platform,mtlchun/edx,CourseTalk/edx-platform,cpennington/edx-platform,zubair-arbi/edx-platform,andyzsf/edx,dcosentino/edx-platform,doismellburning/edx-platform,sameetb-cuelogic/edx-platform-test,y12uc231/edx-platform,chauhanhardik/populo_2,longmen21/edx-platform,mjirayu/sit_academy,ESOedX/edx-platform,dsajkl/123,4eek/edx-platform,defance/edx-platform,benpatterson/edx-platform,iivic/BoiseStateX,morpheby/levelup-by,zadgroup/edx-platform,motion2015/a3,franosincic/edx-platform,nttks/jenkins-test,hamzehd/edx-platform,cognitiveclass/edx-platform,MakeHer/edx-platform,benpatterson/edx-platform,RPI-OPENEDX/edx-platform,pomegranited/edx-platform,doismellburning/edx-platform,zhenzhai/edx-platform,hmcmooc/muddx-platform,nanolearningllc/edx-platform-cypress-2,UXE/local-edx,solashirai/edx-platform,xinjiguaike/edx-platform,OmarIthawi/edx-platform,defance/edx-platform,gsehub/edx-platform,nttks/jenkins-test,Lektorium-LLC/edx-platform,LICEF/edx-platform,bitifirefly/edx-platform,peterm-itr/edx-platform,pomegranited/edx-platform,jswope00/griffinx,vasyarv/edx-platform,nagyistoce/edx-platform,JCBarahona/edX,knehez/edx-platform,ahmedaljazzar/edx-platform,bdero/edx-platform,RPI-OPENEDX/edx-platform,mbareta/edx-platform-ft,lduarte1991/edx-platform,marcore/edx-platform,ubc/edx-platform,TeachAtTUM/edx-platform,torchingloom/edx-platform,jazkarta/edx-platform-for-isc,inares/edx-platform,dsajkl/123,philanthropy-u/edx-platform,utecuy/edx-platform,mushtaqak/edx-platf
orm,raccoongang/edx-platform,shashank971/edx-platform,rhndg/openedx,jswope00/griffinx,lduarte1991/edx-platform,shubhdev/edxOnBaadal,arifsetiawan/edx-platform,don-github/edx-platform,zerobatu/edx-platform,shashank971/edx-platform,msegado/edx-platform,caesar2164/edx-platform,AkA84/edx-platform,appliedx/edx-platform,polimediaupv/edx-platform,jswope00/griffinx,teltek/edx-platform,JioEducation/edx-platform,jamesblunt/edx-platform,shubhdev/openedx,10clouds/edx-platform,devs1991/test_edx_docmode,mjirayu/sit_academy,shubhdev/openedx,Shrhawk/edx-platform,romain-li/edx-platform,carsongee/edx-platform,etzhou/edx-platform,PepperPD/edx-pepper-platform,Endika/edx-platform,playm2mboy/edx-platform,10clouds/edx-platform,zerobatu/edx-platform,itsjeyd/edx-platform,y12uc231/edx-platform,deepsrijit1105/edx-platform,Ayub-Khan/edx-platform,morenopc/edx-platform,tiagochiavericosta/edx-platform,JCBarahona/edX,devs1991/test_edx_docmode,Edraak/edraak-platform,etzhou/edx-platform,Unow/edx-platform,rationalAgent/edx-platform-custom,iivic/BoiseStateX,mjg2203/edx-platform-seas,mtlchun/edx,pabloborrego93/edx-platform,sameetb-cuelogic/edx-platform-test,beacloudgenius/edx-platform,mahendra-r/edx-platform,jbassen/edx-platform,motion2015/edx-platform,jruiperezv/ANALYSE,ahmedaljazzar/edx-platform,jazkarta/edx-platform-for-isc,ampax/edx-platform-backup,ahmadio/edx-platform,alu042/edx-platform,jzoldak/edx-platform,atsolakid/edx-platform,xinjiguaike/edx-platform,cyanna/edx-platform,mitocw/edx-platform,zubair-arbi/edx-platform,nanolearning/edx-platform,longmen21/edx-platform,chauhanhardik/populo,pku9104038/edx-platform,jazztpt/edx-platform,kamalx/edx-platform,Kalyzee/edx-platform,CourseTalk/edx-platform,EduPepperPD/pepper2013,LearnEra/LearnEraPlaftform,jbzdak/edx-platform,stvstnfrd/edx-platform,jazkarta/edx-platform,Edraak/edx-platform,devs1991/test_edx_docmode,miptliot/edx-platform,kxliugang/edx-platform,prarthitm/edxplatform,itsjeyd/edx-platform,mitocw/edx-platform,solashirai/edx-platform,adoosii/edx-platform,eemirtekin/edx-platform,defance/edx-platform,hmcmooc/muddx-platform,jelugbo/tundex,olexiim/edx-platform,ahmadiga/min_edx,B-MOOC/edx-platform,arbrandes/edx-platform,peterm-itr/edx-platform,pepeportela/edx-platform,JCBarahona/edX,LearnEra/LearnEraPlaftform,fly19890211/edx-platform,louyihua/edx-platform,LearnEra/LearnEraPlaftform,hkawasaki/kawasaki-aio8-2,jazkarta/edx-platform-for-isc,eestay/edx-platform,IndonesiaX/edx-platform,motion2015/a3,doganov/edx-platform,nanolearningllc/edx-platform-cypress,Shrhawk/edx-platform,Edraak/edx-platform,leansoft/edx-platform,J861449197/edx-platform,louyihua/edx-platform,Ayub-Khan/edx-platform,Shrhawk/edx-platform,EduPepperPDTesting/pepper2013-testing,jzoldak/edx-platform,fintech-circle/edx-platform,kalebhartje/schoolboost,vismartltd/edx-platform,DNFcode/edx-platform,nagyistoce/edx-platform,kmoocdev2/edx-platform,jbassen/edx-platform,vismartltd/edx-platform,WatanabeYasumasa/edx-platform,Edraak/circleci-edx-platform,benpatterson/edx-platform,chudaol/edx-platform,marcore/edx-platform,alexthered/kienhoc-platform,Livit/Livit.Learn.EdX,pomegranited/edx-platform,doganov/edx-platform,jamesblunt/edx-platform,alu042/edx-platform,praveen-pal/edx-platform,Semi-global/edx-platform,EduPepperPD/pepper2013,RPI-OPENEDX/edx-platform,kmoocdev/edx-platform,Lektorium-LLC/edx-platform,beni55/edx-platform,gymnasium/edx-platform,kursitet/edx-platform,eduNEXT/edunext-platform,arifsetiawan/edx-platform,jjmiranda/edx-platform,naresh21/synergetics-edx-platform,Kalyzee/edx-platform,Endika/edx-platform,appliedx/edx-platf
orm,Unow/edx-platform,jazztpt/edx-platform,EduPepperPDTesting/pepper2013-testing,Stanford-Online/edx-platform,JCBarahona/edX,sudheerchintala/LearnEraPlatForm,xuxiao19910803/edx,ahmadiga/min_edx,B-MOOC/edx-platform,ovnicraft/edx-platform,xinjiguaike/edx-platform,beni55/edx-platform,morenopc/edx-platform,miptliot/edx-platform,abdoosh00/edx-rtl-final,itsjeyd/edx-platform,ampax/edx-platform-backup,B-MOOC/edx-platform,hamzehd/edx-platform,pdehaye/theming-edx-platform,wwj718/edx-platform,nttks/edx-platform,LICEF/edx-platform,mbareta/edx-platform-ft,zhenzhai/edx-platform,abdoosh00/edraak,eestay/edx-platform,UXE/local-edx,morpheby/levelup-by,kmoocdev/edx-platform,mitocw/edx-platform,LICEF/edx-platform,antoviaque/edx-platform,martynovp/edx-platform,zubair-arbi/edx-platform,beni55/edx-platform,nanolearning/edx-platform,abdoosh00/edx-rtl-final,rue89-tech/edx-platform,pepeportela/edx-platform,shubhdev/edx-platform,dsajkl/reqiop,halvertoluke/edx-platform,analyseuc3m/ANALYSE-v1,MSOpenTech/edx-platform,Stanford-Online/edx-platform,valtech-mooc/edx-platform,shubhdev/edx-platform,EduPepperPDTesting/pepper2013-testing,doganov/edx-platform,sudheerchintala/LearnEraPlatForm,cselis86/edx-platform,abdoosh00/edx-rtl-final,ubc/edx-platform,mahendra-r/edx-platform,xuxiao19910803/edx-platform,vismartltd/edx-platform,pku9104038/edx-platform,nanolearningllc/edx-platform-cypress-2,chauhanhardik/populo,vikas1885/test1,rue89-tech/edx-platform,mahendra-r/edx-platform,shubhdev/openedx,motion2015/a3,hkawasaki/kawasaki-aio8-1,ampax/edx-platform-backup,antoviaque/edx-platform,ESOedX/edx-platform,RPI-OPENEDX/edx-platform,Edraak/circleci-edx-platform,prarthitm/edxplatform,edx-solutions/edx-platform,edx/edx-platform,cecep-edu/edx-platform,dkarakats/edx-platform,simbs/edx-platform,LearnEra/LearnEraPlaftform,naresh21/synergetics-edx-platform,yokose-ks/edx-platform,antonve/s4-project-mooc,tiagochiavericosta/edx-platform,Unow/edx-platform,ahmedaljazzar/edx-platform,eemirtekin/edx-platform,appsembler/edx-platform,Lektorium-LLC/edx-platform,EduPepperPDTesting/pepper2013-testing,ZLLab-Mooc/edx-platform,eestay/edx-platform,Endika/edx-platform,dsajkl/123,polimediaupv/edx-platform,shurihell/testasia,Ayub-Khan/edx-platform,deepsrijit1105/edx-platform,tanmaykm/edx-platform,cognitiveclass/edx-platform,eestay/edx-platform,mahendra-r/edx-platform,beni55/edx-platform,B-MOOC/edx-platform,ovnicraft/edx-platform,arbrandes/edx-platform,lduarte1991/edx-platform,dkarakats/edx-platform,franosincic/edx-platform,procangroup/edx-platform,leansoft/edx-platform,abdoosh00/edraak,chand3040/cloud_that,fintech-circle/edx-platform,SivilTaram/edx-platform,IndonesiaX/edx-platform,kmoocdev/edx-platform,motion2015/a3,kamalx/edx-platform,xuxiao19910803/edx,eduNEXT/edunext-platform,ferabra/edx-platform,CredoReference/edx-platform,y12uc231/edx-platform,unicri/edx-platform,jbassen/edx-platform,kursitet/edx-platform,ahmadio/edx-platform,shubhdev/edx-platform,ovnicraft/edx-platform,dsajkl/reqiop,DNFcode/edx-platform,adoosii/edx-platform,jamiefolsom/edx-platform,ahmadio/edx-platform,jazztpt/edx-platform,nttks/edx-platform,chauhanhardik/populo,teltek/edx-platform,benpatterson/edx-platform,ak2703/edx-platform,gsehub/edx-platform,OmarIthawi/edx-platform,msegado/edx-platform,analyseuc3m/ANALYSE-v1,msegado/edx-platform,eestay/edx-platform,chrisndodge/edx-platform,olexiim/edx-platform,devs1991/test_edx_docmode,knehez/edx-platform,cognitiveclass/edx-platform,bigdatauniversity/edx-platform,ZLLab-Mooc/edx-platform,MSOpenTech/edx-platform,chrisndodge/edx-platform,openfun/edx-platfor
m,shubhdev/edx-platform,alu042/edx-platform,sudheerchintala/LearnEraPlatForm,don-github/edx-platform,pepeportela/edx-platform,olexiim/edx-platform,shashank971/edx-platform,polimediaupv/edx-platform,teltek/edx-platform,kursitet/edx-platform,angelapper/edx-platform,WatanabeYasumasa/edx-platform,halvertoluke/edx-platform,kxliugang/edx-platform,shubhdev/edxOnBaadal,amir-qayyum-khan/edx-platform,Livit/Livit.Learn.EdX,a-parhom/edx-platform,wwj718/ANALYSE,IITBinterns13/edx-platform-dev,JioEducation/edx-platform,y12uc231/edx-platform,jbzdak/edx-platform,wwj718/ANALYSE,ZLLab-Mooc/edx-platform,syjeon/new_edx,Shrhawk/edx-platform,rismalrv/edx-platform,zerobatu/edx-platform,rismalrv/edx-platform,iivic/BoiseStateX,SravanthiSinha/edx-platform,xuxiao19910803/edx-platform,jbassen/edx-platform,IndonesiaX/edx-platform,gymnasium/edx-platform,nikolas/edx-platform,jswope00/griffinx,waheedahmed/edx-platform,hkawasaki/kawasaki-aio8-1,xuxiao19910803/edx,eduNEXT/edunext-platform,auferack08/edx-platform,bdero/edx-platform,Kalyzee/edx-platform,ak2703/edx-platform,DefyVentures/edx-platform,nanolearningllc/edx-platform-cypress,appliedx/edx-platform,mtlchun/edx,abdoosh00/edraak,nikolas/edx-platform,chauhanhardik/populo_2,morpheby/levelup-by,proversity-org/edx-platform,marcore/edx-platform,jazkarta/edx-platform,romain-li/edx-platform,shubhdev/edxOnBaadal,UOMx/edx-platform,cselis86/edx-platform,bitifirefly/edx-platform,J861449197/edx-platform,fly19890211/edx-platform,nagyistoce/edx-platform,syjeon/new_edx,shubhdev/openedx,nttks/edx-platform,simbs/edx-platform,ampax/edx-platform,JCBarahona/edX,franosincic/edx-platform,antoviaque/edx-platform,Semi-global/edx-platform,martynovp/edx-platform,itsjeyd/edx-platform,openfun/edx-platform,jonathan-beard/edx-platform,Stanford-Online/edx-platform,inares/edx-platform,franosincic/edx-platform,PepperPD/edx-pepper-platform,angelapper/edx-platform,ZLLab-Mooc/edx-platform,waheedahmed/edx-platform,DefyVentures/edx-platform,synergeticsedx/deployment-wipro,polimediaupv/edx-platform,rismalrv/edx-platform,hamzehd/edx-platform,vismartltd/edx-platform,lduarte1991/edx-platform,philanthropy-u/edx-platform,ovnicraft/edx-platform,TeachAtTUM/edx-platform,miptliot/edx-platform,zofuthan/edx-platform,valtech-mooc/edx-platform,procangroup/edx-platform,doismellburning/edx-platform,dsajkl/reqiop,xingyepei/edx-platform,bitifirefly/edx-platform,louyihua/edx-platform,ESOedX/edx-platform,AkA84/edx-platform,cyanna/edx-platform,mjirayu/sit_academy,msegado/edx-platform,doismellburning/edx-platform,waheedahmed/edx-platform,solashirai/edx-platform,ampax/edx-platform,deepsrijit1105/edx-platform,chauhanhardik/populo,CredoReference/edx-platform,UOMx/edx-platform,leansoft/edx-platform,deepsrijit1105/edx-platform,Unow/edx-platform,BehavioralInsightsTeam/edx-platform,SravanthiSinha/edx-platform,zadgroup/edx-platform,MSOpenTech/edx-platform,carsongee/edx-platform,syjeon/new_edx,UXE/local-edx,torchingloom/edx-platform,B-MOOC/edx-platform,ubc/edx-platform,edry/edx-platform,EduPepperPDTesting/pepper2013-testing,jamiefolsom/edx-platform,nikolas/edx-platform,nanolearning/edx-platform,jolyonb/edx-platform,Lektorium-LLC/edx-platform,jswope00/GAI,atsolakid/edx-platform,bigdatauniversity/edx-platform,stvstnfrd/edx-platform,zadgroup/edx-platform,raccoongang/edx-platform,jruiperezv/ANALYSE,edry/edx-platform,wwj718/edx-platform,prarthitm/edxplatform,kxliugang/edx-platform,wwj718/ANALYSE,xingyepei/edx-platform,yokose-ks/edx-platform,stvstnfrd/edx-platform,xingyepei/edx-platform,nttks/jenkins-test,pabloborrego93/edx-platform,cognitivecl
ass/edx-platform,zerobatu/edx-platform,vikas1885/test1,alu042/edx-platform,edry/edx-platform,pelikanchik/edx-platform,mcgachey/edx-platform,utecuy/edx-platform,philanthropy-u/edx-platform,PepperPD/edx-pepper-platform
|
Add command to seed forum permissions to existing users
|
"""
This must be run only after seed_permissions_roles.py!
Creates default roles for all users currently in the database. Just runs through
Enrollments.
"""
from django.core.management.base import BaseCommand, CommandError
from student.models import CourseEnrollment
from django_comment_client.permissions import assign_default_role
class Command(BaseCommand):
args = 'course_id'
    help = 'Seed default permissions and roles'
def handle(self, *args, **options):
if len(args) != 0:
raise CommandError("This Command takes no arguments")
print "Updated roles for ",
for i, enrollment in enumerate(CourseEnrollment.objects.all(), start=1):
assign_default_role(None, enrollment)
if i % 1000 == 0:
print "{0}...".format(i),
print
|
<commit_before><commit_msg>Add command to seed forum permissions to existing users<commit_after>
|
"""
This must be run only after seed_permissions_roles.py!
Creates default roles for all users currently in the database. Just runs through
Enrollments.
"""
from django.core.management.base import BaseCommand, CommandError
from student.models import CourseEnrollment
from django_comment_client.permissions import assign_default_role
class Command(BaseCommand):
args = 'course_id'
    help = 'Seed default permissions and roles'
def handle(self, *args, **options):
if len(args) != 0:
raise CommandError("This Command takes no arguments")
print "Updated roles for ",
for i, enrollment in enumerate(CourseEnrollment.objects.all(), start=1):
assign_default_role(None, enrollment)
if i % 1000 == 0:
print "{0}...".format(i),
print
|
Add command to seed forum permissions to existing users"""
This must be run only after seed_permissions_roles.py!
Creates default roles for all users currently in the database. Just runs through
Enrollments.
"""
from django.core.management.base import BaseCommand, CommandError
from student.models import CourseEnrollment
from django_comment_client.permissions import assign_default_role
class Command(BaseCommand):
args = 'course_id'
    help = 'Seed default permissions and roles'
def handle(self, *args, **options):
if len(args) != 0:
raise CommandError("This Command takes no arguments")
print "Updated roles for ",
for i, enrollment in enumerate(CourseEnrollment.objects.all(), start=1):
assign_default_role(None, enrollment)
if i % 1000 == 0:
print "{0}...".format(i),
print
|
<commit_before><commit_msg>Add command to seed forum permissions to existing users<commit_after>"""
This must be run only after seed_permissions_roles.py!
Creates default roles for all users currently in the database. Just runs through
Enrollments.
"""
from django.core.management.base import BaseCommand, CommandError
from student.models import CourseEnrollment
from django_comment_client.permissions import assign_default_role
class Command(BaseCommand):
args = 'course_id'
    help = 'Seed default permissions and roles'
def handle(self, *args, **options):
if len(args) != 0:
raise CommandError("This Command takes no arguments")
print "Updated roles for ",
for i, enrollment in enumerate(CourseEnrollment.objects.all(), start=1):
assign_default_role(None, enrollment)
if i % 1000 == 0:
print "{0}...".format(i),
print
|
|
7b4b07dddccfe11dd42aa3613c68acc2939bef41
|
src/screenquota.py
|
src/screenquota.py
|
#!/usr/bin/env python
import cec # from python-cec
import os.path
import time
cec.init()
for address, device in cec.list_devices().items():
print device.address, device.vendor, device.osd_string
tv = cec.Device(0)
while True:
if os.path.exists('/var/opt/screendisable/disable'):
try:
print tv.standby()
except:
print 'Reinitialising'
cec.init()
tv = cec.Device(0)
time.sleep(2)
|
Disable tv through raspberry pi's cec interface.
|
Disable tv through raspberry pi's cec interface.
|
Python
|
mit
|
vlimit/screenquota,vlimit/screenquota
|
Disable tv through raspberry pi's cec interface.
|
#!/usr/bin/env python
import cec # from python-cec
import os.path
import time
cec.init()
for address, device in cec.list_devices().items():
print device.address, device.vendor, device.osd_string
tv = cec.Device(0)
while True:
if os.path.exists('/var/opt/screendisable/disable'):
try:
print tv.standby()
except:
print 'Reinitialising'
cec.init()
tv = cec.Device(0)
time.sleep(2)
|
<commit_before><commit_msg>Disable tv through raspberry pi's cec interface.<commit_after>
|
#!/usr/bin/env python
import cec # from python-cec
import os.path
import time
cec.init()
for address, device in cec.list_devices().items():
print device.address, device.vendor, device.osd_string
tv = cec.Device(0)
while True:
if os.path.exists('/var/opt/screendisable/disable'):
try:
print tv.standby()
except:
print 'Reinitialising'
cec.init()
tv = cec.Device(0)
time.sleep(2)
|
Disable tv through raspberry pi's cec interface.#!/usr/bin/env python
import cec # from python-cec
import os.path
import time
cec.init()
for address, device in cec.list_devices().items():
print device.address, device.vendor, device.osd_string
tv = cec.Device(0)
while True:
if os.path.exists('/var/opt/screendisable/disable'):
try:
print tv.standby()
except:
print 'Reinitialising'
cec.init()
tv = cec.Device(0)
time.sleep(2)
|
<commit_before><commit_msg>Disable tv through raspberry pi's cec interface.<commit_after>#!/usr/bin/env python
import cec # from python-cec
import os.path
import time
cec.init()
for address, device in cec.list_devices().items():
print device.address, device.vendor, device.osd_string
tv = cec.Device(0)
while True:
if os.path.exists('/var/opt/screendisable/disable'):
try:
print tv.standby()
except:
print 'Reinitialising'
cec.init()
tv = cec.Device(0)
time.sleep(2)
|
|
d3ab1bb80945f8ebf6c16620e5629f497bdc5a17
|
violations/tests/test_dummy.py
|
violations/tests/test_dummy.py
|
from django.test import TestCase
from tasks.const import STATUS_SUCCESS
from ..dummy import dummy_violation
class DummyViolationCase(TestCase):
"""Dummy violation case"""
def test_result(self):
"""Test dummy violation result"""
data = {'raw': ''}
result = dummy_violation(data)
self.assertEqual(result['status'], STATUS_SUCCESS)
|
Add test for dummy violation
|
Add test for dummy violation
|
Python
|
mit
|
nvbn/coviolations_web,nvbn/coviolations_web
|
Add test for dummy violation
|
from django.test import TestCase
from tasks.const import STATUS_SUCCESS
from ..dummy import dummy_violation
class DummyViolationCase(TestCase):
"""Dummy violation case"""
def test_result(self):
"""Test dummy violation result"""
data = {'raw': ''}
result = dummy_violation(data)
self.assertEqual(result['status'], STATUS_SUCCESS)
|
<commit_before><commit_msg>Add test for dummy violation<commit_after>
|
from django.test import TestCase
from tasks.const import STATUS_SUCCESS
from ..dummy import dummy_violation
class DummyViolationCase(TestCase):
"""Dummy violation case"""
def test_result(self):
"""Test dummy violation result"""
data = {'raw': ''}
result = dummy_violation(data)
self.assertEqual(result['status'], STATUS_SUCCESS)
|
Add test for dummy violationfrom django.test import TestCase
from tasks.const import STATUS_SUCCESS
from ..dummy import dummy_violation
class DummyViolationCase(TestCase):
"""Dummy violation case"""
def test_result(self):
"""Test dummy violation result"""
data = {'raw': ''}
result = dummy_violation(data)
self.assertEqual(result['status'], STATUS_SUCCESS)
|
<commit_before><commit_msg>Add test for dummy violation<commit_after>from django.test import TestCase
from tasks.const import STATUS_SUCCESS
from ..dummy import dummy_violation
class DummyViolationCase(TestCase):
"""Dummy violation case"""
def test_result(self):
"""Test dummy violation result"""
data = {'raw': ''}
result = dummy_violation(data)
self.assertEqual(result['status'], STATUS_SUCCESS)
|
|
21c60fdd99e37436228cfe8e59f1b8788ea2b58b
|
platformio/builder/tools/pioar.py
|
platformio/builder/tools/pioar.py
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError:
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError: # pylint: disable=E0602
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
|
Hide PyLint warning with undefined WindowsError exception
|
Hide PyLint warning with undefined WindowsError exception
|
Python
|
apache-2.0
|
dkuku/platformio,mseroczynski/platformio,jrobeson/platformio,bkudria/platformio,awong1900/platformio,TimJay/platformio,TimJay/platformio,jrobeson/platformio,awong1900/platformio,awong1900/platformio,mcanthony/platformio,TimJay/platformio,ZachMassia/platformio,platformio/platformio-core,TimJay/platformio,platformio/platformio-core,jrobeson/platformio,platformio/platformio,valeros/platformio,jrobeson/platformio,bkudria/platformio,TimJay/platformio,bkudria/platformio,mplewis/platformio,bkudria/platformio,eiginn/platformio,atyenoria/platformio
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError:
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
Hide PyLint warning with undefined WindowsError exception
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError: # pylint: disable=E0602
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
|
<commit_before># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError:
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
<commit_msg>Hide PyLint warning with undefined WindowsError exception<commit_after>
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError: # pylint: disable=E0602
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError:
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
Hide PyLint warning with undefined WindowsError exception# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError: # pylint: disable=E0602
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
|
<commit_before># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError:
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
<commit_msg>Hide PyLint warning with undefined WindowsError exception<commit_after># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
import atexit
from os import remove
from tempfile import mkstemp
MAX_SOURCES_LENGTH = 8000 # Windows CLI has limit with command length to 8192
def _remove_tmpfile(path):
try:
remove(path)
except WindowsError: # pylint: disable=E0602
pass
def _huge_sources_hook(sources):
if len(str(sources)) < MAX_SOURCES_LENGTH:
return sources
_, tmp_file = mkstemp()
with open(tmp_file, "w") as f:
f.write(str(sources).replace("\\", "/"))
atexit.register(_remove_tmpfile, tmp_file)
return "@%s" % tmp_file
def exists(_):
return True
def generate(env):
env.Replace(
_huge_sources_hook=_huge_sources_hook,
ARCOM=env.get("ARCOM", "").replace(
"$SOURCES", "${_huge_sources_hook(SOURCES)}"))
return env
|
8e59e994370749db03ffda32bc449049c49a3f22
|
python/example_code/sqs/change_visibility.py
|
python/example_code/sqs/change_visibility.py
|
# Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import boto3
# Create SQS client
sqs = boto3.client('sqs')
queue_url = 'SQS_QUEUE_URL'
# Receive message from SQS queue
response = sqs.receive_message(
QueueUrl=queue_url,
AttributeNames=[
'SentTimestamp'
],
MaxNumberOfMessages=1,
MessageAttributeNames=[
'All'
],
)
message = response['Messages'][0]
receipt_handle = message['ReceiptHandle']
# Change visibility timeout of message from queue
sqs.change_message_visibility(
QueueUrl=queue_url,
ReceiptHandle=receipt_handle,
VisibilityTimeout=36000
)
print('Received and changed visibility timeout of message: %s' % message)
|
Add SQS visibility timeout message example
|
Add SQS visibility timeout message example
|
Python
|
apache-2.0
|
awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,imshashank/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples,awsdocs/aws-doc-sdk-examples
|
Add SQS visibility timeout message example
|
# Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import boto3
# Create SQS client
sqs = boto3.client('sqs')
queue_url = 'SQS_QUEUE_URL'
# Receive message from SQS queue
response = sqs.receive_message(
QueueUrl=queue_url,
AttributeNames=[
'SentTimestamp'
],
MaxNumberOfMessages=1,
MessageAttributeNames=[
'All'
],
)
message = response['Messages'][0]
receipt_handle = message['ReceiptHandle']
# Change visibility timeout of message from queue
sqs.change_message_visibility(
QueueUrl=queue_url,
ReceiptHandle=receipt_handle,
VisibilityTimeout=36000
)
print('Received and changed visibility timeout of message: %s' % message)
|
<commit_before><commit_msg>Add SQS visibility timeout message example<commit_after>
|
# Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import boto3
# Create SQS client
sqs = boto3.client('sqs')
queue_url = 'SQS_QUEUE_URL'
# Receive message from SQS queue
response = sqs.receive_message(
QueueUrl=queue_url,
AttributeNames=[
'SentTimestamp'
],
MaxNumberOfMessages=1,
MessageAttributeNames=[
'All'
],
)
message = response['Messages'][0]
receipt_handle = message['ReceiptHandle']
# Change visibility timeout of message from queue
sqs.change_message_visibility(
QueueUrl=queue_url,
ReceiptHandle=receipt_handle,
VisibilityTimeout=36000
)
print('Received and changed visibility timeout of message: %s' % message)
|
Add SQS visibility timeout message example# Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import boto3
# Create SQS client
sqs = boto3.client('sqs')
queue_url = 'SQS_QUEUE_URL'
# Receive message from SQS queue
response = sqs.receive_message(
QueueUrl=queue_url,
AttributeNames=[
'SentTimestamp'
],
MaxNumberOfMessages=1,
MessageAttributeNames=[
'All'
],
)
message = response['Messages'][0]
receipt_handle = message['ReceiptHandle']
# Change visibility timeout of message from queue
sqs.change_message_visibility(
QueueUrl=queue_url,
ReceiptHandle=receipt_handle,
VisibilityTimeout=36000
)
print('Received and changed visibility timeout of message: %s' % message)
|
<commit_before><commit_msg>Add SQS visibility timeout message example<commit_after># Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import boto3
# Create SQS client
sqs = boto3.client('sqs')
queue_url = 'SQS_QUEUE_URL'
# Receive message from SQS queue
response = sqs.receive_message(
QueueUrl=queue_url,
AttributeNames=[
'SentTimestamp'
],
MaxNumberOfMessages=1,
MessageAttributeNames=[
'All'
],
)
message = response['Messages'][0]
receipt_handle = message['ReceiptHandle']
# Change visibility timeout of message from queue
sqs.change_message_visibility(
QueueUrl=queue_url,
ReceiptHandle=receipt_handle,
VisibilityTimeout=36000
)
print('Received and changed visibility timeout of message: %s' % message)
|
|
a88d9181497ac18a3905e33664e1642e68b192f8
|
reports/migrations/0003_indonesia-weights.py
|
reports/migrations/0003_indonesia-weights.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django_countries import countries
def populate_weights(apps, schema_editor):
Weights = apps.get_model("reports", "Weights")
db_alias = schema_editor.connection.alias
for item in COUNTRY_WEIGHTS:
country = item['Country']
item.pop('Country')
for media_type, weight in item.iteritems():
w = Weights.objects.using(db_alias).create(
country=country,
media_type=media_type,
weight=weight)
w.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('reports', '0002_populate_weights'),
]
operations = [
migrations.RunPython(
populate_weights,
backwards,
),
]
COUNTRY_WEIGHTS= [
{'Country': 'ID',
'Internet': '0',
'Print': '11',
'Radio': '1',
'Television': '7',
'Twitter': '0'}]
|
Add Indonesia weights as migration
|
Add Indonesia weights as migration
|
Python
|
apache-2.0
|
Code4SA/gmmp,Code4SA/gmmp,Code4SA/gmmp
|
Add Indonesia weights as migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django_countries import countries
def populate_weights(apps, schema_editor):
Weights = apps.get_model("reports", "Weights")
db_alias = schema_editor.connection.alias
for item in COUNTRY_WEIGHTS:
country = item['Country']
item.pop('Country')
for media_type, weight in item.iteritems():
w = Weights.objects.using(db_alias).create(
country=country,
media_type=media_type,
weight=weight)
w.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('reports', '0002_populate_weights'),
]
operations = [
migrations.RunPython(
populate_weights,
backwards,
),
]
COUNTRY_WEIGHTS= [
{'Country': 'ID',
'Internet': '0',
'Print': '11',
'Radio': '1',
'Television': '7',
'Twitter': '0'}]
|
<commit_before><commit_msg>Add Indonesia weights as migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django_countries import countries
def populate_weights(apps, schema_editor):
Weights = apps.get_model("reports", "Weights")
db_alias = schema_editor.connection.alias
for item in COUNTRY_WEIGHTS:
country = item['Country']
item.pop('Country')
for media_type, weight in item.iteritems():
w = Weights.objects.using(db_alias).create(
country=country,
media_type=media_type,
weight=weight)
w.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('reports', '0002_populate_weights'),
]
operations = [
migrations.RunPython(
populate_weights,
backwards,
),
]
COUNTRY_WEIGHTS= [
{'Country': 'ID',
'Internet': '0',
'Print': '11',
'Radio': '1',
'Television': '7',
'Twitter': '0'}]
|
Add Indonesia weights as migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django_countries import countries
def populate_weights(apps, schema_editor):
Weights = apps.get_model("reports", "Weights")
db_alias = schema_editor.connection.alias
for item in COUNTRY_WEIGHTS:
country = item['Country']
item.pop('Country')
for media_type, weight in item.iteritems():
w = Weights.objects.using(db_alias).create(
country=country,
media_type=media_type,
weight=weight)
w.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('reports', '0002_populate_weights'),
]
operations = [
migrations.RunPython(
populate_weights,
backwards,
),
]
COUNTRY_WEIGHTS= [
{'Country': 'ID',
'Internet': '0',
'Print': '11',
'Radio': '1',
'Television': '7',
'Twitter': '0'}]
|
<commit_before><commit_msg>Add Indonesia weights as migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django_countries import countries
def populate_weights(apps, schema_editor):
Weights = apps.get_model("reports", "Weights")
db_alias = schema_editor.connection.alias
for item in COUNTRY_WEIGHTS:
country = item['Country']
item.pop('Country')
for media_type, weight in item.iteritems():
w = Weights.objects.using(db_alias).create(
country=country,
media_type=media_type,
weight=weight)
w.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('reports', '0002_populate_weights'),
]
operations = [
migrations.RunPython(
populate_weights,
backwards,
),
]
COUNTRY_WEIGHTS= [
{'Country': 'ID',
'Internet': '0',
'Print': '11',
'Radio': '1',
'Television': '7',
'Twitter': '0'}]
|
|
903b448a5a6e8eb882fbfa7cb48fa9e17d7c21af
|
tests/test_main.py
|
tests/test_main.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twine import __main__ as dunder_main
import pretend
def test_exception_handling(monkeypatch):
replaced_dispatch = pretend.raiser(KeyError('foo'))
monkeypatch.setattr(dunder_main, 'dispatch', replaced_dispatch)
assert dunder_main.main() == 'KeyError: foo'
|
Add tests around our new handling of exceptions
|
Add tests around our new handling of exceptions
|
Python
|
apache-2.0
|
pypa/twine
|
Add tests around our new handling of exceptions
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twine import __main__ as dunder_main
import pretend
def test_exception_handling(monkeypatch):
replaced_dispatch = pretend.raiser(KeyError('foo'))
monkeypatch.setattr(dunder_main, 'dispatch', replaced_dispatch)
assert dunder_main.main() == 'KeyError: foo'
|
<commit_before><commit_msg>Add tests around our new handling of exceptions<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twine import __main__ as dunder_main
import pretend
def test_exception_handling(monkeypatch):
replaced_dispatch = pretend.raiser(KeyError('foo'))
monkeypatch.setattr(dunder_main, 'dispatch', replaced_dispatch)
assert dunder_main.main() == 'KeyError: foo'
|
Add tests around our new handling of exceptions# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twine import __main__ as dunder_main
import pretend
def test_exception_handling(monkeypatch):
replaced_dispatch = pretend.raiser(KeyError('foo'))
monkeypatch.setattr(dunder_main, 'dispatch', replaced_dispatch)
assert dunder_main.main() == 'KeyError: foo'
|
<commit_before><commit_msg>Add tests around our new handling of exceptions<commit_after># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twine import __main__ as dunder_main
import pretend
def test_exception_handling(monkeypatch):
replaced_dispatch = pretend.raiser(KeyError('foo'))
monkeypatch.setattr(dunder_main, 'dispatch', replaced_dispatch)
assert dunder_main.main() == 'KeyError: foo'
|
|
3b684eeadb0c8b39593b14c15233a314bbab0895
|
troposphere/sns.py
|
troposphere/sns.py
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'RedrivePolicy': (dict, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
Update SNS per 2019-11-21 changes
|
Update SNS per 2019-11-21 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,ikben/troposphere,ikben/troposphere,cloudtools/troposphere
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
Update SNS per 2019-11-21 changes
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'RedrivePolicy': (dict, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
<commit_before># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
<commit_msg>Update SNS per 2019-11-21 changes<commit_after>
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'RedrivePolicy': (dict, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
Update SNS per 2019-11-21 changes# Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'RedrivePolicy': (dict, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
<commit_before># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
<commit_msg>Update SNS per 2019-11-21 changes<commit_after># Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'RedrivePolicy': (dict, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|