| code (stringlengths 2–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int32 2–1.05M) |
|---|---|---|---|---|---|
"""List iSCSI Snapshots."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
from SoftLayer import utils
import click
@click.command()
@click.argument('iscsi-identifier')
@environment.pass_env
def cli(env, iscsi_identifier):
"""List iSCSI Snapshots."""
iscsi_mgr = SoftLayer.ISCSIManager(env.client)
iscsi_id = helpers.resolve_id(iscsi_mgr.resolve_ids,
iscsi_identifier,
'iSCSI')
iscsi = env.client['Network_Storage_Iscsi']
snapshots = iscsi.getPartnerships(
mask='volumeId,partnerVolumeId,createDate,type', id=iscsi_id)
snapshots = [utils.NestedDict(n) for n in snapshots]
table = formatting.Table(['id', 'createDate', 'name', 'description'])
for snapshot in snapshots:
table.add_row([
snapshot['partnerVolumeId'],
snapshot['createDate'],
snapshot['type']['name'],
snapshot['type']['description'],
])
return table
|
cloudify-cosmo/softlayer-python
|
SoftLayer/CLI/snapshot/list.py
|
Python
|
mit
| 1,122
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module shows the correct way to implement constants with this package.
"""
from const.base import Base
class Apple(Base):
__attrs__ = ["id", "name"]
def __init__(self, id=None, name=None):
self.id = id
self.name = name
class Banana(Base):
__attrs__ = ["id", "name"]
def __init__(self, id=None, name=None):
self.id = id
self.name = name
class Fruit(Base):
__attrs__ = ["id", "name"]
def __init__(self, id=None, name=None):
self.id = id
self.name = name
class Meat(Base):
__attrs__ = ["id", "name"]
def __init__(self, id=None, name=None):
self.id = id
self.name = name
class Food(Base):
__attrs__ = ["id", "name"]
def __init__(self, id=None, name=None):
self.id = id
self.name = name
# Apple
apple_name____red_apple = Apple(id=1, name="red apple")
apple_name____green_apple = Apple(id=2, name="green apple")
class AppleCol(Fruit):
_collection = list()
_collection.append(apple_name____red_apple)
name____red_apple = apple_name____red_apple
_collection.append(apple_name____green_apple)
name____green_apple = apple_name____green_apple
# Banana
banana_name____yellow_banana = Banana(id=1, name="yellow banana")
banana_name____green_banana = Banana(id=2, name="green banana")
class BananaCol(Fruit):
_collection = list()
_collection.append(banana_name____yellow_banana)
name____yellow_banana = banana_name____yellow_banana
_collection.append(banana_name____green_banana)
name____green_banana = banana_name____green_banana
# Fruit
fruit_name____Apple = AppleCol(id=1, name="Apple")
fruit_name____Banana = BananaCol(id=2, name="Banana")
class FruitCol(Food):
_collection = list()
_collection.append(fruit_name____Apple)
name____Apple = fruit_name____Apple
_collection.append(fruit_name____Banana)
name____Banana = fruit_name____Banana
# Meat
meat_name____Pork = Meat(id=1, name='Pork')
meat_name____Beef = Meat(id=2, name='Beef')
class MeatCol(Food):
_collection = list()
_collection.append(meat_name____Pork)
name____Pork = meat_name____Pork
_collection.append(meat_name____Beef)
name____Beef = meat_name____Beef
# Food
food_name____Fruit = FruitCol(id=1, name="Fruit")
food_name____Meat = MeatCol(id=2, name="Meat")
class FoodCol(Base):
_collection = list()
_collection.append(food_name____Fruit)
name____Fruit = food_name____Fruit
_collection.append(food_name____Meat)
name____Meat = food_name____Meat
food_col = FoodCol()
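# A minimal illustrative check (not part of the original example; it assumes
# `Base` behaves like a plain attribute container) showing how the nested
# constant collections above can be addressed:
if __name__ == '__main__':
    assert FoodCol.name____Fruit.name == "Fruit"
    assert FoodCol.name____Fruit.name____Apple.name == "Apple"
    assert FruitCol.name____Apple.name____red_apple.name == "red apple"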
|
MacHu-GWU/constant-project
|
constant/test/dev/food_example.py
|
Python
|
mit
| 2,617
|
# coding=utf-8
import glob
import os
class File(object):
def __init__(self, path):
self.original = path
self.abspath = os.path.abspath(path)
def __str__(self):
prefix = ''
if self.isfile:
prefix = 'file: '
elif self.isdir:
prefix = 'dir: '
return prefix + self.original
def dir_required(self):
if not self.isdir:
raise ValueError('Only dir is supported for this operation.')
def file_required(self):
if not self.isfile:
raise ValueError('Only file is supported for this operation.')
@staticmethod
def join(path, *paths):
return os.path.join(path, *paths)
@property
def name(self):
return os.path.basename(self.abspath)
@property
def name_without_ext(self):
basename = os.path.basename(self.abspath)
return os.path.splitext(basename)[0]
@property
def ext(self):
return os.path.splitext(self.abspath)[1]
@property
def isfile(self):
return os.path.isfile(self.abspath)
@property
def isdir(self):
return os.path.isdir(self.abspath)
@property
def exists(self):
return os.path.exists(self.abspath)
def find(self, pattern='*'):
self.dir_required()
wd = os.path.realpath(self.abspath)
return [File(f) for f in glob.glob(os.path.join(wd, pattern))]
def subdirs(self):
self.dir_required()
return [f for f in self.find() if f.isdir]
def files(self, pattern='*'):
self.dir_required()
return [f for f in self.find(pattern) if f.isfile]
def create_if_not_exists(self):
if not self.exists:
os.makedirs(self.abspath)
def remove(self):
if self.isdir:
os.removedirs(self.abspath)
else:
os.remove(self.abspath)
def write(self, s, mode='w', encoding='utf-8'):
with open(self.abspath, mode=mode, encoding=encoding) as f:
f.write(s)
def writelines(self, lines, mode='w', encoding='utf-8'):
with open(self.abspath, mode=mode, encoding=encoding) as f:
f.writelines(lines)
def append(self, s):
self.write(s, 'a')
def appendlines(self, lines):
self.writelines(lines, 'a')
def readlines(self, mode='r', encoding='utf-8'):
with open(self.abspath, mode, encoding=encoding) as f:
for line in f:
yield line
# read json
# write json
# pickle?
# create tmp
# move to
# iterable
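# Illustrative usage sketch (not part of the original module; the path and
# glob pattern below are assumptions):
if __name__ == '__main__':
    here = File('.')
    print(here)                      # e.g. "dir: ."
    for py_file in here.files('*.py'):
        print(py_file.name_without_ext, py_file.ext)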
|
anderscui/nails
|
nails/filesystem.py
|
Python
|
mit
| 2,608
|
from django.template.loader import render_to_string
from tasks.const import STATUS_SUCCESS
from .base import library
@library.register('coverage')
def coverage_violation(data):
"""Coverage violation parser
:param data: task data
:type data: dict
:returns: dict
"""
data['status'] = STATUS_SUCCESS
line = data['raw'].split('\n')[-2]
statements, miss, cover = [
part for part in line.split(' ')
if len(part) > 0 and 'TOTAL' not in part
]
each_file = [
filter(len, line.split(' '))
for line in data['raw'].split('\n')[2:-3]
]
data['preview'] = render_to_string('violations/coverage/preview.html', {
'statements': statements,
'miss': miss,
'cover': cover,
})
data['prepared'] = render_to_string('violations/coverage/prepared.html', {
'statements': statements,
'miss': miss,
'cover': cover,
'each_file': each_file,
})
data['plot'] = {
'cover': int(cover[:-1]),
}
data['success_percent'] = int(cover[:-1])
return data
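# For reference, an illustrative `coverage report` layout that the slicing
# above assumes (two header lines, one row per file, a separator, the TOTAL
# row, then a trailing newline); file names and numbers are made up:
#
#     Name        Stmts   Miss  Cover
#     -------------------------------
#     pkg/a.py       10      2    80%
#     pkg/b.py       20      0   100%
#     -------------------------------
#     TOTAL          30      2    93%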
|
nvbn/coviolations_web
|
violations/coverage.py
|
Python
|
mit
| 1,087
|
from .pypvwatts import PVWatts
from .pvwattserror import PVWattsValidationError
|
mpaolino/pypvwatts
|
pypvwatts/__init__.py
|
Python
|
mit
| 80
|
from __future__ import absolute_import
# Copyright (c) 2010-2015 openpyxl
import datetime
import decimal
from io import BytesIO
from openpyxl.xml.functions import tostring, xmlfile
from openpyxl.utils.indexed_list import IndexedList
from openpyxl.utils.datetime import CALENDAR_WINDOWS_1900
from openpyxl.styles import Style
from openpyxl.styles.styleable import StyleId
from openpyxl.tests.helper import compare_xml
import pytest
class DummyLocalData:
pass
class DummyWorkbook:
def __init__(self):
self.shared_strings = IndexedList()
self.shared_styles = [Style()]
self._cell_styles = IndexedList([StyleId(0, 0, 0, 0, 0, 0)])
self._number_formats = IndexedList()
self._local_data = DummyLocalData()
self.encoding = "UTF-8"
self.excel_base_date = CALENDAR_WINDOWS_1900
def get_sheet_names(self):
return []
@pytest.fixture
def DumpWorksheet():
from .. dump_worksheet import DumpWorksheet
return DumpWorksheet(DummyWorkbook(), title="TestWorksheet")
@pytest.mark.lxml_required
def test_write_header(DumpWorksheet):
ws = DumpWorksheet
doc = ws._write_header()
next(doc)
doc.close()
header = open(ws.filename)
xml = header.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData/>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
def test_append(DumpWorksheet):
ws = DumpWorksheet
def _writer(doc):
with xmlfile(doc) as xf:
with xf.element('sheetData'):
try:
while True:
body = (yield)
xf.write(body)
except GeneratorExit:
pass
doc = BytesIO()
ws.writer = _writer(doc)
next(ws.writer)
ws.append([1, "s"])
ws.append(['2', 3])
ws.append(i for i in [1, 2])
ws.writer.close()
xml = doc.getvalue()
expected = """
<sheetData>
<row r="1" spans="1:2">
<c r="A1" t="n">
<v>1</v>
</c>
<c r="B1" t="s">
<v>0</v>
</c>
</row>
<row r="2" spans="1:2">
<c r="A2" t="s">
<v>1</v>
</c>
<c r="B2" t="n">
<v>3</v>
</c>
</row>
<row r="3" spans="1:2">
<c r="A3" t="n">
<v>1</v>
</c>
<c r="B3" t="n">
<v>2</v>
</c>
</row>
</sheetData>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
def test_dirty_cell(DumpWorksheet):
ws = DumpWorksheet
def _writer(doc):
with xmlfile(doc) as xf:
with xf.element('sheetData'):
try:
while True:
body = (yield)
xf.write(body)
except GeneratorExit:
pass
doc = BytesIO()
ws.writer = _writer(doc)
next(ws.writer)
ws.append((datetime.date(2001, 1, 1), 1))
ws.writer.close()
xml = doc.getvalue()
expected = """
<sheetData>
<row r="1" spans="1:2">
<c r="A1" t="n" s="1"><v>36892</v></c>
<c r="B1" t="n"><v>1</v></c>
</row>
</sheetData>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.parametrize("row", ("string", dict()))
def test_invalid_append(DumpWorksheet, row):
ws = DumpWorksheet
with pytest.raises(TypeError):
ws.append(row)
@pytest.mark.lxml_required
def test_cell_comment(DumpWorksheet):
ws = DumpWorksheet
from openpyxl.comments import Comment
from .. dump_worksheet import WriteOnlyCell
cell = WriteOnlyCell(ws, 1)
comment = Comment('hello', 'me')
cell.comment = comment
ws.append([cell])
assert ws._comments == [comment]
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData>
<row r="1" spans="1:1"><c r="A1" t="n"><v>1</v></c></row>
</sheetData>
<legacyDrawing r:id="commentsvml"></legacyDrawing>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.lxml_required
def test_cannot_save_twice(DumpWorksheet):
from .. dump_worksheet import WorkbookAlreadySaved
ws = DumpWorksheet
ws.close()
with pytest.raises(WorkbookAlreadySaved):
ws.close()
with pytest.raises(WorkbookAlreadySaved):
ws.append([1])
@pytest.mark.lxml_required
def test_close(DumpWorksheet):
ws = DumpWorksheet
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData/>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.lxml_required
def test_auto_filter(DumpWorksheet):
ws = DumpWorksheet
ws.auto_filter.ref = 'A1:F1'
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData/>
<autoFilter ref="A1:F1"/>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.lxml_required
def test_frozen_panes(DumpWorksheet):
ws = DumpWorksheet
ws.freeze_panes = 'D4'
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<pane xSplit="3" ySplit="3" topLeftCell="D4" activePane="bottomRight" state="frozen"/>
<selection pane="topRight"/>
<selection pane="bottomLeft"/>
<selection pane="bottomRight" activeCell="A1" sqref="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData/>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.lxml_required
def test_write_empty_row(DumpWorksheet):
ws = DumpWorksheet
ws.append(['1', '2', '3'])
ws.append([])
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData>
<row r="1" spans="1:3">
<c r="A1" t="s">
<v>0</v>
</c>
<c r="B1" t="s">
<v>1</v>
</c>
<c r="C1" t="s">
<v>2</v>
</c>
</row>
<row r="2"/>
</sheetData>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
|
Darthkpo/xtt
|
openpyxl/writer/tests/test_dump.py
|
Python
|
mit
| 8,954
|
## Need to find a library
|
jacksarick/My-Code
|
Python/python challenges/euler/017_number_letter_counts.py
|
Python
|
mit
| 25
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class Queue:
def __init__(self):
self.queue = []
def enqueue(self, item):
self.queue.append(item)
def dequeue(self):
return self.queue.pop(0)
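# Illustrative usage (not in the original file):
if __name__ == '__main__':
    q = Queue()
    q.enqueue('first')
    q.enqueue('second')
    assert q.dequeue() == 'first'  # FIFO: the oldest item comes out first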
|
goldsborough/algs4
|
stacks-queues/python/queue.py
|
Python
|
mit
| 205
|
#coding:utf-8
'''
Method 1: create child processes with os.fork() from the os module
import os
if __name__ == '__main__':
print 'current Process (%s) start ...'%(os.getpid())
pid = os.fork()
if pid < 0:
print 'error in fork'
elif pid == 0:
print 'I am child process (%s) and my parent process is (%s)' % (os.getpid(), os.getppid())
else:
print 'I (%s) created a child process (%s).' % (os.getpid(), pid)
'''
'''
Method 2: create child processes with the multiprocessing module
import os
from multiprocessing import Process
# Code executed by the child process
def run_proc(name):
print 'Child process %s (%s) Running...' % (name, os.getpid())
if __name__ == '__main__':
print 'Parent process %s.' % os.getpid()
p_list=[]
for i in range(5):
p = Process(target=run_proc, args=(str(i),))
p_list.append(p)
print 'Process will start.'
p_list[i].start()
for p in p_list:
p.join()
print 'Process end.'
'''
'''
The multiprocessing module provides a Pool class that represents a process pool
from multiprocessing import Pool
import os, time, random
def run_task(name):
print 'Task %s (pid = %s) is running...' % (name, os.getpid())
time.sleep(random.random() * 3)
print 'Task %s end.' % name
if __name__=='__main__':
print 'Current process %s.' % os.getpid()
p = Pool(processes=3)
for i in range(5):
p.apply_async(run_task, args=(i,))
print 'Waiting for all subprocesses done...'
p.close()
p.join()
print 'All subprocesses done.'
'''
'''
Inter-process communication with Queue
from multiprocessing import Process, Queue
import os, time, random
# Code executed by the writer process:
def proc_write(q,urls):
print('Process(%s) is writing...' % os.getpid())
for url in urls:
q.put(url)
print('Put %s to queue...' % url)
time.sleep(random.random())
# Code executed by the reader process:
def proc_read(q):
print('Process(%s) is reading...' % os.getpid())
while True:
url = q.get(True)
print('Get %s from queue.' % url)
if __name__=='__main__':
# The parent process creates the Queue and passes it to each child process:
q = Queue()
proc_writer1 = Process(target=proc_write, args=(q,['url_1', 'url_2', 'url_3']))
proc_writer2 = Process(target=proc_write, args=(q,['url_4','url_5','url_6']))
proc_reader = Process(target=proc_read, args=(q,))
# Start the proc_writer child processes (writers):
proc_writer1.start()
proc_writer2.start()
# Start the proc_reader child process (reader):
proc_reader.start()
# Wait for the proc_writer processes to finish:
proc_writer1.join()
proc_writer2.join()
# proc_reader runs an infinite loop and never exits on its own, so it has to be terminated forcibly:
proc_reader.terminate()
'''
'''
Inter-process communication with Pipe
import multiprocessing
import random
import time,os
def proc_send(pipe,urls):
for url in urls:
print "Process(%s) send: %s" %(os.getpid(),url)
pipe.send(url)
time.sleep(random.random())
def proc_recv(pipe):
while True:
print "Process(%s) rev:%s" %(os.getpid(),pipe.recv())
time.sleep(random.random())
'''
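# A minimal driver sketch (not in the original snippet) showing how the
# proc_send/proc_recv functions above could be wired together with
# multiprocessing.Pipe(); kept commented out like the sections above.
'''
if __name__ == '__main__':
    pipe = multiprocessing.Pipe()
    proc_sender = multiprocessing.Process(target=proc_send, args=(pipe[0], ['url_1', 'url_2', 'url_3']))
    proc_receiver = multiprocessing.Process(target=proc_recv, args=(pipe[1],))
    proc_sender.start()
    proc_receiver.start()
    proc_sender.join()
    proc_receiver.terminate()
'''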
|
qiyeboy/SpiderBook
|
ch01/1.4.1.py
|
Python
|
mit
| 3,126
|
import collections
import itertools
from marnadi.utils import cached_property, CachedDescriptor
class Header(collections.Mapping):
__slots__ = 'value', 'params'
def __init__(self, *value, **params):
assert len(value) == 1
self.value = value[0]
self.params = params
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return self.value == other
def __ne__(self, other):
return self.value != other
def __str__(self):
return self.stringify()
def __bytes__(self):
value = self.stringify()
if isinstance(value, bytes): # python 2.x
return value
return value.encode(encoding='latin1')
def __getitem__(self, item):
return self.params[item]
def __iter__(self):
return iter(self.params)
def __len__(self):
return len(self.params)
def __bool__(self):
return True
def __nonzero__(self):
return self.__bool__()
def stringify(self):
if not self.params:
return str(self.value)
return '{value}; {params}'.format(
value=self.value,
params='; '.join(
'%s=%s' % (attr_name, attr_value)
for attr_name, attr_value in self.params.items()
),
)
class HeadersMixin(collections.Mapping):
if hasattr(collections.Mapping, '__slots__'):
__slots__ = '__weakref__',
def __getitem__(self, header):
return self._headers[header.title()]
def __len__(self):
return len(self._headers)
def __iter__(self):
return iter(self._headers)
__hash__ = object.__hash__
__eq__ = object.__eq__
__ne__ = object.__ne__
@cached_property
def _headers(self):
raise ValueError("This property must be set before using")
def items(self, stringify=False):
for header, values in self._headers.items():
for value in values:
yield header, str(value) if stringify else value
def values(self, stringify=False):
for values in self._headers.values():
for value in values:
yield str(value) if stringify else value
class ResponseHeaders(HeadersMixin, collections.MutableMapping):
__slots__ = ()
def __init__(self, default_headers):
self._headers = default_headers
def __delitem__(self, header):
del self._headers[header.title()]
def __setitem__(self, header, value):
self._headers[header.title()] = [value]
def append(self, header_item):
header, value = header_item
self._headers[header.title()].append(value)
def extend(self, headers):
for header in headers:
self.append(header)
def setdefault(self, header, default=None):
return self._headers.setdefault(header.title(), [default])
def clear(self, *headers):
if headers:
for header in headers:
try:
del self[header]
except KeyError:
pass
else:
self._headers.clear()
class Headers(CachedDescriptor, HeadersMixin):
__slots__ = ()
def __init__(self, *default_headers, **kw_default_headers):
super(Headers, self).__init__()
self._headers = collections.defaultdict(list)
for header, value in itertools.chain(
default_headers,
kw_default_headers.items(),
):
self._headers[header.title()].append(value)
def get_value(self, instance):
return ResponseHeaders(default_headers=self._headers.copy())
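# Illustrative sketch (not part of the original module) of how Header joins
# its value with its params when stringified:
if __name__ == '__main__':
    content_type = Header('text/html', charset='utf-8')
    assert str(content_type) == 'text/html; charset=utf-8'
    assert content_type['charset'] == 'utf-8'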
|
renskiy/marnadi
|
marnadi/http/headers.py
|
Python
|
mit
| 3,678
|
# -*- coding: utf-8 -*-
from folium.plugins.marker_cluster import MarkerCluster
from folium.utilities import if_pandas_df_convert_to_numpy, validate_location
from jinja2 import Template
class FastMarkerCluster(MarkerCluster):
"""
Add marker clusters to a map using in-browser rendering.
Using FastMarkerCluster it is possible to render thousands of
points far quicker than with the MarkerCluster class.
Be aware that the FastMarkerCluster class passes an empty
list to the parent class' __init__ method during initialisation.
This means that the add_child method is never called, and
no reference to any marker data is retained. Methods such
as get_bounds() are therefore not available when using it.
Parameters
----------
data: list of list with values
List of list of shape [[lat, lon], [lat, lon], etc.]
When you use a custom callback you could add more values after the
lat and lon. E.g. [[lat, lon, 'red'], [lat, lon, 'blue']]
callback: string, optional
A string representation of a valid Javascript function
that will be passed each row in data. See the
FastMarkerCluster for an example of a custom callback.
name : string, optional
The name of the Layer, as it will appear in LayerControls.
overlay : bool, default True
Adds the layer as an optional overlay (True) or the base layer (False).
control : bool, default True
Whether the Layer will be included in LayerControls.
show: bool, default True
Whether the layer will be shown on opening (only for overlays).
icon_create_function : string, default None
Override the default behaviour, making it possible to customize
marker colors and sizes.
**kwargs
Additional arguments are passed to Leaflet.markercluster options. See
https://github.com/Leaflet/Leaflet.markercluster
"""
_template = Template(u"""
{% macro script(this, kwargs) %}
var {{ this.get_name() }} = (function(){
{{ this.callback }}
var data = {{ this.data|tojson }};
var cluster = L.markerClusterGroup({{ this.options|tojson }});
{%- if this.icon_create_function is not none %}
cluster.options.iconCreateFunction =
{{ this.icon_create_function.strip() }};
{%- endif %}
for (var i = 0; i < data.length; i++) {
var row = data[i];
var marker = callback(row);
marker.addTo(cluster);
}
cluster.addTo({{ this._parent.get_name() }});
return cluster;
})();
{% endmacro %}""")
def __init__(self, data, callback=None, options=None,
name=None, overlay=True, control=True, show=True, icon_create_function=None, **kwargs):
if options is not None:
kwargs.update(options) # options argument is legacy
super(FastMarkerCluster, self).__init__(name=name, overlay=overlay,
control=control, show=show,
icon_create_function=icon_create_function,
**kwargs)
self._name = 'FastMarkerCluster'
data = if_pandas_df_convert_to_numpy(data)
self.data = [[*validate_location(row[:2]), *row[2:]] # noqa: E999
for row in data]
if callback is None:
self.callback = """
var callback = function (row) {
var icon = L.AwesomeMarkers.icon();
var marker = L.marker(new L.LatLng(row[0], row[1]));
marker.setIcon(icon);
return marker;
};"""
else:
self.callback = 'var callback = {};'.format(callback)
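# Illustrative usage sketch (not part of this module; the coordinates and the
# output filename are made up):
if __name__ == '__main__':
    import folium
    fmap = folium.Map(location=[45.52, -122.67], zoom_start=5)
    FastMarkerCluster(data=[[45.52, -122.67], [47.61, -122.33]]).add_to(fmap)
    fmap.save('cluster_map.html')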
|
ocefpaf/folium
|
folium/plugins/fast_marker_cluster.py
|
Python
|
mit
| 3,954
|
from django.core import validators
from django.utils.deconstruct import deconstructible
from django.utils.translation import gettext_lazy as _
@deconstructible
class UsernameValidator(validators.RegexValidator):
regex = r'^[\w.]+$'
message = _(
'Enter a valid username. This value may contain only letters, '
'numbers, underscores and periods.'
)
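# Illustrative sketch (not part of the original module): the validator is a
# plain RegexValidator, so it can be attached to a field via
# validators=[UsernameValidator()] or called directly:
if __name__ == '__main__':
    from django.core.exceptions import ValidationError
    validate_username = UsernameValidator()
    validate_username('jane.doe_42')   # accepted: letters, digits, _ and .
    try:
        validate_username('jane doe')  # rejected: contains a space
    except ValidationError:
        pass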
|
apirobot/shmitter
|
backend/shmitter/users/validators.py
|
Python
|
mit
| 377
|
from __future__ import print_function, absolute_import, division
import sys
sys.path.append('../')
import numpy as np
import tt
from tt.eigb import *
import time
""" This code computes many eigenvalus of the Laplacian operator """
d = 8
f = 8
A = tt.qlaplace_dd([d]*f)
#A = (-1)*A
#A = tt.eye(2,d)
n = [2] *(d * f)
r = [8] *(d * f + 1)
r[0] = 1
r[d * f] = 8 #Number of eigenvalues sought
x = tt.rand(n, d * f, r)
#x = tt_ones(2,d)
t = time.time()
y, lam = eigb(A, x, 1e-6)
t1 = time.time()
print('Eigenvalues:', lam)
print('Time is:', t1-t)
|
oseledets/ttpy
|
examples/test_eigb.py
|
Python
|
mit
| 544
|
# -*- coding: utf-8 -*-
from argparse import ArgumentParser
from ansible_playbook_wrapper.command.play import PlayCommand
def main():
parser = ArgumentParser()
sub_parsers = parser.add_subparsers(help='commands')
play_parser = sub_parsers.add_parser('play', help='play playbook')
for arg_info in PlayCommand.ARGUMENT_INFO:
play_parser.add_argument(*(arg_info[0]), **(arg_info[1]))
play_parser.set_defaults(command_class=PlayCommand)
parsed_args = parser.parse_args()
parsed_args.command_class(parsed_args).run()
|
succhiello/ansible-playbook-wrapper
|
ansible_playbook_wrapper/__init__.py
|
Python
|
mit
| 556
|
from logbook import Logger
from ..core.local import get_current_conf
from ..core.connection import autoccontext
from .. import db
from datetime import timedelta, datetime
log = Logger(__name__)
def del_inactive_queries():
conf = get_current_conf()
with autoccontext(commit=True) as conn:
before = db.get_query_count(conn)
db.del_inactive_queries(
conn,
before=datetime.utcnow() - timedelta(days=conf['TORABOT_DELETE_INACTIVE_QUERIES_BEFORE_DAYS']),
limit=conf['TORABOT_DELETE_INACTIVE_QUERIES_LIMIT']
)
after = db.get_query_count(conn)
log.info('delete inactive queries, from {} to {}, deleted {}', before, after, before - after)
return before - after
def del_old_changes():
conf = get_current_conf()
with autoccontext(commit=True) as conn:
before = db.get_change_count(conn)
db.del_old_changes(
conn,
before=datetime.utcnow() - timedelta(days=conf['TORABOT_DELETE_OLD_CHANGES_BEFORE_DAYS']),
limit=conf['TORABOT_DELETE_OLD_CHANGES_LIMIT']
)
after = db.get_change_count(conn)
log.info('delete old changes, from {} to {}, deleted {}', before, after, before - after)
return before - after
|
Answeror/torabot
|
torabot/tasks/delete.py
|
Python
|
mit
| 1,277
|
import os
import re
import codecs
import subprocess
import tempfile
import shutil
from .tylogger import logger
DEFAULT_ENCODING = 'utf16'
class Strings(object):
def __init__(self, encoding=DEFAULT_ENCODING, aliases=None):
self.encoding = encoding if encoding else DEFAULT_ENCODING
self.__references = {}
self.aliases = aliases if aliases else []
self.temp_dir = None
def generate(self, files, dst):
"""generate strings
:param dst: destination directory
:param files: input files
:return: dict mapping generated .strings filenames to their translation results
"""
dst_dir = os.path.abspath(dst)
results = {}
if self.temp_dir is None:
logger.process('Generating Strings...')
self.__generate_strings_temp_file(files)
logger.done('Generated Strings')
for filename in os.listdir(self.temp_dir):
logger.debug('generated %s' % filename)
reference = self.parsing(os.path.join(dst_dir, filename), encoding=self.encoding)
self.__references[filename] = reference
logger.done('Generated Reference')
for k, v in self.__references.items():
logger.info('%s count: %d' % (k, len(v)))
for basename, ref in self.__references.items():
target_abspath = os.path.join(dst_dir, basename)
dirname = os.path.dirname(target_abspath)
if not os.path.exists(dirname):
os.makedirs(dirname)
shutil.copy(os.path.join(self.temp_dir, basename), target_abspath)
results[basename] = self.translate(target_abspath, ref, self.encoding)
return results
def __generate_strings_temp_file(self, source_files):
"""run `genstrings` script. generate `.strings` files to a temp directory.
:param source_files: input files
:return: temp directory
"""
script = 'genstrings'
for filename in source_files:
script += ' %s' % filename
if len(self.aliases) > 0:
script += ' -s'
for alias in self.aliases:
script += ' %s' % alias
temp_dir = tempfile.mkdtemp()
self.__run_script('%s -o %s' % (script, temp_dir))
self.temp_dir = temp_dir
return temp_dir
def __del__(self):
if self.temp_dir:
shutil.rmtree(self.temp_dir, ignore_errors=True)
@staticmethod
def __run_script(script):
logger.debug('run: %s' % script)
process = subprocess.Popen(script, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = ''
while process.poll() is None:
line = process.stdout.readline()
if line:
output += line
logger.debug(line.strip())
logger.finished(process.returncode)
return process.returncode, output
@staticmethod
def parsing(filename, encoding=DEFAULT_ENCODING):
"""parsing `.strings` file.
:param filename: .strings filename
:param encoding: file encoding
:return: reference
"""
reference = dict((elem[0], elem[1]) for elem in Strings.__reference_generator(filename, encoding))
return reference
@staticmethod
def parsing_elems(filename, encoding=DEFAULT_ENCODING):
return list(Strings.__reference_generator(filename, encoding))
@staticmethod
def __reference_generator(filename, encoding=DEFAULT_ENCODING):
if os.path.exists(filename):
line_end = [0]
contents = ''
with codecs.open(filename, mode='r', encoding=encoding if encoding else DEFAULT_ENCODING) as f:
for line in f.readlines():
contents += line
line_end.append(len(contents))
prog = re.compile(r"\s*\"(?P<key>.*?)\"\s*=\s*\"(?P<value>[\s\S]*?)\"\s*;", re.MULTILINE)
for match in prog.finditer(contents):
key = match.group('key')
key_start = match.start('key')
value = match.group('value')
match.groupdict()
line_no = next(i for i in range(len(line_end)) if line_end[i] > key_start)
yield (key, value, line_no)
@property
def generated_filenames(self):
"""generated strings files basenames
e.g.: 'Localizable.strings'
:return: strings filenames
"""
return self.__references.keys()
@staticmethod
def translate(dst, reference, encoding=DEFAULT_ENCODING):
"""translate strings file by reference
:param dst: destination strings file
:param reference: translation reference
:param encoding: file encoding
:return: result dict
"""
result = {}
translated = []
try:
f = codecs.open(dst, "r", DEFAULT_ENCODING)
lines = f.readlines()
for (index, line) in enumerate(lines):
match = re.match(r'"(?P<key>.*?)" = "(?P<value>.*?)";', line)
if match is not None:
key = match.group('key')
value = match.group('value')
answer = reference.get(key, None)
if answer is not None:
if reference[key] != value:
line = '"%s" = "%s";\n' % (key, answer)
lines[index] = line
translated.append(key)
result[key] = answer
else:
result[key] = value
f.close()
logger.done('Translated: %s' % dst)
logger.info('count: %d' % len(translated))
for k in translated:
logger.debug('%s => %s' % (k, result[k]))
f = codecs.open(dst, "w+", encoding=encoding)
f.writelines(lines)
f.flush()
f.close()
return result
# logger.addition('Write strings file to: %s' % self.filename)
except Exception as e:
logger.error(e)
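# Illustrative usage sketch (not part of the original module; the file names
# below are assumptions and the Xcode `genstrings` tool must be on PATH):
#
#     strings = Strings(encoding='utf16')
#     results = strings.generate(files=['ViewController.m'], dst='en.lproj')
#     print(list(strings.generated_filenames))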
|
luckytianyiyan/TyStrings
|
tystrings/strings.py
|
Python
|
mit
| 6,149
|
from import_export import fields, resources
class GamesPlayedResource(resources.Resource):
game = fields.Field(attribute='game__name', column_name='game')
time = fields.Field(attribute='time', column_name='time (hours)')
num_players = fields.Field(attribute='num_players', column_name='num_players')
class Meta:
export_order = ['game', 'time', 'num_players']
def dehydrate_game(self, obj):
return obj['game__name']
def dehydrate_time(self, obj):
return obj['time'].total_seconds() / 3600
def dehydrate_num_players(self, obj):
return obj['num_players']
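# Illustrative export sketch (the queryset below is hypothetical, not from
# this repo). django-import-export's Resource.export() iterates the rows and
# runs the dehydrate_* hooks above, which expect aggregated values() dicts
# carrying 'game__name', 'time' (a timedelta) and 'num_players':
#
#     dataset = GamesPlayedResource().export(rows)
#     print(dataset.csv)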
|
sergei-maertens/discord-bot
|
bot/plugins/stats/resources.py
|
Python
|
mit
| 619
|
"""linter_test_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
|
David-Wobrock/django-fake-database-backends
|
tests/test_project/test_project/urls.py
|
Python
|
mit
| 776
|
"""Main urls.py for the ``pythonsingapore.com`` project."""
from django.conf import settings
from django.conf.urls.defaults import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from cms.sitemaps import CMSSitemap
from myproject.sitemaps import BlogSitemap
admin.autodiscover()
sitemaps = {}
sitemaps['cmspages'] = CMSSitemap()
sitemaps['news'] = BlogSitemap()
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
if settings.DEBUG is False and settings.SANDBOX is True:
urlpatterns += patterns(
'',
(r'^404/$', 'django.views.defaults.page_not_found'),
(r'^500/$', 'django.views.defaults.server_error'),
url(r'^static/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.STATIC_ROOT}),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
)
urlpatterns += patterns(
'',
url(r'^sitemap.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': sitemaps, }),
url(r'^captcha/', include('captcha.urls')),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(settings.ADMIN_URL, include(admin.site.urls)),
url(r'^admin-.+/', include('admin_honeypot.urls')),
url(r'^rosetta/', include('rosetta.urls')),
url(r'^accounts/', include('registration_email.backends.default.urls')),
url(r'^u/', include('user_data.urls')),
url(r'^', include('cms.urls')),
)
|
pythonsingapore/pythonsingapore
|
website/webapps/django/myproject/myproject/urls.py
|
Python
|
mit
| 1,625
|
# -*- coding: utf-8 -*-
"""anagram_solver.__main__: executed when directory is called as script."""
from .anagram_solver import main
main()
|
patrickleweryharris/anagram-solver
|
anagram_solver/__main__.py
|
Python
|
mit
| 145
|
import constants as c
from gui.windows import VideoStream
import socket
import cv2
import urllib
import numpy as np
class Robot(object):
def __init__(self, connection):
self.connection = connection
""" @type : Connections.ConnectionProcessEnd.RobotConnection """
self.socket = None
self.stream = None
self.bytes = None
self.window = None
self.psychopy_disabled = None
self.stream_enabled = None
self.target_to_command = None
self.connection.waitMessages(self.start, self.exit, self.update, self.setup, self.sendMessage, poll=0)
def start(self):
while True:
self.update()
message = self.connection.receiveMessageInstant()
if message is not None:
if isinstance(message, int):
self.sendMessage(self.target_to_command[message])
elif message in c.ROBOT_COMMANDS:
self.sendMessage(message)
elif isinstance(message, basestring):
return message
else:
print("Robot message: " + str(message))
def updateVideo(self):
if self.stream_enabled:
if self.stream is not None:
self.bytes += self.stream.read(1024)
a = self.bytes.find('\xff\xd8')
b = self.bytes.find('\xff\xd9')
if a != -1 and b != -1:
jpg = self.bytes[a:b+2]
self.bytes = self.bytes[b+2:]
i = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_COLOR)
if self.psychopy_disabled is not None:
if self.psychopy_disabled:
self.window.updateStream(i)
else:
self.connection.sendMessage(i)
def updateWindow(self):
if self.window is not None:
self.window.update()
def exitWindow(self):
if self.window is not None:
self.window.exitFlag = True
self.window.exit()
def update(self):
self.updateVideo()
self.updateWindow()
def exit(self):
self.exitWindow()
self.connection.close()
def sendRobotMessage(self, message):
try: # seems like PiTank closes the socket after receiving message
robot_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
robot_socket.connect(("192.168.42.1", 12345))
robot_socket.send(message)
except Exception, e:
print("Could not send message to robot (did you click setup? Is PiTank switched on and computer connected to PiTank?): " + str(e))
def sendMessage(self, message):
if message in c.ROBOT_COMMANDS:
self.sendRobotMessage(message)
else:
print("Unknown message in Robot: " + str(message))
def psychopyDisabled(self, options):
return options[c.DISABLE] == 1
def streamEnabled(self, options):
return options[c.ROBOT_STREAM] == 1
def getTargetToCommand(self, options):
return {
options[c.ROBOT_OPTION_FORWARD]: c.MOVE_FORWARD,
options[c.ROBOT_OPTION_BACKWARD]: c.MOVE_BACKWARD,
options[c.ROBOT_OPTION_LEFT]: c.MOVE_LEFT,
options[c.ROBOT_OPTION_RIGHT]: c.MOVE_RIGHT,
options[c.ROBOT_OPTION_STOP]: c.MOVE_STOP
}
def setup(self):
options = self.connection.receiveMessageBlock()
self.exitWindow()
self.stream_enabled = self.streamEnabled(options[c.DATA_ROBOT])
self.target_to_command = self.getTargetToCommand(options[c.DATA_ROBOT])
if self.stream_enabled:
self.psychopy_disabled = self.psychopyDisabled(options[c.DATA_BACKGROUND])
if self.psychopy_disabled:
self.window = VideoStream.StreamWindow()
self.window.setup()
else:
self.window = None
else:
self.window = None
try:
self.stream = urllib.urlopen("http://192.168.42.1:8080/?action=stream")
self.bytes = ""
return c.SUCCESS_MESSAGE
except Exception, e:
print("Error: " + str(e))
return c.FAIL_MESSAGE
|
kahvel/VEP-BCI
|
src/Robot.py
|
Python
|
mit
| 4,342
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-mgmt-servicefabric"
PACKAGE_PPRINT_NAME = "Service Fabric Management"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py')
if os.path.exists(os.path.join(package_folder_path, 'version.py'))
else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.md', encoding='utf-8') as f:
readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
changelog = f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
long_description=readme + '\n\n' + changelog,
long_description_content_type='text/markdown',
license='MIT License',
author='Microsoft Corporation',
author_email='azpysdkhelp@microsoft.com',
url='https://github.com/Azure/azure-sdk-for-python',
keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
packages=find_packages(exclude=[
'tests',
# Exclude packages that will be covered by PEP420 or nspkg
'azure',
'azure.mgmt',
]),
install_requires=[
'msrest>=0.6.21',
'azure-common~=1.1',
'azure-mgmt-core>=1.3.0,<2.0.0',
],
python_requires=">=3.6"
)
|
Azure/azure-sdk-for-python
|
sdk/servicefabric/azure-mgmt-servicefabric/setup.py
|
Python
|
mit
| 2,683
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-30 22:11
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wordproject', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='wordrecord',
name='description',
field=models.TextField(max_length=200, null=True),
),
]
|
OtagoPolytechnic/LanguageCards
|
admin/wordproject/migrations/0002_auto_20160331_1111.py
|
Python
|
mit
| 464
|
from django.contrib import admin
# Register your models here.
from polls.models import Question,Choice
class ChoiceInline(admin.TabularInline):
model = Choice
extra = 3
class QuestionAdmin(admin.ModelAdmin):
fields = ["question_text", "pub_date"]
inlines = [ChoiceInline]
list_display = ('question_text', 'pub_date', 'was_published_recently')
search_fields = ['question_text']
list_filter = ['pub_date']
admin.site.register(Question, QuestionAdmin)
|
Tassemble/jewelry
|
polls/admin.py
|
Python
|
mit
| 475
|
"""
Django settings for webserver project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os, sys
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "apps"))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'cd8=h&(&^#m95znusg4-f65vl6t#e%_wpf=nn6a^xnuh2pn5pd'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'apps.users',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'django_filters',
'apps.ping',
'apps.lessors',
'apps.bank_accounts',
'apps.products',
'apps.markets',
'apps.tags',
'apps.commons',
'apps.booths',
'apps.reservations',
'apps.payments',
'apps.reports',
'apps.ratings',
]
AUTH_USER_MODEL = 'users.User'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'config.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'config.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db',
'PORT': 5432,
}
}
# REST framework
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.AllowAny',
],
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication',
'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
),
'PAGE_SIZE': 12,
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',),
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Email credentials
EMAIL_USE_TLS = True
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_PASSWORD = 'whale123mart'
EMAIL_HOST_USER = 'whalemart.noti@gmail.com'
EMAIL_PORT = 587
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
|
we-inc/mms-snow-white-and-the-seven-pandas
|
webserver/config/settings.py
|
Python
|
mit
| 4,453
|
import datetime
import queue
import multiprocessing
import pytest
from honcho.printer import Message
from honcho.manager import Manager
from honcho.manager import SYSTEM_PRINTER_NAME
HISTORIES = {
'one': {
'processes': {'foo': {}},
'messages': (('foo', 'start', {'pid': 123}),
('foo', 'line', b'hello, world!\n'),
('foo', 'stop', {'returncode': 0})),
},
'two': {
'processes': {'bar': {}, 'foo': {}},
'messages': (('foo', 'start', {'pid': 123}),
('bar', 'start', {'pid': 124}),
('foo', 'line', b'process one\n'),
('bar', 'line', b'process two\n'),
('foo', 'stop', {'returncode': 0}),
('bar', 'stop', {'returncode': 0})),
},
'returncode': {
'processes': {'bar': {}, 'foo': {}},
'messages': (('foo', 'start', {'pid': 123}),
('bar', 'start', {'pid': 124}),
('foo', 'stop', {'returncode': 456}),
('bar', 'stop', {'returncode': 321})),
},
'output_after_stop': {
'processes': {'bar': {}, 'foo': {}},
'messages': (('foo', 'start', {'pid': 123}),
('bar', 'start', {'pid': 124}),
('foo', 'line', b'hi from foo\n'),
('bar', 'line', b'hi from bar\n'),
('foo', 'stop', {'returncode': 0}),
('bar', 'line', b'fishmongers\n'),
('bar', 'line', b'butchers\n'),
('bar', 'stop', {'returncode': -15})),
},
}
class FakeClock(object):
def now(self):
return datetime.datetime(2012, 8, 11, 12, 42)
class FakeProcessManager(object):
def terminate(self, pid):
pass
def kill(self, pid):
pass
class FakeProcess(object):
def __init__(self, cmd, name=None, colour=None, quiet=None, env=None, cwd=None):
self.cmd = cmd
self.name = name
self.colour = colour
self.quiet = quiet
self.env = env
self.cwd = cwd
self._events = None
self._options = {}
def run(self, events=None, ignore_signals=False):
self._report('run', events_passed=events is not None)
def _report(self, type, **data):
if self._events is not None:
data.update({'type': type,
'name': self.name})
self._events.put(data)
class Harness(object):
def __init__(self, history, manager):
self.history = history
self.manager = manager
self.events_local = []
self._q = multiprocessing.Queue()
self._rc = multiprocessing.Value('i', -999)
def run(self, wait=True):
self.manager._process_ctor = self._process_ctor
for name, options in self.history['processes'].items():
self.manager.add_process(name,
options.get('command', 'test'),
options.get('quiet', False))
def _loop(rc):
self.manager.loop()
rc.value = self.manager.returncode
self._mproc = multiprocessing.Process(target=_loop, args=(self._rc,))
self._mproc.start()
for msg in self.history['messages']:
self.send_manager(*msg)
self._mproc.join()
@property
def manager_returncode(self):
if self._rc.value == -999:
return None
return self._rc.value
def send_manager(self, process_name, type, data, **kwargs):
self.manager.events.put(Message(type=type,
data=data,
time=datetime.datetime.now(),
name=process_name,
colour=None))
def fetch_events(self):
"""
Retrieve any pending events from the queue and put them on the local
event cache
"""
while 1:
try:
self.events_local.append(self._q.get(False))
except queue.Empty:
break
def find_events(self, name=None, type=None):
self.fetch_events()
results = []
for event in self.events_local:
if name is not None and event['name'] != name:
continue
if type is not None and event['type'] != type:
continue
results.append(event)
return results
def _process_ctor(self, *args, **kwargs):
options = self.history['processes'][kwargs['name']]
p = FakeProcess(*args, **kwargs)
p._events = self._q
p._options = options
return p
class FakePrinter(object):
def __init__(self, width=0):
self.width = width
self.lines_local = []
self._q = multiprocessing.Queue()
def write(self, message):
# Called in a remote thread, so just put the message on the queue.
self._q.put(message)
def fetch_lines(self):
"""
Retrieve any pending lines from the queue and put them on the local
line cache
"""
while 1:
try:
self.lines_local.append(self._q.get(False))
except queue.Empty:
break
def got_line(self, data):
return self.find_line(data) is not None
def find_line(self, data):
self.fetch_lines()
for line in self.lines_local:
if line.data == data:
return line
class TestManager(object):
@pytest.fixture(autouse=True)
def printer(self): # noqa
self.p = FakePrinter()
self.m = Manager(printer=self.p)
self.m._clock = FakeClock()
self.m._procmgr = FakeProcessManager()
def run_history(self, name, wait=True):
self.h = Harness(HISTORIES[name], self.m)
self.h.run(wait=wait)
def test_init_sets_default_printer_width(self):
assert self.p.width == len(SYSTEM_PRINTER_NAME)
def test_add_process_updates_printer_width(self):
self.m.add_process('interesting', 'ruby server.rb')
assert self.p.width == len('interesting')
def test_add_process_sets_name(self):
proc = self.m.add_process('foo', 'ruby server.rb')
assert proc.name == 'foo'
def test_add_process_sets_cmd(self):
proc = self.m.add_process('foo', 'ruby server.rb')
assert proc.cmd == 'ruby server.rb'
def test_add_process_sets_colour(self):
proc = self.m.add_process('foo', 'ruby server.rb')
assert proc.colour is not None
def test_add_process_sets_unique_colours(self):
p1 = self.m.add_process('foo', 'ruby server.rb')
p2 = self.m.add_process('bar', 'python server.py')
assert p1.colour != p2.colour
def test_add_process_sets_quiet(self):
proc = self.m.add_process('foo', 'ruby server.rb', quiet=True)
assert proc.quiet
def test_add_process_name_must_be_unique(self):
self.m.add_process('foo', 'ruby server.rb')
with pytest.raises(AssertionError):
self.m.add_process('foo', 'another command')
def test_add_process_sets_cwd(self):
proc = self.m.add_process('foo', 'ruby server.rb', cwd='foo-dir')
assert proc.cwd == 'foo-dir'
def test_loop_with_empty_manager_returns_immediately(self):
self.m.loop()
def test_loop_calls_process_run(self):
self.run_history('one')
evts = self.h.find_events(type='run')
assert len(evts) == 1
assert evts[0]['name'] == 'foo'
assert evts[0]['events_passed']
def test_printer_receives_messages_in_correct_order(self):
self.run_history('one')
self.p.fetch_lines()
assert self.p.lines_local[0].data == 'foo started (pid=123)\n'
assert self.p.lines_local[1].data == b'hello, world!\n'
assert self.p.lines_local[2].data == 'foo stopped (rc=0)\n'
def test_printer_receives_lines_multi_process(self):
self.run_history('two')
l1 = self.p.find_line(b'process one\n')
l2 = self.p.find_line(b'process two\n')
assert l1.name == 'foo'
assert l2.name == 'bar'
def test_returncode_set_by_first_exiting_process(self):
self.run_history('returncode')
assert self.h.manager_returncode == 456
def test_printer_receives_lines_after_stop(self):
self.run_history('output_after_stop')
assert self.p.got_line(b'fishmongers\n')
assert self.p.got_line(b'butchers\n')
|
nickstenning/honcho
|
tests/test_manager.py
|
Python
|
mit
| 8,625
|
# Graphical paste and save GUI for adding members
from Tkinter import *
import os
import pdb
import datetime
lday = 04
lmonth = 10
class myDate:
def __init__(self, year, month, day):
self.date = datetime.datetime(year, month, day)
self.updateString()
def getMonthDay(self):
lday = format(self.date.day, '02')
lmonth = format(self.date.month, '02')
return lmonth + lday
def getfilename(self):
lfdate = self.getMonthDay()
lfname = lfdate + "-" + nextfname(lfdate) + ".txt"
return lfname
def updateString(self):
self.datestr = self.date.strftime("%m%d")
def updateDate(self, dt_obj):
self.date = dt_obj
date = myDate(2015, lmonth, lday)
def save(date):
f = open(date.getfilename(), "w")
t = text.get("1.0", END)
f.write(t.encode('utf8'))
f.close()
lfname = date.getfilename()
llabel.configure(text = lfname)
def add_day(date):
dt = datetime.datetime(2015, date.date.month, date.date.day)
dt = dt + datetime.timedelta(days=1)
date.updateDate(dt)
date.updateString()
lfname = date.getfilename()
llabel.configure(text = lfname)
def sub_day(date):
dt = datetime.datetime(2015, date.date.month, date.date.day)
dt = dt - datetime.timedelta(days=1)
date.updateDate(dt)
date.updateString()
lfname = date.getfilename()
llabel.configure(text = lfname)
def select_all(event):
text.tag_add(SEL, "1.0", END)
text.mark_set(INSERT, "1.0")
text.see(INSERT)
return 'break'
def nextfname(prefix):
first = 1
fstr = format(first, '02')
while os.path.exists(prefix + "-" + fstr + ".txt"):
first = first + 1
fstr = format(first, '02')
return fstr
root = Tk()
text = Text(root)
text.insert(INSERT, "")
text.bind("<Control-Key-a>", select_all)
text.grid()
bsave = Button(root, text="Save", command=lambda: save(date))
bsave.grid(columnspan=2, column=1, row=0)
dplus = Button(root, text="d+", command=lambda: add_day(date))
dplus.grid(column=1, row=1)
dminus = Button(root, text="d-", command=lambda: sub_day(date))
dminus.grid(column=2, row=1)
lfname = date.getfilename()
llabel = Label(root, text=lfname)
llabel.grid(columnspan=2, column=1, row=2)
root.mainloop()
|
marev711/scripts
|
medlemsinput.py
|
Python
|
mit
| 2,300
|
# coding=utf-8
import pytest
@pytest.fixture
def dns_sd():
from pymachinetalk import dns_sd
return dns_sd
@pytest.fixture
def sd():
from pymachinetalk import dns_sd
sd = dns_sd.ServiceDiscovery()
return sd
def test_registeringServicesFromServiceContainerWorks(dns_sd, sd):
service = dns_sd.Service()
discoverable = dns_sd.ServiceContainer()
discoverable.services.append(service)
sd.register(discoverable)
assert service in sd.services
def test_registeringServiceDirectlyWorks(dns_sd, sd):
service = dns_sd.Service()
sd.register(service)
assert service in sd.services
def test_registeringAnythingElseFails(sd):
item = object()
try:
sd.register(item)
except TypeError:
assert True
assert item not in sd.services
def test_registeringWhenRunningThrowsError(dns_sd, sd):
service = dns_sd.Service()
def dummy():
pass
sd._start_discovery = dummy
sd.start()
try:
sd.register(service)
except RuntimeError:
assert True
assert service not in sd.services
def test_unregisteringServiceDirectlyWorks(dns_sd, sd):
service = dns_sd.Service()
sd.register(service)
sd.unregister(service)
assert service not in sd.services
def test_unregisteringServicesFromServiceContainerWorks(dns_sd, sd):
service = dns_sd.Service()
discoverable = dns_sd.ServiceContainer()
discoverable.services.append(service)
sd.register(discoverable)
sd.unregister(discoverable)
assert service not in sd.services
def test_unregisteringAnythingElseFails(sd):
item = 34
try:
sd.unregister(item)
except TypeError:
assert True
assert item not in sd.services
def test_unregisteringWhenRunningThrowsError(dns_sd, sd):
service = dns_sd.Service()
def dummy():
pass
sd._start_discovery = dummy
sd.start()
try:
sd.unregister(service)
except RuntimeError:
assert True
assert service not in sd.services
class ServiceInfoFactory(object):
def create(
self,
base_type='machinekit',
domain='local',
sd_protocol='tcp',
name='Hugo on Franz',
service=b'halrcomp',
uuid=b'12345678',
host='127.0.0.1',
protocol='tcp',
port=12345,
version=0,
properties=None,
server='127.0.0.1',
address=None,
):
from zeroconf import ServiceInfo
typestring = '_%s._%s.%s.' % (base_type, sd_protocol, domain)
dsn = b'%s://%s:%i' % (protocol.encode(), host.encode(), port)
if properties is None:
properties = {
b'uuid': uuid,
b'service': service,
b'dsn': dsn,
b'version': version,
}
return ServiceInfo(
type_=typestring,
name='%s %s.%s' % (name, host, typestring),
properties=properties,
address=(address or host).encode(),
port=port,
server=server,
)
@pytest.fixture
def zeroconf(mocker):
from zeroconf import Zeroconf
service_info = ServiceInfoFactory().create()
zeroconf_stub = mocker.stub(name='get_service_info')
zeroconf_stub.return_value = service_info
stub_object = Zeroconf()
stub_object.get_service_info = zeroconf_stub
return stub_object
@pytest.fixture
def zeroconf_without_service_info(mocker):
from zeroconf import Zeroconf
zeroconf_stub = mocker.stub(name='get_service_info')
zeroconf_stub.return_value = None
stub_object = Zeroconf()
stub_object.get_service_info = zeroconf_stub
return stub_object
def test_serviceDiscoveredUpdatesRegisteredServices(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceDisappearedUpdatesRegisteredServices(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
sd.remove_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_stoppingServiceDiscoveryResetsAllServices(dns_sd, sd, zeroconf):
service1 = dns_sd.Service(type_='halrcomp')
sd.register(service1)
service2 = dns_sd.Service(type_='halrcmd')
sd.register(service2)
sd.browser = object() # dummy
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
sd.stop()
assert service1.ready is False
assert service2.ready is False
def test_serviceDiscoveredWithoutServiceInfoDoesNotUpdateRegisteredServices(
dns_sd, sd, zeroconf_without_service_info
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.add_service(
zeroconf_without_service_info,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_serviceDisappearedWithoutServiceInfoDoesNotUpdateRegisteredServices(
dns_sd, sd, zeroconf_without_service_info
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
service.ready = True
sd.remove_service(
zeroconf_without_service_info,
'_machinekit._tcp.local.',
'Foo on Bar 127.0.0.1._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceInfoSetsAllRelevantValuesOfService(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
name='Foo on Bar',
uuid=b'987654321',
version=5,
host='10.0.0.10',
protocol='tcp',
port=12456,
server='sandybox.local',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://10.0.0.10:12456'
assert service.name == service_info.name
assert service.uuid == '987654321'
assert service.version == 5
assert service.host_name == 'sandybox.local'
assert service.host_address == '10.0.0.10'
def test_serviceInfoResolvesLocalHostnameIfMatched(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
host='sandybox.local',
protocol='tcp',
port=12456,
server='sandybox.local',
address='10.0.0.10',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://10.0.0.10:12456'
def test_serviceInfoReturnsRawUriIfHostnameIsNotMatched(dns_sd):
service = dns_sd.Service(type_='halrcomp')
service_info = ServiceInfoFactory().create(
host='thinkpad.local',
protocol='tcp',
port=12456,
server='sandybox.local',
address='10.0.0.10',
)
service.add_service_info(service_info)
assert service.uri == 'tcp://thinkpad.local:12456'
def test_serviceInfoWithIncompleteValuesIsIgnoredByService(dns_sd):
service = dns_sd.Service(type_='launcher')
service_info = ServiceInfoFactory().create(properties={})
service.add_service_info(service_info)
assert service.uri == ''
assert service.uuid == ''
assert service.version == b''
def test_removingServiceInfoResetsAllRelevantValuesOfService(dns_sd):
service = dns_sd.Service(type_='blahus')
service_info = ServiceInfoFactory().create()
service.add_service_info(service_info)
service.remove_service_info(service_info)
assert service.uri == ''
assert service.name == ''
assert service.uuid == ''
assert service.version == 0
assert service.host_name == ''
assert service.host_address == ''
def test_clearingServiceInfosResetsValuesOfService(dns_sd):
service = dns_sd.Service(type_='foobar')
service.add_service_info(ServiceInfoFactory().create())
service.add_service_info(ServiceInfoFactory().create())
service.clear_service_infos()
assert service.ready is False
assert service.uri == ''
def test_settingReadyPropertyOfServiceTriggersCallback(dns_sd):
cb_called = [False]
def cb(_):
cb_called[0] = True
service = dns_sd.Service(type_='halrcomp')
service.on_ready_changed.append(cb)
service_info = ServiceInfoFactory().create()
service.add_service_info(service_info)
assert cb_called[0] is True
def test_discoverableAddingServiceWorks(dns_sd):
discoverable = dns_sd.ServiceContainer()
service = dns_sd.Service(type_='foo')
discoverable.add_service(service)
assert service in discoverable.services
def test_discoverableAddingAnythingElseFails(dns_sd):
discoverable = dns_sd.ServiceContainer()
item = object()
try:
discoverable.add_service(item)
assert False
except TypeError:
assert True
assert item not in discoverable.services
def test_discoverableRemovingServiceWorks(dns_sd):
discoverable = dns_sd.ServiceContainer()
service = dns_sd.Service(type_='foo')
discoverable.add_service(service)
discoverable.remove_service(service)
assert service not in discoverable.services
def test_discoverableRemovingAnythingElseFails(dns_sd):
discoverable = dns_sd.ServiceContainer()
item = object()
try:
discoverable.remove_service(item)
assert False
except TypeError:
assert True
assert item not in discoverable.services
def test_discoverableAllServicesReadySetServicesReady(dns_sd):
discoverable = dns_sd.ServiceContainer()
service1 = dns_sd.Service(type_='foo')
discoverable.add_service(service1)
service2 = dns_sd.Service(type_='bar')
discoverable.add_service(service2)
service1.ready = True
service2.ready = True
assert discoverable.services_ready is True
def test_discoverableNotAllServicesReadyUnsetsServicesReady(dns_sd):
discoverable = dns_sd.ServiceContainer()
service1 = dns_sd.Service(type_='foo')
discoverable.add_service(service1)
service2 = dns_sd.Service(type_='bar')
discoverable.add_service(service2)
service1.ready = True
service2.ready = True
service1.ready = False
assert discoverable.services_ready is False
def test_discoverableServicesReadyChangedCallsCallback(dns_sd):
cb_called = [False]
def cb(_):
cb_called[0] = True
discoverable = dns_sd.ServiceContainer()
discoverable.on_services_ready_changed.append(cb)
discoverable.services_ready = True
assert cb_called[0] is True
def test_serviceDiscoveryFilterAcceptCorrectUuid(dns_sd):
service_info = ServiceInfoFactory().create(uuid=b'987654321')
filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'987654321'})
assert filter.matches_service_info(service_info) is True
def test_serviceDiscoveryFilterRejectWrongUuid(dns_sd):
service_info = ServiceInfoFactory().create(uuid=b'123456789')
filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'987654321'})
assert filter.matches_service_info(service_info) is False
def test_serviceDiscoveryFilterAcceptFuzzyName(dns_sd):
service_info = ServiceInfoFactory().create(name='Hello World')
filter = dns_sd.ServiceDiscoveryFilter(name='Hello')
assert filter.matches_service_info(service_info) is True
def test_serviceDiscoveryFilterAcceptExactMatchingName(dns_sd):
service_info = ServiceInfoFactory().create(name='Foo')
filter = dns_sd.ServiceDiscoveryFilter(name='Foo')
assert filter.matches_service_info(service_info) is True
def test_serviceDiscoveryFilterRejectNonMatchingName(dns_sd):
service_info = ServiceInfoFactory().create(name='Carolus Rex')
filter = dns_sd.ServiceDiscoveryFilter(name='Adolfus Maximus')
assert filter.matches_service_info(service_info) is False
def test_serviceDiscoveryFilterPassingWrongObjectFails(dns_sd):
filter = dns_sd.ServiceDiscoveryFilter()
try:
filter.matches_service_info(object())
assert False
except TypeError:
assert True
def test_serviceDiscoveryFiltersOutDiscoveredServiceWithWrongUuid(dns_sd, sd, zeroconf):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'87654321'})
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'Machinekit on MyBox 12.0.0.1._machinekit._tcp.local.',
)
assert service.ready is False
def test_serviceDiscoveryFiltersInDiscoveredServiceWithCorrectUuid(
dns_sd, sd, zeroconf
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'12345678'})
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'SuperPrint 192.168.7.2._machinekit._tcp.local.',
)
assert service.ready is True
def test_serviceDiscoveryFiltersInDisappearedServiceWithCorrectUuid(
dns_sd, sd, zeroconf
):
service = dns_sd.Service(type_='halrcomp')
sd.register(service)
sd.filter = dns_sd.ServiceDiscoveryFilter(txt_records={b'uuid': b'12345678'})
sd.add_service(
zeroconf,
'_machinekit._tcp.local.',
'SuperPrint 192.168.7.2._machinekit._tcp.local.',
)
sd.remove_service(
zeroconf,
'_machinekit._tcp.local.',
'SuperPrint 192.168.7.2._machinekit._tcp.local.',
)
assert service.ready is False
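# --- usage sketch (added for illustration, not part of the original tests) ---
# A minimal example of the API these tests exercise: create a ServiceDiscovery,
# register a Service, react to readiness changes, then start browsing.  Only
# calls that appear in the tests above are used; 'halrcomp' is simply the
# service type the tests use as an example.
def _example_usage():
    from pymachinetalk import dns_sd

    sd = dns_sd.ServiceDiscovery()
    halrcomp = dns_sd.Service(type_='halrcomp')
    halrcomp.on_ready_changed.append(lambda _: print('halrcomp readiness changed'))
    sd.register(halrcomp)  # registering after start() raises RuntimeError
    sd.start()             # begins discovery; a matching service sets ready to True
    return sd, halrcomp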
|
strahlex/pymachinetalk
|
pymachinetalk/tests/test_dns_sd.py
|
Python
|
mit
| 13,743
|
import typing
import twittback
TweetSequence = typing.Sequence[twittback.Tweet]
UserSequence = typing.Sequence[twittback.User]
|
dmerejkowsky/twittback
|
twittback/types.py
|
Python
|
mit
| 130
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapyproject.models import (Cinema, Showing, ShowingBooking, Movie,
db_connect, drop_table_if_exist,
create_table, Session)
from scrapyproject.items import (CinemaItem, ShowingItem, ShowingBookingItem,
MovieItem)
from scrapyproject.utils import (use_cinema_database,
use_showing_database,
use_movie_database)
class DataBasePipeline(object):
"""
    Pipeline that adds items to the database.
    Existing data is kept if the spider has the attribute 'keep_old_data'.
"""
def __init__(self, database):
self.database = database
        # keep crawled movies so cinema counts can be summed per title
self.crawled_movies = {}
@classmethod
def from_crawler(cls, crawler):
return cls(database=crawler.settings.get('DATABASE'))
def open_spider(self, spider):
engine = db_connect()
if not spider.keep_old_data:
# drop data
if use_showing_database(spider):
drop_table_if_exist(engine, ShowingBooking)
drop_table_if_exist(engine, Showing)
elif use_cinema_database(spider):
drop_table_if_exist(engine, Cinema)
elif use_movie_database(spider):
drop_table_if_exist(engine, Movie)
create_table(engine)
def close_spider(self, spider):
for title in self.crawled_movies:
self.process_movie_item(self.crawled_movies[title], spider)
# close global session when spider ends
Session.remove()
def process_item(self, item, spider):
"""
        use the cinema table if the spider has the attribute "use_cinema_database"
        use the showing table if the spider has the attribute "use_showing_database"
        a spider should not have both attributes
"""
if isinstance(item, CinemaItem):
return self.process_cinema_item(item, spider)
elif isinstance(item, ShowingItem):
return self.process_showing_item(item, spider)
elif isinstance(item, ShowingBookingItem):
return self.process_showing_booking_item(item, spider)
elif isinstance(item, MovieItem):
# sum cinema count for each cinema
if item['title'] not in self.crawled_movies:
self.crawled_movies[item['title']] = item
else:
count = (item['current_cinema_count'] +
self.crawled_movies[item['title']]['current_cinema_count'])
self.crawled_movies[item['title']]['current_cinema_count'] = count
return item
def process_cinema_item(self, item, spider):
cinema = Cinema(**item)
exist_cinema = Cinema.get_cinema_if_exist(cinema)
if not exist_cinema:
            # if the data does not exist in the database, add it
self.add_item_to_database(cinema)
else:
            # otherwise check whether it should be merged into the existing record
            # merge strategy:
            # - if the existing data was crawled from another source, only add
            #   names and screens to it;
            # - if the cinema does not have a site url, the item is treated as
            #   a duplicate and dropped;
            # - otherwise, merge all data
if cinema.source != exist_cinema.source:
# replace when new cinema data crawled more screens
if cinema.screen_count > exist_cinema.screen_count:
exist_cinema.merge(
cinema, merge_method=Cinema.MergeMethod.replace)
else:
exist_cinema.merge(
cinema, merge_method=Cinema.MergeMethod.info_only)
self.add_item_to_database(exist_cinema)
elif cinema.site:
exist_cinema.merge(
cinema, merge_method=Cinema.MergeMethod.update_count)
self.add_item_to_database(exist_cinema)
return item
def process_showing_item(self, item, spider):
showing = Showing(**item)
        # if the data does not exist in the database, add it
if not Showing.get_showing_if_exist(showing):
self.add_item_to_database(showing)
return item
def process_showing_booking_item(self, item, spider):
showing_booking = ShowingBooking()
showing_booking.from_item(item)
        # if the showing exists, use its id in the database
exist_showing = Showing.get_showing_if_exist(showing_booking.showing)
if exist_showing:
old_showing = showing_booking.showing
showing_booking.showing = exist_showing
showing_booking.showing.title = old_showing.title
showing_booking.showing.title_en = old_showing.title_en
showing_booking.showing.start_time = old_showing.start_time
showing_booking.showing.end_time = old_showing.end_time
showing_booking.showing.cinema_name = old_showing.cinema_name
showing_booking.showing.cinema_site = old_showing.cinema_site
showing_booking.showing.screen = old_showing.screen
showing_booking.showing.seat_type = old_showing.seat_type
showing_booking.showing.total_seat_count = \
old_showing.total_seat_count
showing_booking.showing.source = old_showing.source
# then add self
self.add_item_to_database(showing_booking)
return item
def process_movie_item(self, item, spider):
movie = Movie(**item)
        # if the data does not exist in the database, add it
if not Movie.get_movie_if_exist(movie):
self.add_item_to_database(movie)
return item
def add_item_to_database(self, db_item):
try:
db_item = Session.merge(db_item)
Session.commit()
except:
Session.rollback()
raise
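# --- usage sketch (added for illustration, not part of the original file) ---
# DataBasePipeline reads crawler.settings.get('DATABASE') in from_crawler(), so a
# project's settings.py needs an ITEM_PIPELINES entry pointing at this class plus
# a DATABASE setting.  The DATABASE value below is an assumption; its real shape
# depends on what db_connect() in scrapyproject.models expects.
#
# ITEM_PIPELINES = {
#     'scrapyproject.pipelines.DataBasePipeline': 300,
# }
# DATABASE = {
#     'drivername': 'postgresql',
#     'host': 'localhost',
#     'port': '5432',
#     'username': 'scrapy',
#     'password': 'secret',
#     'database': 'cinema',
# }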
|
gas1121/JapanCinemaStatusSpider
|
scrapyproject/pipelines.py
|
Python
|
mit
| 6,140
|
"""Test methods for `zcode/math/math_core.py`.
Can be run with:
$ nosetests math/tests/test_math_core.py
$ nosetests math/tests/test_math_core.py:TestMathCore.test_around
$ python math/tests/test_math_core.py
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
from numpy.testing import run_module_suite
import scipy as sp
import scipy.stats # noqa
from nose.tools import assert_true, assert_false, assert_equal, assert_raises, assert_almost_equal
from zcode.math import math_core, interpolate
class TestMathCore(object):
@classmethod
def setup_class(cls):
np.random.seed(9865)
cls.SIZE = 100
cls.r1 = np.random.random(cls.SIZE)
cls.r2 = np.random.uniform(-1.0, 1.0, size=cls.SIZE)
def test_argnearest_ordered(self):
from zcode.math.math_core import argnearest
edges = np.array([0.2, 0.8, 1.3, 1.5, 2.0, 3.1, 3.8, 3.9, 4.5, 5.1, 5.5])
vals = np.array([-1, 0.2, 1, 1.4, 2, 3, 4, 5, 5.5, 10])
correct = [0, 0, 1, 2, 4, 5, 7, 9, 10, 10]
retval = argnearest(edges, vals, assume_sorted=True)
assert_true(np.all(correct == retval))
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
return
def test_argnearest_ordered_left_right(self):
from zcode.math.math_core import argnearest
# 0 1 2 3 4 5 6 7 8 9 10
edges = np.array([0.2, 0.8, 1.3, 1.5, 2.0, 3.1, 3.8, 3.9, 4.5, 5.1, 5.5])
vals = np.array([-1, 0.2, 1, 1.4, 2, 3, 4, 5, 5.5, 10])
correct = np.array([-1, -1 , 1, 2 , 3, 4, 7, 8, 9 , 10])
print("LEFT")
retval = argnearest(edges, vals, assume_sorted=True, side='left')
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
print(correct == retval)
print(np.all(correct == retval))
assert_true(np.all(correct == retval))
correct += 1
for ee in edges:
correct[vals == ee] += 1
print("RIGHT")
retval = argnearest(edges, vals, assume_sorted=True, side='right')
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
assert_true(np.all(correct == retval))
return
def test_argnearest_unordered_x(self):
from zcode.math.math_core import argnearest
edges = np.array([0.2, 0.8, 1.3, 1.5, 2.0, 3.1, 3.8, 3.9, 4.5, 5.1, 5.5])
vals = np.array([-1, 0.2, 1, 1.4, 2, 3, 4, 5, 5.5, 10])
correct = np.array([2, 2, 8, 7, 0, 6, 9, 3, 4, 4])
# ix = np.random.permutation(edges.size)
ix = np.array([4, 3, 0, 9, 10, 8, 5, 2, 1, 7, 6])
edges = edges[ix]
retval = argnearest(edges, vals, assume_sorted=False)
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("nearest = {}".format(edges[retval]))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
assert_true(np.all(correct == retval))
return
def test_argnearest_unordered_xy(self):
from zcode.math.math_core import argnearest
edges = np.array([0.2, 0.8, 1.3, 1.5, 2.0, 3.1, 3.8, 3.9, 4.5, 5.1, 5.5])
vals = np.array([-1, 0.2, 1, 1.4, 2, 3, 4, 5, 5.5, 10])
correct = np.array([0, 7, 6, 3, 4, 9, 2, 4, 2, 8])
# ix = np.random.permutation(edges.size)
ix = np.array([4, 3, 0, 9, 10, 8, 5, 2, 1, 7, 6])
edges = edges[ix]
iy = np.array([4, 3, 5, 7, 9, 6, 0, 8, 1, 2])
vals = vals[iy]
retval = argnearest(edges, vals, assume_sorted=False)
print("Edges = {}".format(edges))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("nearest = {}".format(edges[retval]))
print("Vals = {}".format(vals))
print("retval = {}".format(retval))
print("correct = {}".format(correct))
assert_true(np.all(correct == retval))
return
def test_spacing(self):
from zcode.math.math_core import spacing
# Linear Spacing
ref1 = np.linspace(0.0, 1.0, num=20)
spc1 = spacing([0.0, 1.0], scale='lin', num=20)
assert_true(np.allclose(ref1, spc1))
# Logarithmic Spacing
ref2 = np.logspace(0.0, 2.5, num=20)
spc2 = spacing([np.power(10.0, 0.0), np.power(10.0, 2.5)], scale='log', num=20)
assert_true(np.allclose(ref2, spc2))
# Automatically selects appropriate Range
ref3 = np.logspace(1.0, 2.0, num=13)
spc3 = spacing([-10.0, 100.0, 0.0, 10.0], scale='log', num=13)
assert_true(np.allclose(ref3, spc3))
        # Manually selects appropriate range
ref4 = np.linspace(-5.0, -2.5, num=27)
spc4 = spacing([3.0, -2.5, -5.0, 0.0], scale='lin', num=27, filter='<')
assert_true(np.allclose(ref4, spc4))
# Only integral (whole number) values
# log spacing
vals = [2.34, 365.23]
res = np.array([2., 3., 4., 5., 6., 7., 8., 9., 10.,
20., 30., 40., 50., 60., 70., 80., 90., 100.,
200., 300., 400.])
retvals = spacing(vals, 'log', integers=True)
print("integers, log\n", vals, "\n\t", res, "\n\t", retvals)
print(retvals)
print(np.allclose(retvals, res))
assert_true(np.allclose(retvals, res))
# lin spacing
vals = [2.34, 11.23]
res = np.arange(2, 13)
retvals = spacing(vals, 'lin', integers=True)
print("integers, lin\n", vals, "\n\t", res, "\n\t", retvals)
print(np.allclose(retvals, res))
assert_true(np.allclose(retvals, res))
return
def test_mono(self):
arr_g = [-1.0, 1.0, 2.0, 3.0]
arr_ge = [-1.0, 1.0, 1.0, 2.0, 2.5]
arr_l = [11.5, 9.2, -2.0, -301.0]
arr_le = [11.5, 9.2, -2.0, -2.0, -301.0]
arr_e = 11*[1.0]
assert_true(math_core.mono(arr_g, 'g'))
assert_true(math_core.mono(arr_ge, 'ge'))
assert_true(math_core.mono(arr_g, 'ge'))
assert_false(math_core.mono(arr_ge, 'g'))
assert_true(math_core.mono(arr_l, 'l'))
assert_true(math_core.mono(arr_le, 'le'))
assert_true(math_core.mono(arr_l, 'le'))
assert_false(math_core.mono(arr_le, 'l'))
assert_true(math_core.mono(arr_e, 'e'))
assert_false(math_core.mono(arr_le, 'e'))
def test_ordered_groups(self):
arr = np.array([99, 77, 14, 21, 71, 64, 98, 38, 66, 25])
sinds = np.argsort(arr)
targets = [40, 77]
print("arr = {}, targets = {}, sorted arr = {}".format(arr, targets, arr[sinds]))
# Group into elements below targets
# Exclusively
print("Below, exclusive:")
locs, isort = math_core.ordered_groups(arr, targets, inds=None, dir='b', include=False)
assert_true(np.all(sinds == isort))
# Check subsets from each target location
for ll, tt in zip(locs, targets):
print("target = {}, loc = {}".format(tt, ll))
print(set(arr[isort[:ll]]), set(arr[sinds][arr[sinds] < tt]))
assert_true(set(arr[isort[:ll]]) == set(arr[sinds][arr[sinds] < tt]))
# Inclusively
print("Below, inclusive:")
locs, isort = math_core.ordered_groups(arr, targets, inds=None, dir='b', include=True)
assert_true(np.all(sinds == isort))
# Check subsets from each target location
for ll, tt in zip(locs, targets):
print("target = {}, loc = {}".format(tt, ll))
print(set(arr[isort[:ll]]), set(arr[sinds][arr[sinds] <= tt]))
assert_true(set(arr[isort[:ll]]) == set(arr[sinds][arr[sinds] <= tt]))
# Group into elements above targets
# Exclusive
print("Above, exclusive:")
locs, isort = math_core.ordered_groups(arr, targets, inds=None, dir='a', include=False)
assert_true(np.all(sinds[::-1] == isort))
# Check subsets from each target location
for ll, tt in zip(locs, targets):
print("target = {}, loc = {}".format(tt, ll))
print(set(arr[isort[:ll]]), set(arr[sinds][arr[sinds] > tt]))
assert_true(set(arr[isort[:ll]]) == set(arr[sinds][arr[sinds] > tt]))
# Exclusive
print("Above, inclusive:")
locs, isort = math_core.ordered_groups(arr, targets, inds=None, dir='a', include=True)
assert_true(np.all(sinds[::-1] == isort))
# Check subsets from each target location
for ll, tt in zip(locs, targets):
print("target = {}, loc = {}".format(tt, ll))
print(set(arr[isort[:ll]]), set(arr[sinds][arr[sinds] >= tt]))
assert_true(set(arr[isort[:ll]]) == set(arr[sinds][arr[sinds] >= tt]))
# Should raise error for unsorted `targets`
assert_raises(ValueError, math_core.ordered_groups, arr, targets[::-1])
# Should raise error for `dir` not starting with 'a' or 'b'
assert_raises(ValueError, math_core.ordered_groups, arr, targets, None, 'c')
return
def test_really1d(self):
from zcode.math import really1d
assert_true(really1d([1, 2, 3]))
assert_true(really1d([1]))
assert_true(really1d([]))
assert_true(really1d(np.arange(10)))
assert_false(really1d(1))
assert_false(really1d([[1]]))
assert_false(really1d([[1, 2], [2, 3]]))
assert_false(really1d([[1, 2, 3], [4, 5]]))
assert_false(really1d(np.random.random((4, 3))))
assert_false(really1d([[]]))
def test_argextrema(self):
# Basic usage without filtering
assert_equal(math_core.argextrema([-1, -5, 2, 10], 'min'), 1)
assert_equal(math_core.argextrema([-1, -5, 2, 10], 'max'), 3)
# Filtering
# min
assert_equal(math_core.argextrema([-1, -5, 2, 10, 0], 'min', 'g'), 2)
assert_equal(math_core.argextrema([-1, -5, 2, 10, 0], 'min', 'ge'), 4)
assert_equal(math_core.argextrema([-1, -5, 0, 2, 10], 'min', 'l'), 1)
assert_equal(math_core.argextrema([-1, -5, 0, 2, 10], 'min', 'le'), 1)
# max
assert_equal(math_core.argextrema([-1, -5, 2, 10, 0], 'max', 'g'), 3)
assert_equal(math_core.argextrema([-1, -5, 2, 10, 0], 'max', 'ge'), 3)
assert_equal(math_core.argextrema([-1, -5, 0, 2, 10], 'max', 'l'), 0)
assert_equal(math_core.argextrema([-1, -5, 0, 2, 10], 'max', 'le'), 2)
# Raises appropriate errors
# Incorrect shape input array
assert_raises(ValueError, math_core.argextrema, np.arange(4).reshape(2, 2), 'max')
assert_raises(ValueError, math_core.argextrema, 0.0, 'max')
# Invalid `type` argument
assert_raises(ValueError, math_core.argextrema, [1, 2], 'mex')
# Invalid `filter` argument
assert_raises(ValueError, math_core.argextrema, [1, 2], 'max', 'e')
# Invalid `filter` argument
assert_raises(ValueError, math_core.argextrema, [1, 2], 'max', 'greater')
def test_asBinEdges_1d(self):
print("TestMathCore.test_asBinEdges_1d")
from zcode.math import asBinEdges, spacing
data_1d = np.random.random(40)
bins_1d = np.arange(20)
# Preserves valid bins
assert_true(np.allclose(bins_1d, asBinEdges(bins_1d, data_1d)))
# Constructs valid bins
# lin
lin_1d = spacing(data_1d, scale='lin', num=8+1)
lin_edges_1d = asBinEdges(8, data_1d, scale='lin')
assert_true(np.allclose(lin_1d, lin_edges_1d))
# log
log_1d = spacing(data_1d, scale='log', num=7+1)
log_edges_1d = asBinEdges(7, data_1d, scale='log')
assert_true(np.allclose(log_1d, log_edges_1d))
# Raises appropriate errors
data_2d = data_1d.reshape(8, 5)
bins_2d = bins_1d.reshape(4, 5)
# 1D bins, 2D data
assert_raises(ValueError, asBinEdges, bins_1d, data_2d)
# 2D bins, 1D data
assert_raises(ValueError, asBinEdges, bins_2d, data_1d)
def test_asBinEdges_nd(self):
print("TestMathCore.test_asBinEdges_nd")
from zcode.math import asBinEdges
data_2d = np.random.random((8, 2))
bins_2d = np.arange(8).reshape(2, 4)
bins_2d2 = [[0.0, 1.0], [0.0, 0.5, 1.0]]
# Preserves valid bins
edges_2d = asBinEdges(bins_2d, data_2d)
assert_true(np.allclose(bins_2d, edges_2d))
edges_2d2 = asBinEdges(bins_2d2, data_2d)
assert_true(np.allclose(bins_2d2[0], edges_2d2[0]))
assert_true(np.allclose(bins_2d2[1], edges_2d2[1]))
# Constructs valid bins
# lin
lin_2d1 = sp.stats.binned_statistic_dd(data_2d, None, 'count', bins=4).bin_edges
lin_edges_2d1 = asBinEdges(4, data_2d, scale='lin')
assert_true(np.allclose(lin_2d1, lin_edges_2d1))
lin_2d2 = sp.stats.binned_statistic_dd(data_2d, None, 'count', bins=[4, 3]).bin_edges
lin_edges_2d2 = asBinEdges([4, 3], data_2d, scale='lin')
assert_true(np.allclose(lin_2d2[0], lin_edges_2d2[0]))
assert_true(np.allclose(lin_2d2[1], lin_edges_2d2[1]))
# Raises appropriate errors
# 1D bins, 2D data
assert_raises(ValueError, asBinEdges, [4], data_2d)
# 2D bins, 1D data
assert_raises(ValueError, asBinEdges, [4, 3, 2], data_2d)
def test_comparison_function(self):
from zcode.math.math_core import _comparison_function
comp = ['g', '>']
arr = [0.5, 1.5, -0.5, 0.0]
res = [True, True, False, False]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['ge', '>=']
arr = [0.5, 1.5, -0.5, 0.0]
res = [True, True, False, True]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['l', '<']
arr = [-10.5, -1.5, 0.5, 0.0]
res = [True, True, False, False]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['le', '<=']
arr = [-10.5, -1.5, 0.5, 0.0]
res = [True, True, False, True]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['e', '=', '==']
arr = [-10.5, 0.5, 0.0]
res = [False, False, True]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
comp = ['ne', '!=']
arr = [-10.5, 0.5, 0.0]
res = [True, True, False]
for cc in comp:
func = _comparison_function(cc, value=0.0)
assert_true(np.all(np.equal(func(arr), res)))
return
def test_comparison_filter(self):
from zcode.math.math_core import comparison_filter
comp = ['g', '>']
arr = [0.5, -1.0, 1.5, -0.5, 0.0]
res = [0.5, 1.5]
inds = [0, 2]
arr = np.array(arr)
for cc in comp:
vals = comparison_filter(arr, cc, value=0.0)
assert_true(np.all(np.equal(vals, res)))
val_inds = comparison_filter(arr, cc, inds=True, value=0.0)
assert_true(np.all(np.equal(arr[val_inds], arr[inds])))
comp = ['le', '<=']
arr = [0.5, -1.0, 1.5, -0.5, 0.0]
res = [-1.0, -0.5, 0.0]
inds = [1, 3, 4]
arr = np.array(arr)
for cc in comp:
vals = comparison_filter(arr, cc, value=0.0)
assert_true(np.all(np.equal(vals, res)))
vals = comparison_filter(arr, cc, inds=True, value=0.0)
assert_true(np.all(np.equal(arr[vals], arr[inds])))
return
def test_around(self):
from zcode.math.math_core import around
vals = [
# Nearest
# linear
[[123.4678, 0, 'lin', 'near'], 123.00],
[[123.4678, 1, 'linear', 'nearest'], 123.50],
[[123.4678, 2, 'lin', 'n'], 123.47],
# logarithmic
[[123.4678, 0, 'log', 'nearest'], 100.0],
[[123.4678, 1, 'logarithmic', 'nearest'], 120.0],
[[123.4678, 2, 'log', 'nearest'], 123.0],
[[123.4678, 3, 'log', 'nearest'], 123.5],
# Negative decimals (order-of-magnitude rounding)
[[213.4678, -1, 'log', 'nearest'], 100.0],
# Ceiling (up)
# linear
[[123.4678, 0, 'lin', 'c'], 124.0],
[[123.4678, 1, 'linear', 'ceiling'], 123.5],
[[123.4678, 2, 'lin', 'ceil'], 123.47],
# logarithmic
[[123.4678, 0, 'log', 'c'], 200.0],
[[123.4678, 1, 'logarithmic', 'c'], 130.0],
[[123.4678, 2, 'log', 'c'], 124.0],
[[123.4678, 3, 'log', 'c'], 123.5],
# Negative decimals (order-of-magnitude rounding)
[[213.4678, -1, 'log', 'c'], 1000.0],
# Floor (down)
# linear
[[123.4678, 0, 'lin', 'f'], 123.0],
[[123.4678, 1, 'linear', 'fl'], 123.4],
[[123.4678, 2, 'lin', 'floor'], 123.46],
# logarithmic
[[123.4678, 0, 'log', 'f'], 100.0],
[[123.4678, 1, 'logarithmic', 'f'], 120.0],
[[123.4678, 2, 'log', 'f'], 123.0],
[[123.4678, 3, 'log', 'f'], 123.4],
# Negative decimals (order-of-magnitude rounding)
[[213.4678, -1, 'log', 'f'], 100.0],
]
for vv in vals:
print(vv)
res = around(*vv[0])
print("\t", res)
assert_true(np.isclose(vv[1], res))
# Invalid 'scaling'
assert_raises(ValueError, around, 1234.567, 1, 'symlog', 'n')
# Invalid 'dir'ection
assert_raises(ValueError, around, 1234.567, 1, 'log', 'm')
return
def test_str_array(self):
from zcode.math.math_core import str_array
print("TestMathCore.test_str_array()")
arr = np.linspace(0, 10.0, 6)
correct = '[0.00, 2.00, 4.00, 6.00, 8.00, 10.00]'
sa = str_array(arr)
print("'({})' ==> '{}', should be '{}'".format(arr, sa, correct))
assert_true(sa == correct)
sa = str_array(arr, (2, 2))
print("'({}, (2, 2))' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00, 2.00... 8.00, 10.00]')
sa = str_array(arr, None)
print("'({}, None)' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00, 2.00, 4.00, 6.00, 8.00, 10.00]')
sa = str_array(arr, 1)
print("'({}, 1)' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00... 10.00]')
sa = str_array(arr, (1, 3))
print("'({}, (1, 3))' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00... 6.00, 8.00, 10.00]')
sa = str_array(arr, (12, 10))
print("'({}, (12, 10))' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00, 2.00, 4.00, 6.00, 8.00, 10.00]')
sa = str_array(arr, (2, 1), delim=' ')
print("'({}, (2, 1), delim=' ')' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.00 2.00... 10.00]')
sa = str_array(arr, (2, 1), format=':.1e')
print("'({}, (2, 1), format=':.1e')' ==> '{}'".format(arr, sa))
assert_true(sa == '[0.0e+00, 2.0e+00... 1.0e+01]')
return
def test_broadcast(self):
from zcode.math.math_core import broadcast
def check_in_ot(din, check):
dot = broadcast(*din)
print("input: {}".format(din))
print("output: {} ({})".format(dot, check))
assert_true(np.all([dd == cc for dd, cc in zip(dot, check)]))
assert_true(np.all([np.shape(dd) == np.shape(cc) for dd, cc in zip(dot, check)]))
return
# Normal broadcast (1,) (2,) ==> (2,) (2,)
din = [[1.0], [2.0, 3.0]]
check = [[[1.0, 1.0]], [[2.0, 3.0]]]
check_in_ot(din, check)
# Scalar-only broadcast () () ==> () ()
din = [1.0, 2.0]
check = din
check_in_ot(din, check)
# Mixed scalar and array
din = [1.5, [1.0, 2.0], [1.0, 2.0, 3.0]]
check = [
[[1.5, 1.5, 1.5], [1.5, 1.5, 1.5]],
[[1.0, 1.0, 1.0], [2.0, 2.0, 2.0]],
[[1.0, 2.0, 3.0], [1.0, 2.0, 3.0]]
]
check_in_ot(din, check)
din = [[1.0], [2.0, 3.0]]
check = [[[1.0, 1.0]], [[2.0, 3.0]]]
dot = broadcast(*din)
check_in_ot(din, check)
sh_in = np.random.randint(1, 5, 3)
sh_ot = [sh_in for ii in range(len(sh_in))]
din = [np.random.normal(size=sh) for sh in sh_in]
dot = broadcast(*din)
print("Input shapes: '{}'".format(sh_in))
print("Output shapes: '{}' ({})".format([dd.shape for dd in dot], sh_ot))
assert_true(np.all([dd.shape == sh for dd, sh in zip(dot, sh_ot)]))
return
class Test_Interp(object):
def test_interp_lin_lin(self):
print("\ntest_interp_lin_lin()")
kw = dict(xlog=False, ylog=False, valid=False, left=np.nan, right=100.0)
xo = [1.0, 2.0, 3.0]
yo = [10.0, 20.0, 30.0]
tests = [1.5, 2.5, 0.5, 3.5]
truth = [15.0, 25.0, np.nan, 100.0]
for xx, zz in zip(tests, truth):
yy = interpolate.interp(xx, xo, yo, **kw)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_lin_log(self):
print("\ntest_interp_lin_log()")
kw = dict(xlog=False, ylog=True, valid=False, left=100.0, right=np.nan)
xo = [1.0, 2.0, 3.0]
yo = [1.0e1, 1.0e3, 1.0e5]
tests = [1.5, 2.5, 0.5, 3.5]
truth = [1.0e2, 1.0e4, 100.0, np.nan]
for xx, zz in zip(tests, truth):
yy = interpolate.interp(xx, xo, yo, **kw)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_log_lin(self):
print("\ntest_interp_log_lin()")
kw = dict(xlog=True, ylog=False, valid=False, left=100.0, right=np.nan)
xo = [2.0e-5, 2.0e-3, 2.0e-1]
yo = [-10.0, -20.0, -30.0]
tests = [2.0e-4, 2.0e-2, 1.0e-8, 1.0e8]
truth = [-15.0, -25.0, 100.0, np.nan]
for xx, zz in zip(tests, truth):
yy = interpolate.interp(xx, xo, yo, **kw)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_log_log(self):
print("\ntest_interp_log_log()")
kw = dict(xlog=True, ylog=True, valid=False, left=np.nan, right=100.0)
xo = [1.0e-1, 1.0e1, 1.0e5]
yo = [3.0e0, 3.0e-2, 3.0e6]
tests = [1.0, 1.0e3, 1.0e-8, 1.0e8]
truth = [3.0e-1, 3.0e2, np.nan, 100.0]
for xx, zz in zip(tests, truth):
yy = interpolate.interp(xx, xo, yo, **kw)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
class Test_Interp_Func_Linear(object):
KW = dict(kind='linear', bounds_error=False)
def test_interp_func(self):
print("\n|test_interp_func()|")
options = [True, False]
TRIES = 10
SAMPS = 40
TESTS = 100
LOG_RANGE = [-8.0, 8.0]
for xlog in options:
for ylog in options:
kw = dict(xlog=xlog, ylog=ylog)
print("xlog = {}, ylog = {}".format(xlog, ylog))
for kk in range(TRIES):
xo = np.random.uniform(*LOG_RANGE, SAMPS)
xo = np.sort(xo)
yo = np.random.uniform(*LOG_RANGE, SAMPS)
xx = np.random.uniform(*math_core.minmax(xo), TESTS)
if xlog:
xo = np.power(10.0, xo)
xx = np.power(10.0, xx)
if ylog:
yo = np.power(10.0, yo)
y1 = interpolate.interp(xx, xo, yo, valid=False, **kw)
y2 = interpolate.interp_func(xo, yo, kind='linear', bounds_error=False, **kw)(xx)
assert_true(np.allclose(y1, y2))
return
def test_interp_func_lin_lin(self):
print("\n|test_interp_func_lin_lin()|")
kw = dict(xlog=False, ylog=False, fill_value=(np.nan, 100.0))
kw.update(self.KW)
xo = [1.0, 2.0, 3.0]
yo = [10.0, 20.0, 30.0]
tests = [1.5, 2.5, 0.5, 3.5]
truth = [15.0, 25.0, np.nan, 100.0]
for xx, zz in zip(tests, truth):
yy = interpolate.interp_func(xo, yo, **kw)(xx)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_func_lin_log(self):
print("\n|test_interp_func_lin_log()|")
kw = dict(xlog=False, ylog=True, fill_value=(100.0, np.nan))
kw.update(self.KW)
xo = [1.0, 2.0, 3.0]
yo = [1.0e1, 1.0e3, 1.0e5]
tests = [1.5, 2.5, 0.5, 3.5]
truth = [1.0e2, 1.0e4, 100.0, np.nan]
for xx, zz in zip(tests, truth):
yy = interpolate.interp_func(xo, yo, **kw)(xx)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_func_log_lin(self):
print("\n|test_interp_func_log_lin()|")
kw = dict(xlog=True, ylog=False, fill_value=(100.0, np.nan))
kw.update(self.KW)
xo = [2.0e-5, 2.0e-3, 2.0e-1]
yo = [-10.0, -20.0, -30.0]
tests = [2.0e-4, 2.0e-2, 1.0e-8, 1.0e8]
truth = [-15.0, -25.0, 100.0, np.nan]
for xx, zz in zip(tests, truth):
yy = interpolate.interp_func(xo, yo, **kw)(xx)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
def test_interp_func_log_log(self):
print("\n|test_interp_func_log_log()|")
kw = dict(xlog=True, ylog=True, fill_value=(np.nan, 100.0))
kw.update(self.KW)
xo = [1.0e-1, 1.0e1, 1.0e5]
yo = [3.0e0, 3.0e-2, 3.0e6]
tests = [1.0, 1.0e3, 1.0e-8, 1.0e8]
truth = [3.0e-1, 3.0e2, np.nan, 100.0]
for xx, zz in zip(tests, truth):
yy = interpolate.interp_func(xo, yo, **kw)(xx)
print("{} ==> {}, should be {}".format(xx, yy, zz))
if np.isnan(zz):
assert_true(np.isnan(yy))
else:
assert_almost_equal(yy, zz)
return
class Test_Interp_Func_Mono(object):
KW = dict(kind='mono')
def test_interp_func(self):
print("\n|test_interp_func()|")
xo = [0.1, 1.0, 2.0, 3.0, 4.0, 5.0]
yo = [100.0, 100.0, 90.0, 0.1, 2.0, 2.0]
NUM = len(xo)
xn = np.linspace(xo[1], xo[-2], 1000)
def test_within(xx, yy):
vals = []
for ii in range(NUM-1):
xl = xo[ii]
xh = xo[ii+1]
yl = yo[ii]
yh = yo[ii+1]
inds = (xl <= xx) & (xx <= xh)
rv1 = math_core.within(yy[inds], [yl, yh], all=True, close=True)
rv2 = math_core.mono(yy[inds], 'ge') or math_core.mono(yy[inds], 'le')
rv = (rv1 and rv2)
vals.append(rv)
return np.all(vals)
options = [True, False]
for xlog in options:
for ylog in options:
func = interpolate.interp_func(xo, yo, xlog=xlog, ylog=ylog, kind='mono')
yn = func(xn)
print("xlog = {}, ylog = {}".format(xlog, ylog))
assert_true(test_within(xn, yn))
# 'cubic' should be NON-monotonic, make sure test shows that
func = interpolate.interp_func(xo, yo, xlog=xlog, ylog=ylog, kind='cubic')
yn = func(xn)
assert_false(test_within(xn, yn))
return
class Test_Edges_From_Cents(object):
def test_lin_spacing(self):
print("\n|test_lin_spacing()|")
edges_true = [
np.linspace(0.0, 1.0, 20),
np.linspace(1.0, 0.0, 20),
np.linspace(-100, 100, 100)
]
for true in edges_true:
cents = math_core.midpoints(true, log=False)
edges = math_core.edges_from_cents(cents, log=False)
print("truth = {}".format(math_core.str_array(true)))
print("recov = {}".format(math_core.str_array(edges)))
assert_true(np.allclose(edges, true))
return
def test_log_spacing(self):
print("\n|test_log_spacing()|")
true_pars = [
[0.0, 1.0, 20],
[1.0, 0.0, 20],
[2.0, -2.0, 100]
]
for pars in true_pars:
true = np.logspace(*pars)
cents = math_core.midpoints(true, log=True)
edges = math_core.edges_from_cents(cents, log=True)
print("pars = ", pars)
print("truth = {}".format(math_core.str_array(true)))
print("recov = {}".format(math_core.str_array(edges)))
assert_true(np.allclose(edges, true))
return
def test_irr_spacing(self):
print("\n|test_irr_spacing()|")
NUM = 10
xx = np.arange(NUM)
widths = 1.5 + 0.4*xx + 0.1*(xx**2)
true = np.zeros(NUM+1)
true[0] = 4.0
for ii in range(1, NUM+1):
true[ii] = true[ii-1] + widths[ii-1]
cents = math_core.midpoints(true, log=False)
edges = math_core.edges_from_cents(cents, log=False)
print("truth = {}".format(math_core.str_array(true)))
print("recov = {}".format(math_core.str_array(edges)))
assert_true(np.allclose(edges, true, rtol=1e-1))
return
# Run all methods as if with `nosetests ...`
if __name__ == "__main__":
run_module_suite()
|
lzkelley/zcode
|
zcode/math/tests/test_math_core.py
|
Python
|
mit
| 31,267
|
'''
Unit tests for MergeTracker.py
Verification tracking of which comps have been merged already
works as expected and produces valid models.
'''
import numpy as np
import unittest
from bnpy.learnalg import MergeTracker
class TestMergeTracker(unittest.TestCase):
def shortDescription(self):
return None
def setUp(self):
pass
def test_recordMergeResult_assertRaisesOnRepeatPair(self):
MT = MergeTracker(4)
        print(MT.excludeList)
MT.recordResult(0, 1, True)
with self.assertRaises(AssertionError):
MT.recordResult(0, 1, True)
def test_recordMergeResult_assertRaisesWhenCompAlreadyPartOfMerge(self):
MT = MergeTracker(4)
        print(MT.excludeList)
MT.recordResult(2, 3, True)
with self.assertRaises(AssertionError):
MT.recordResult(0, 2, False)
with self.assertRaises(AssertionError):
MT.recordResult(1, 2, False)
def test_recordMergeResult_assertRaisesOnRepeatPair2(self):
MT = MergeTracker(6)
MT.recordResult(0, 1, False)
MT.recordResult(0, 2, False)
MT.recordResult(0, 3, False)
MT.recordResult(0, 4, True)
MT.recordResult(1, 2, True)
assert len(MT.excludePairs[1]) == MT.K
with self.assertRaises(AssertionError):
MT.recordResult(1, 2, False)
def test_recordMergeResult(self):
MT = MergeTracker(6)
MT.recordResult(0, 1, False)
MT.recordResult(0, 2, False)
MT.recordResult(0, 3, False)
assert len(MT.excludeList) == 0
MT.recordResult(0, 4, True)
assert 0 in MT.excludeList
assert 1 not in MT.excludeList
MT.recordResult(1, 2, True)
assert 1 in MT.excludeList
assert 2 not in MT.excludeList
MT.recordResult(2, 3, True)
assert 2 in MT.excludeList
assert MT.K == 3
assert MT.OrigK == 6
assert (0,4) in MT.acceptedOrigIDs
assert (1,2) in MT.acceptedOrigIDs
assert (3,5) in MT.acceptedOrigIDs
def test_synchronize_catch_former_bug1(self):
''' Given un-synched excludeList and excludePairs,
verify that the synchronization will discover (correctly)
that no pairs are left
This prevents relapse of a bug captured in Jan 2013
'''
MT = MergeTracker(6)
MT.excludeList = set([0, 2, 1, 4])
MT.excludePairs[0] = set([0, 1, 2, 3, 4, 5])
MT.excludePairs[1] = set([0, 1, 2, 3, 4, 5])
MT.excludePairs[2] = set([0, 1, 2, 3, 4, 5])
MT.excludePairs[3] = set([0, 1, 2, 3, 5])
MT.excludePairs[4] = set([0, 1, 2, 3, 4, 5])
MT.excludePairs[5] = set([0, 1, 2, 3, 5])
MT._synchronize_and_verify()
for k in range(6):
assert k in MT.excludeList
assert not MT.hasAvailablePairs()
def test_synchronize_catch_former_bug2(self):
''' Given un-synched excludeList and excludePairs,
verify that the synchronization will discover (correctly)
that no pairs are left
This prevents relapse of a bug captured in Jan 2013
'''
MT = MergeTracker(6)
MT.excludeList = set([1, 4, 2, 3])
MT.excludePairs[0] = set([0, 1, 3, 4, 5])
MT.excludePairs[1] = set([0, 1, 2, 3, 4, 5])
MT.excludePairs[2] = set([0, 1, 2, 3, 4, 5])
MT.excludePairs[3] = set([0, 1, 2, 3, 4, 5])
MT.excludePairs[4] = set([0, 1, 2, 3, 4, 5])
MT.excludePairs[5] = set([0, 1, 3, 4, 5])
MT._synchronize_and_verify()
for k in range(6):
assert k in MT.excludeList
assert not MT.hasAvailablePairs()
def test_synchronize_catch_former_bug3(self):
'''
This prevents relapse of a bug captured in Jan 2013
'''
MT = MergeTracker(7)
MT.excludeList = set([3, 0, 2, 6])
MT.excludePairs[0] = set([0, 1, 2, 3, 4, 5, 6])
MT.excludePairs[1] = set([0, 1, 2, 3, 5])
MT.excludePairs[2] = set([0, 1, 2, 3, 4, 5, 6])
MT.excludePairs[3] = set([0, 1, 2, 3, 4, 5, 6])
MT.excludePairs[4] = set([0, 2, 3, 4, 5])
MT.excludePairs[5] = set([0, 1, 2, 3, 4, 5])
MT.excludePairs[6] = set([0, 1, 2, 3, 4, 5, 6])
MT._synchronize_and_verify()
assert 1 in MT.getAvailableComps()
assert 4 in MT.getAvailableComps()
assert 5 in MT.excludePairs[1]
assert 1 in MT.excludePairs[5]
assert 6 in MT.excludePairs[4]
assert 6 in MT.excludePairs[1]
|
daeilkim/refinery
|
refinery/bnpy/bnpy-dev/tests/merge/TestMergeTracker.py
|
Python
|
mit
| 4,171
|
"""
A pretty lame implementation of a memoryview object for Python 2.6.
"""
from collections import Iterable
from numbers import Integral
import string
from future.utils import istext, isbytes, PY3, with_metaclass
from future.types import no, issubset
# class BaseNewBytes(type):
# def __instancecheck__(cls, instance):
# return isinstance(instance, _builtin_bytes)
class newmemoryview(object): # with_metaclass(BaseNewBytes, _builtin_bytes)):
"""
A pretty lame backport of the Python 2.7 and Python 3.x
memoryviewview object to Py2.6.
"""
    def __init__(self, obj):
        # returning a value from __init__ would raise TypeError, so store the
        # wrapped object instead
        self.obj = obj
__all__ = ['newmemoryview']
|
thonkify/thonkify
|
src/lib/future/types/newmemoryview.py
|
Python
|
mit
| 654
|
# Author: Sungchul Choi, sc82.choi at gachon.ac.kr
# Version: 0.1
# Description
# Client program for the "automatic homework grading program" used in the
# Introduction to Programming course at Gachon University.
#
# HUMAN KNOWLEDGE BELONGS TO THE WORLD. -- From the movie "Antitrust"
# Copyright (C) 2015 TeamLab@Gachon University
import argparse
import pickle
import os
import types
import requests
import json
from importlib.machinery import SourceFileLoader
import unittest
TOKEN_PICKLE_FILE_NAME = "access_token"
HOST = "theteamlab.io"
ASSIGNMENT_NAME = "product_mix_problem.py"
def getArgumentsParser(argv=None):
parser = argparse.ArgumentParser(
        prog='A program for autograding your assignment. Coded by TeamLab@Gachon University',)
parser.add_argument("-get", help="Write your assignment name that you want to download")
parser.add_argument("-submit", help="Write your assignment name that you want to submit")
argumentValue = parser.parse_args(argv)
if not (argumentValue.get or argumentValue.submit):
parser.error('One of -submit or -get must be given')
return argumentValue;
def printInformationMessage(actionType, assignmentName):
if (actionType == "get"):
message = "== Getting templates | "
else:
        message = "== Submitting solutions | "
print (message + assignmentName)
# Get JWT token to access REST API
def getToken():
if os.path.isfile(TOKEN_PICKLE_FILE_NAME):
try:
with open(TOKEN_PICKLE_FILE_NAME, 'rb') as accesstoken:
token_file = pickle.load(accesstoken)
return token_file['token'], token_file['username']
except EOFError:
            print ("Existing access_token could not be read")
return None, None
else:
return None,None
def getLoginInformation():
login_id = input("Login ID: ")
login_password = input("Password :")
return [login_id, login_password]
def getAccessTokenFromServer(username, login_password):
headers = {'Content-type': 'application/json'}
payload = {"password":login_password, "username":username}
access_token_jwt = requests.post("http://"+HOST+"/api-token-auth/", json=payload, headers=headers)
if (access_token_jwt.ok) : return access_token_jwt.text
else: return None
def makeAccessTokenPickle(access_token, username):
    data = {'token': access_token, 'username': username}
    with open(TOKEN_PICKLE_FILE_NAME, 'wb') as pickle_file:
        pickle.dump(data, pickle_file)
    return data
def checkTokenReplacement(username):
    replacement = 'a'
    while replacement.lower() not in ['t', 'yes', 'y', 'true', 'n', 'no', 'f', 'false']:
        message = ("Use token from last successful submission (%s)? (Y/n): " % username)
        replacement = input(message)
        if replacement.lower() in ['t', 'yes', 'y', 'true']:
            return True
        elif replacement.lower() in ['n', 'no', 'f', 'false']:
            return False
        else:
            print("Wrong input")
    return True
def getFileContents(fileName):
    with open(fileName, "r", encoding="utf8") as contents_file:
        contents = contents_file.read()
    return contents
def getAssignmentTemplateFileFromServer(access_token, assignment_name):
payload = {
"assignment_name" : assignment_name,
}
accesstoken_dict = json.loads(access_token)
headers = {'Authorization': 'JWT ' + accesstoken_dict['token']}
result = requests.post("http://"+HOST+"/autograder/assignments/%s/submissionready" % assignment_name, json=payload, headers=headers)
return result
def submitAssignmentFileToServer(access_token, assignment_file_name):
assignment_contents = getFileContents(assignment_file_name)
[basename, ext] = assignment_file_name.split(".")
payload = {
"template_file_name" : assignment_file_name,
"template_file_contents" : assignment_contents,
}
accesstoken_dict = json.loads(access_token)
headers = {'Authorization': 'JWT ' + accesstoken_dict['token']}
result = requests.post("http://"+HOST+"/autograder/assignments/%s/submission" % basename, json=payload, headers=headers)
#TODO Add exception handling
return result
def makeTemplateFile(result_text):
try:
data = json.loads(result_text, strict=False)
with open(data['template_file_name'], 'w') as f:
f.write(data['template_file_contents'])
print ("%s file is created for your %s assignment" % (data['template_file_name'], data['assignment_name']))
return True
except IOError:
        print ("Unable to create the template file: %s" % data['template_file_name'])
return False
except:
return False
def removeExpiredAccessKey():
if os.path.isfile(TOKEN_PICKLE_FILE_NAME):
os.remove(TOKEN_PICKLE_FILE_NAME)
else: ## Show an error ##
print("Error: %s file not found" % TOKEN_PICKLE_FILE_NAME)
def printTestResults(text):
json_data = json.loads(text)
a = "-"*20; b = "-"*10; c = "-"*20
print ( '%20s | %10s | %20s' % (a,b,c) )
print ( '%20s | %10s | %20s' % ("Function Name","Passed?","Feedback") )
print ( '%20s | %10s | %20s' % (a,b,c) )
for result in json_data:
if result['test_result'] == ('S'):
passed = 'PASS'
feedback = 'Good Job'
else:
passed = 'Not Yet'
if result['test_result'] == ('E'):
feedback = 'Check Your Logic'
if result['test_result'] == ('F'):
feedback = 'Check Your Grammar'
print ( '%20s | %10s | %20s' % (result['assignment_detail'],passed,feedback ) )
print ( '%20s | %10s | %20s' % (a,b,c) )
def main():
# Check Argument
# To download an assignment template file : -get <ASSIGNMENT_NAME>
# To submit an assignment template file : -submit <ASSIGNMENT_NAME>
# [actionType, assignment_name] = checkArguements(argumentValue)
actionType = "submit"
assignment_name = ASSIGNMENT_NAME
# Check User Login Information
printInformationMessage(actionType, assignment_name)
# Check Your Access Token
[access_token, username] = getToken()
# Get New Access Token
if access_token == None:
while (access_token == None):
[username, login_password] = getLoginInformation()
access_token = getAccessTokenFromServer(username, login_password)
            if (access_token == None): print ("Wrong user ID or password. Please try again.")
else:
answer = checkTokenReplacement(username)
if (answer == False):
access_token = None
while (access_token == None):
[username, login_password] = getLoginInformation()
access_token = getAccessTokenFromServer(username, login_password)
                if (access_token == None): print ("Wrong user ID or password. Please try again.")
# Make access pickle before end of program
makeAccessTokenPickle(access_token, username)
if (actionType == "get"):
result = getAssignmentTemplateFileFromServer(access_token, assignment_name)
if (result.status_code == 200):
is_file_created = makeTemplateFile(result.text)
if (is_file_created == True):
print ("Thank you for using the program. Enjoy Your Assignment - From TeamLab")
elif (result.status_code == 403):
print (result.text)
removeExpiredAccessKey()
            print ("Your expired access key was removed. Please try again.")
elif (result.status_code == 500):
print (result.text)
            print ("An unexpected error occurred. Please contact teamlab.gachon@gmail.com")
elif (actionType == "submit"):
result = submitAssignmentFileToServer(access_token, assignment_name)
if (result.status_code == 200):
printTestResults(result.text)
# Make access pickle before end of program
elif (result.status_code == 403):
print (result.text)
removeExpiredAccessKey()
            print ("Your expired access key was removed. Please try again.")
elif (result.status_code == 500):
            print ("An unexpected error occurred. Your code does not seem to work. Please run your code: \n python {0} ".format(ASSIGNMENT_NAME) )
if __name__ == "__main__":
main()
|
TeamLab/Gachon_CS50_OR_KMOOC
|
gurobi_quiz/product_mix/submit.py
|
Python
|
mit
| 8,554
|
import json
import os
import time
from rottentomatoes import RT
BOX_OFFICE_COUNTRIES = [
"us",
"in",
"uk",
"nl",
]
LIMIT = 50 # max allowed by rotten tomatoes
OUTPUT_FILE = "download/more_movies.json"
def main():
assert os.environ["RT_KEY"], "Your Rotten Tomatoes API key should be stored in the RT_KEY env var!"
rt = RT() # NOTE: you should have your API key stored in RT_KEY before this will work
movies = []
link_template = ""
for country in BOX_OFFICE_COUNTRIES:
        print("requesting box office hits for {}".format(country))
r = rt.lists('movies', 'box_office', limit=LIMIT, country=country)
movies += r['movies']
link_template = link_template or r['link_template']
time.sleep(10) # respect our API limits!
# to maintain compatibility with movies.json fields, our top level dict
# should have the following fields:
# total (int)
# movies (list)
# link_template (string)
total = len(movies)
result = {
"total": total,
"movies": movies,
"link_template": link_template,
}
with open(OUTPUT_FILE, "w") as f:
json.dump(result, f, indent=2, sort_keys=True)
if __name__ == "__main__":
main()
|
indirectlylit/whattowatch
|
data-utils/_fetch_new_rt_data.py
|
Python
|
mit
| 1,292
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class StorageProfile(Model):
"""Specifies the storage settings for the virtual machine disks.
:param image_reference: Specifies information about the image to use. You
can specify information about platform images, marketplace images, or
virtual machine images. This element is required when you want to use a
platform image, marketplace image, or virtual machine image, but is not
used in other creation operations.
:type image_reference:
~azure.mgmt.compute.v2016_03_30.models.ImageReference
:param os_disk: Specifies information about the operating system disk used
by the virtual machine. <br><br> For more information about disks, see
[About disks and VHDs for Azure virtual
machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
:type os_disk: ~azure.mgmt.compute.v2016_03_30.models.OSDisk
:param data_disks: Specifies the parameters that are used to add a data
disk to a virtual machine. <br><br> For more information about disks, see
[About disks and VHDs for Azure virtual
machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-about-disks-vhds?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json).
:type data_disks: list[~azure.mgmt.compute.v2016_03_30.models.DataDisk]
"""
_attribute_map = {
'image_reference': {'key': 'imageReference', 'type': 'ImageReference'},
'os_disk': {'key': 'osDisk', 'type': 'OSDisk'},
'data_disks': {'key': 'dataDisks', 'type': '[DataDisk]'},
}
def __init__(self, *, image_reference=None, os_disk=None, data_disks=None, **kwargs) -> None:
super(StorageProfile, self).__init__(**kwargs)
self.image_reference = image_reference
self.os_disk = os_disk
self.data_disks = data_disks
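# --- usage sketch (added for illustration, not part of the generated file) ---
# Constructing a StorageProfile for a platform image.  ImageReference is assumed
# here to take the usual publisher/offer/sku/version keywords; confirm the field
# names against the models of this API version before relying on them.
#
# from azure.mgmt.compute.v2016_03_30.models import ImageReference, StorageProfile
#
# profile = StorageProfile(
#     image_reference=ImageReference(
#         publisher='Canonical',
#         offer='UbuntuServer',
#         sku='16.04-LTS',
#         version='latest',
#     ),
# )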
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-compute/azure/mgmt/compute/v2016_03_30/models/storage_profile_py3.py
|
Python
|
mit
| 2,421
|
import hashlib
from typing import BinaryIO
def get_fp_sha256(fp: BinaryIO) -> str:
"""
Get the SHA-256 checksum of the data in the file `fp`.
:return: hex string
"""
fp.seek(0)
hasher = hashlib.sha256()
while True:
        chunk = fp.read(524288)  # read in 512 KiB chunks
if not chunk:
break
hasher.update(chunk)
return hasher.hexdigest()
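# --- usage sketch (added for illustration) ---
# get_fp_sha256 expects a binary file object; hashing this module itself keeps
# the demo self-contained.
if __name__ == "__main__":
    with open(__file__, "rb") as fp:
        print(get_fp_sha256(fp))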
|
valohai/valohai-cli
|
valohai_cli/utils/hashing.py
|
Python
|
mit
| 377
|
from setuptools import setup
setup(name='decision_tree',
version='0.04',
description='Practice implementation of a classification decision tree',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
],
keywords='classification decision tree machine learning random forest',
url='https://github.com/metjush/decision_tree',
author='metjush',
author_email='metjush@gmail.com',
license='MIT',
packages=['decision_tree'],
install_requires=[
'numpy',
'sklearn'
],
include_package_data=True,
zip_safe=False)
|
metjush/decision_tree
|
setup.py
|
Python
|
mit
| 734
|
import os
from redlib.api.system import sys_command, CronDBus, CronDBusError, is_linux, is_windows
from ..util.logger import log
from . import Desktop, DesktopError
# imported for their side effect of defining Desktop subclasses, which
# get_desktop() below discovers via Desktop.__subclasses__()
from . import gnome_desktop
from . import feh_desktop
if is_windows():
from .windows_desktop import WindowsDesktop
def load_optional_module(module, package=None, err_msg=None):
import importlib
try:
importlib.import_module(module, package=package)
except ImportError as e:
print(e)
if err_msg is not None:
print(err_msg)
load_optional_module('.kde_plasma_desktop', package='wallp.desktop', err_msg='KDE Plasma will not be supported.')
def get_desktop():
if is_linux():
crondbus = CronDBus(vars=['GDMSESSION', 'DISPLAY', 'XDG_CURRENT_DESKTOP'])
crondbus.setup()
gdmsession = os.environ.get('GDMSESSION', None)
xdg_current_desktop = os.environ.get('XDG_CURRENT_DESKTOP', None)
if gdmsession is None and xdg_current_desktop is None:
log.error('could not read environment variables: GDMSESSION or XDG_CURRENT_DESKTOP')
raise DesktopError()
for desktop_class in Desktop.__subclasses__():
if desktop_class.supports(gdmsession, xdg_current_desktop):
return desktop_class()
log.error('unsupported window manager: %s, %s'%(gdmsession, xdg_current_desktop))
elif is_windows():
return WindowsDesktop()
else:
log.error('unsupported OS')
return None
|
amol9/wallp
|
wallp/desktop/desktop_factory.py
|
Python
|
mit
| 1,362
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='lkd',
version='2',
packages=['lkd', 'tests'],
author='Karan Goel',
author_email='karan@goel.im',
maintainer='Karan Goel',
maintainer_email='karan@goel.im',
url='http://www.goel.im/',
license='MIT License',
long_description='Python wrapper for lkd.to API.',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Internet',
'Topic :: Internet :: WWW/HTTP :: Browsers',
'Topic :: Internet :: WWW/HTTP :: Indexing/Search',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
|
karan/py-lkd.to
|
setup.py
|
Python
|
mit
| 1,225
|
#!/usr/bin/env python3
"""
http://adventofcode.com/day/17
Part 1
------
The elves bought too much eggnog again - 150 liters this time. To
fit it all into your refrigerator, you'll need to move it into
smaller containers. You take an inventory of the capacities of
the available containers.
For example, suppose you have containers of size 20, 15, 10, 5,
and 5 liters. If you need to store 25 liters, there are four ways
to do it:
- 15 and 10
- 20 and 5 (the first 5)
- 20 and 5 (the second 5)
- 15, 5, and 5
Filling all containers entirely, how many different combinations
of containers can exactly fit all 150 liters of eggnog?
Part 2
------
While playing with all the containers in the kitchen, another load
of eggnog arrives! The shipping and receiving department is
requesting as many containers as you can spare.
Find the minimum number of containers that can exactly fit all
150 liters of eggnog. How many different ways can you fill that
number of containers and still hold exactly 150 liters?
In the example above, the minimum number of containers was two.
There were three ways to use that many containers, and so the
answer there would be 3.
"""
from __future__ import print_function, unicode_literals
from itertools import combinations
import os
import re
import sys
INFILE = 'inputs/input17.txt'
def main():
containers = list()
with open(INFILE) as f:
for line in f:
containers.append(int(line.strip()))
# Part 1
p1count = 0
for s in range(len(containers)):
for c in combinations(containers, s):
if sum(c) == 150:
p1count += 1
# Part 2
p2sizes = dict()
p2min = len(containers)
for i in range(p2min):
p2sizes[i] = 0
for s in range(len(containers)):
for c in combinations(containers, s):
if sum(c) == 150:
if len(c) < p2min:
p2min = len(c)
p2sizes[s] += 1
msg = '[Python] Puzzle 17-1: {}'
print(msg.format(p1count))
msg = '[Python] Puzzle 17-2: {}'
print(msg.format(p2sizes[p2min]))
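
def _check_worked_example():
    # Hedged sketch (editor's addition): verifies the worked example from the
    # docstring above -- containers of 20, 15, 10, 5 and 5 liters holding
    # exactly 25 liters give four combinations, and the minimum of two
    # containers can be reached in three ways. The helper name is illustrative.
    sizes, target = (20, 15, 10, 5, 5), 25
    hits = [c for n in range(1, len(sizes) + 1)
            for c in combinations(sizes, n) if sum(c) == target]
    fewest = min(len(c) for c in hits)
    return len(hits), sum(1 for c in hits if len(c) == fewest)  # -> (4, 3)
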
if __name__ == '__main__':
main()
|
rnelson/adventofcode
|
advent2015/day17.py
|
Python
|
mit
| 2,152
|
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
class Unicorn(object):
def __init__(self):
import unicornhathd as unicorn
unicorn.rotation(180)
unicorn.brightness(0.75)
def write_pixels(self, data):
import unicornhathd as unicorn
for y, row in enumerate((data * 255).astype(np.uint8)):
for x, color in enumerate(row):
unicorn.set_pixel(x, y, *color)
unicorn.show()
|
Spooner/pixel-table
|
pixel_table/external/unicorn.py
|
Python
|
mit
| 513
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from unittest import TestCase, main
from pitchpx.mlbam_util import MlbamUtil, MlbAmHttpNotFound
__author__ = 'Shinichi Nakagawa'
class TestMlbamUtil(TestCase):
"""
MLBAM Util Class Test
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_get_content_200(self):
"""
        Get HTML content (status: 200, default headers)
"""
req = MlbamUtil._get_content(
'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_lanmlb_sdnmlb_1/game.xml'
)
self.assertEqual(req.status_code, 200)
self.assertEqual(req.request.headers['Accept'],
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8')
self.assertEqual(req.request.headers['User-Agent'],
('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) '
'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.86 Safari/537.36'))
def test_get_content_200_setting_header(self):
"""
        Get HTML content (status: 200, custom headers)
"""
req = MlbamUtil._get_content(
'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_lanmlb_sdnmlb_1/game.xml',
headers={'Accept': 'text/html', 'User-Agent': 'Python-urllib/3.5'}
)
self.assertEqual(req.status_code, 200)
self.assertEqual(req.request.headers['Accept'], 'text/html')
self.assertEqual(req.request.headers['User-Agent'], 'Python-urllib/3.5')
def test_get_content_404_setting_header(self):
"""
        Get HTML content (status: 404, custom headers)
"""
req = MlbamUtil._get_content(
'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_chnmlb_anamlb_1/game.xml',
headers={'Accept': 'text/html', 'User-Agent': 'Python-urllib/3.5'}
)
self.assertEqual(req.status_code, 404)
self.assertEqual(req.request.headers['Accept'], 'text/html')
self.assertEqual(req.request.headers['User-Agent'], 'Python-urllib/3.5')
def test_find_xml_200(self):
"""
        Get XML content (status: 200, default headers)
"""
req = MlbamUtil.find_xml(
'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_lanmlb_sdnmlb_1/game.xml',
'lxml',
)
self.assertIsNotNone(req)
def test_find_xml_404(self):
"""
        Get XML content (status: 404, default headers)
"""
        try:
            _ = MlbamUtil.find_xml(
                'http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_chnmlb_anamlb_1/game.xml',
                'lxml',
            )
            self.fail('MlbAmHttpNotFound was not raised for a missing game file')
        except MlbAmHttpNotFound as e:
            self.assertEqual(
                e.msg,
                ('HTTP Error '
                 'url: http://gd2.mlb.com/components/game/mlb/year_2016/month_04/day_06/gid_2016_04_06_chnmlb_anamlb_1/game.xml '
                 'status: 404'
                 )
            )
if __name__ == '__main__':
main()
|
Shinichi-Nakagawa/pitchpx
|
tests/pitchpx/test_mlbam_util.py
|
Python
|
mit
| 3,194
|
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import patterns, url
from . import tasks
from . import views
urlpatterns = patterns('',
url(r'^$', views.index, name='layoutdemo_index'),
)
# Local Variables:
# indent-tabs-mode: nil
# End:
# vim: ai et sw=4 ts=4
|
rentalita/django-layoutdemo
|
src/python/layoutdemo/default/urls.py
|
Python
|
mit
| 276
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2011-2015 Slack
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from django.test import TestCase as TestCaseBase
from .scraper import beatport, discogs, itunes, junodownload, metalarchives, musicbrainz, bandcamp, musiksammler
from .result import ReleaseResult, ListResult, NotFoundResult, Result
import unittest
def todict(obj):
    """Recursively convert Result objects into plain dicts/lists so they can be compared."""
    if hasattr(obj, "__iter__"):
        return [todict(v) for v in obj]
    elif hasattr(obj, "__dict__"):
        return dict([(key, todict(value))
                     for key, value in obj.__dict__.iteritems()
                     if not callable(value) and not key.startswith('_')])
    else:
        return obj
class TestCase(TestCaseBase):
maxDiff = None
def setUp(self):
self.addTypeEqualityFunc(ReleaseResult, 'assertResultEqual')
self.addTypeEqualityFunc(ListResult, 'assertResultEqual')
self.addTypeEqualityFunc(NotFoundResult, 'assertResultEqual')
def assertResultEqual(self, d1, d2, msg=None):
self.assertTrue(issubclass(d1.__class__, Result), 'First argument is not a Result')
self.assertTrue(issubclass(d2.__class__, Result), 'Second argument is not a Result')
self.assertEqual(d1.__class__.__name__, d2.__class__.__name__)
self.assertEqual(todict(d1), todict(d2), msg)
class DiscogsTest(TestCase):
def test_simple_album(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'03 Nov 2000')
release_event.set_country(u'Germany')
expected.append_release_event(release_event)
expected.set_format(u'CD, Album')
label_id = expected.create_label_id()
label_id.set_label(u'Richterskala')
label_id.append_catalogue_nr(u'TRI 070 CD')
expected.append_label_id(label_id)
expected.set_title(u'Hast Du Mich Vermisst?')
artist = expected.create_artist()
artist.set_name(u'ASP')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_genre(u'Rock')
expected.append_style(u'Goth Rock')
expected.append_style(u'Synth-pop')
expected.set_url(u'http://www.discogs.com/ASP-Hast-Du-Mich-Vermisst/release/453432')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Schwarzer Schmetterling')
track.set_length(290)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Where Do The Gods Go')
track.set_length(226)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Dancing')
track.set_length(345)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'K\xfcss Mich')
track.set_length(311)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Sing Child')
track.set_length(239)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Teach Me War')
track.set_length(225)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Imbecile Anthem')
track.set_length(222)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Und Wir Tanzten (Ungeschickte Liebesbriefe)')
track.set_length(305)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Blinded')
track.set_length(443)
disc.append_track(track)
expected.append_disc(disc)
scraper = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/ASP-Hast-Du-Mich-Vermisst/release/453432')
result = scraper.get_result()
self.assertEqual(expected, result)
def test_multiple_cds(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'25 May 2007')
release_event.set_country(u'Germany')
expected.append_release_event(release_event)
expected.set_format(u'4 \xd7 CD, Compilation, Limited Edition, Digipak, Box Set, Limited Edition, Hand-Numbered')
label_id = expected.create_label_id()
label_id.set_label(u'[Trisol] Music Group GmbH')
label_id.append_catalogue_nr(u'TRI 303 CD')
expected.append_label_id(label_id)
expected.set_title(u"The 'Once In A Lifetime' Recollection Box")
artist = expected.create_artist()
artist.set_name(u'ASP')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
artist = expected.create_artist()
artist.set_name(u'Chamber')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Classical')
expected.append_genre(u'Non-Music')
expected.append_genre(u'Rock')
expected.append_style(u'Acoustic')
expected.append_style(u'Goth Rock')
expected.append_style(u'Classical')
expected.append_style(u'Speech')
expected.set_url(u'http://www.discogs.com/ASP-Chamber-The-Once-In-A-Lifetime-Recollection-Box/release/977684')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Once In A Lifetime, Part 1')
track.set_length(351)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u"A Dead Man's Song")
track.set_length(312)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Versuchung')
track.set_length(345)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Torn')
track.set_length(304)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Demon Love')
track.set_length(272)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'The Paperhearted Ghost')
track.set_length(283)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'A Tale Of Real Love')
track.set_length(316)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Hunger')
track.set_length(289)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'The Truth About Snow-White')
track.set_length(240)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'She Wore Shadows')
track.set_length(275)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Und Wir Tanzten (Ungeschickte Liebesbriefe)')
track.set_length(317)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Once In A Lifetime, Part 2 (Reprise)')
track.set_length(164)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'K\xfcss Mich')
track.set_length(384)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Silence - Release')
track.set_length(225)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Solitude')
track.set_length(220)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Die Ballade Von Der Erweckung')
track.set_length(527)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Another Conversation')
track.set_length(201)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Sing Child')
track.set_length(449)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Ich Will Brennen')
track.set_length(300)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Toscana')
track.set_length(374)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Ride On')
track.set_length(222)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Hometown')
track.set_length(181)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Werben')
track.set_length(293)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Once In A Lifetime, Part 3 (Finale)')
track.set_length(608)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(3)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'H\xe4sslich')
track.set_length(145)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Backstage (All Areas)')
track.set_length(573)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Paracetamoltr\xe4ume')
track.set_length(517)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Ausszug Aus "Tremendista" Feat. Ralph M\xfcller/Gitarre')
track.set_length(1473)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Campari O')
track.set_length(159)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(4)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Asp, Soundcheck-Outtake: "Sicamore Trees"')
track.set_length(94)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Demon Love')
track.set_length(275)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'The Truth About Snow-White')
track.set_length(274)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'She Wore Shadows')
track.set_length(319)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Sing Child')
track.set_length(469)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Hometown')
track.set_length(221)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Hunger')
track.set_length(274)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Silence-Release')
track.set_length(208)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Asp, Soundcheck-Outtake: "She Moved Through The Fair"')
track.set_length(120)
disc.append_track(track)
expected.append_disc(disc)
scraper = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/ASP-Chamber-The-Once-In-A-Lifetime-Recollection-Box/release/977684')
result = scraper.get_result()
self.assertEqual(expected, result)
def test_featuring_track_artist(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'01 Apr 2011')
release_event.set_country(u'Europe')
expected.append_release_event(release_event)
expected.set_format(u'CD, Album')
label_id = expected.create_label_id()
label_id.set_label(u'Rootdown Records')
label_id.append_catalogue_nr(u'RDM13074-2')
expected.append_label_id(label_id)
expected.set_title(u'Unter Freunden')
artist = expected.create_artist()
artist.set_name(u'Mono & Nikitaman')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Reggae')
expected.append_style(u'Dancehall')
expected.set_url(u'http://www.discogs.com/Mono-Nikitaman-Unter-Freunden/release/3432154')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Intro')
track.set_length(13)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Unter Freunden')
track.set_length(184)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Karma')
track.set_length(189)
track_artist = expected.create_artist()
track_artist.set_name(u"Ce'cile")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Zeit Steht Still')
track.set_length(260)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Komplizen')
track.set_length(185)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Wenn Sich Der Nebel Verzieht')
track.set_length(197)
track_artist = expected.create_artist()
track_artist.set_name(u'Gentleman')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Schwerelos')
track.set_length(227)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Ein Paar Meter')
track.set_length(198)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Cash')
track.set_length(188)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Dezibel')
track.set_length(270)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Kontrast')
track.set_length(214)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'R\xfcckkehr Der Clowns')
track.set_length(198)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Superstar')
track.set_length(227)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Underground')
track.set_length(204)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Showdown')
track.set_length(261)
track_artist = expected.create_artist()
track_artist.set_name(u'Rebellion')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
scraper = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Mono-Nikitaman-Unter-Freunden/release/3432154')
result = scraper.get_result()
self.assertEqual(expected, result)
def test_remix_track_artist(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'04 Jul 2005')
release_event.set_country(u'Germany')
expected.append_release_event(release_event)
expected.set_format(u'CD, Album, Limited Edition, Digibook, CD, Compilation, Limited Edition')
label_id = expected.create_label_id()
label_id.set_label(u'Trisol')
label_id.append_catalogue_nr(u'TRI 231 CD')
expected.append_label_id(label_id)
expected.set_title(u'Aus Der Tiefe')
artist = expected.create_artist()
artist.set_name(u'ASP')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_genre(u'Rock')
expected.append_style(u'Alternative Rock')
expected.set_url(u'http://www.discogs.com/ASP-Aus-Der-Tiefe-Der-Schwarze-Schmetterling-IV/release/710517')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Beschw\xf6rung')
track.set_length(391)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Willkommen Zur\xfcck')
track.set_length(137)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Schwarzes Blut')
track.set_length(212)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Im Dunklen Turm')
track.set_length(101)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Me')
track.set_length(278)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Schattenschreie')
track.set_length(21)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Hunger')
track.set_length(321)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Fremde Erinnerungen')
track.set_length(72)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Ballade Von Der Erweckung')
track.set_length(533)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Tiefenrausch')
track.set_length(245)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Schmetterling, Du Kleines Ding')
track.set_length(42)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Ich Komm Dich Holn')
track.set_length(257)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Werben')
track.set_length(268)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Aus Der Tiefe')
track.set_length(198)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Spiegelaugen')
track.set_length(204)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Tiefenrausch (Reprise)')
track.set_length(67)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'17')
track.set_title(u'Panik')
track.set_length(252)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'18')
track.set_title(u'Spiegel')
track.set_length(331)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Schwarzes Blut (Haltung Version)')
track.set_length(249)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Werben (Subtil Edit)')
track.set_length(257)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Me (Single Version)')
track.set_length(225)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Tiefenrausch (Feat. Sara Noxx)')
track.set_length(245)
track_artist = expected.create_artist()
track_artist.set_name(u'Sara Noxx')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Hunger (Single Mix)')
track.set_length(259)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Panik (Ganz Rauf-Verison)')
track.set_length(273)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Beschw\xf6rung (Siegeszug Instrumental)')
track.set_length(205)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Buch Des Vergessens (Unreines Spiegelsonett)')
track.set_length(115)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Kokon (Brandneu-Remix Von Umbra Et Imago)')
track.set_length(279)
track_artist = expected.create_artist()
track_artist.set_name(u'Umbra Et Imago')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Me (Me And You Remix Von Blutengel)')
track.set_length(344)
track_artist = expected.create_artist()
track_artist.set_name(u'Blutengel')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Und Wir Tanzten (Ungeschickte Liebesbriefe) (Live)')
track.set_length(347)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Ich Will Brennen (Live)')
track.set_length(369)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Starfucker: In Der Folterkammer')
track.set_length(127)
disc.append_track(track)
expected.append_disc(disc)
scraper = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/ASP-Aus-Der-Tiefe-Der-Schwarze-Schmetterling-IV/release/710517')
result = scraper.get_result()
self.assertEqual(expected, result)
def test_vinyl(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2008')
release_event.set_country(u'Germany')
expected.append_release_event(release_event)
expected.set_format(u'2 \xd7 Vinyl, LP')
label_id = expected.create_label_id()
label_id.set_label(u'Rootdown Records')
label_id.append_catalogue_nr(u'RDM 13051-1')
expected.append_label_id(label_id)
expected.set_title(u'Ausser Kontrolle')
artist = expected.create_artist()
artist.set_name(u'Mono & Nikitaman')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Reggae')
expected.append_style(u'Dancehall')
expected.append_style(u'Reggae-Pop')
expected.set_url(u'http://www.discogs.com/Mono-Nikitaman-Ausser-Kontrolle/release/1540929')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'A1')
track.set_title(u'Intro')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'A2')
track.set_title(u'Schlag Alarm')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'A3')
track.set_title(u'Kann Ja Mal Passieren')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'A4')
track.set_title(u'Ausser Kontrolle')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'A5')
track.set_title("Hol's Dir")
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'B1')
track.set_title(u'Das Alles')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'B2')
track.set_title(u'Digge Digge')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'B3')
track.set_title(u'Nur So')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'B4')
track.set_title(u'Yeah')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'C1')
track.set_title(u'Von Osten Bis Westen')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Russkaja')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'C2')
track.set_title(u'Wenn Ihr Schlaft')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'C3')
track.set_title(u'Unterwegs')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'C4')
track.set_title(u'Tiktak')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'D1')
track.set_title(u'Tut Mir Leid')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Nosliw')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'D2')
track.set_title(u'Es Kommt Anders')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'D3')
track.set_title(u'Das Alles (Zion Train Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Zion Train')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Mono-Nikitaman-Ausser-Kontrolle/release/1540929')
r = s.get_result()
self.assertEqual(expected, r)
def test_featuring_main_artist(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'08 Feb 2011')
release_event.set_country(u'Germany')
expected.append_release_event(release_event)
expected.set_format(u'3 \xd7 File, MP3, 320 kbps')
label_id = expected.create_label_id()
label_id.set_label(u'Redux Recordings')
label_id.append_catalogue_nr(u'RDX062')
expected.append_label_id(label_id)
expected.set_title(u'In My Dreams')
artist = expected.create_artist()
artist.set_name(u'Lifted Emotion')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
artist = expected.create_artist()
artist.set_name(u'Anastasiia Purple')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.FEATURING)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_style(u'Trance')
expected.set_url(u'http://www.discogs.com/Lifted-Emotion-feat-Anastasiia-Purple-In-My-Dreams/release/2806179')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'In My Dreams (Original Vocal Mix)')
track.set_length(558)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'In My Dreams (Original Dub Mix)')
track.set_length(558)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'In My Dreams (Ost & Meyer Extraodinary Mix)')
track.set_length(472)
track_artist = expected.create_artist()
track_artist.set_name(u'Ost & Meyer')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Lifted-Emotion-feat-Anastasiia-Purple-In-My-Dreams/release/2806179')
r = s.get_result()
self.assertEqual(expected, r)
def test_various_artists(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2010')
release_event.set_country(u'Germany')
expected.append_release_event(release_event)
expected.set_format(u'CD, Compilation')
label_id = expected.create_label_id()
label_id.set_label(u'Batbeliever Releases')
label_id.append_catalogue_nr(u'BAT 075')
expected.append_label_id(label_id)
expected.set_title(u'Gothic File 14')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_genre(u'Rock')
expected.append_style(u'EBM')
expected.append_style(u'Darkwave')
expected.append_style(u'Industrial')
expected.append_style(u'Goth Rock')
expected.append_style(u'Electro')
expected.set_url(u'http://www.discogs.com/Various-Gothic-File-14/release/3700493')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Echo In Me')
track.set_length(236)
track_artist = expected.create_artist()
track_artist.set_name(u'Diary Of Dreams')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Liar (Version)')
track.set_length(219)
track_artist = expected.create_artist()
track_artist.set_name(u'Gothminister')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'The End Of It All (Edit)')
track.set_length(237)
track_artist = expected.create_artist()
track_artist.set_name(u'Sirenia')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Sanctuary')
track.set_length(239)
track_artist = expected.create_artist()
track_artist.set_name(u'Merciful Nuns')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Worlds Collide (Demo Version)')
track.set_length(261)
track_artist = expected.create_artist()
track_artist.set_name(u'Covenant')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Drowning World')
track.set_length(253)
track_artist = expected.create_artist()
track_artist.set_name(u'Ien Oblique')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'In The Name Of God')
track.set_length(297)
track_artist = expected.create_artist()
track_artist.set_name(u'Betamorphose')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'PsychoCop (Folge 8)')
track.set_length(171)
track_artist = expected.create_artist()
track_artist.set_name(u'Don Harris')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Gothic-File-14/release/3700493')
r = s.get_result()
self.assertEqual(expected, r)
def test_label_with_suffix(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'25 Nov 2005')
release_event.set_country(u'Germany')
expected.append_release_event(release_event)
expected.set_format(u'CD, Album')
label_id = expected.create_label_id()
label_id.set_label(u'Indigo')
label_id.append_catalogue_nr(u'CD 55182')
expected.append_label_id(label_id)
expected.set_title(u'Prima Nocte')
artist = expected.create_artist()
artist.set_name(u'Feuerschwanz')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Folk')
expected.append_genre(u'World')
expected.append_genre(u'Country')
expected.append_genre(u'Rock')
expected.append_style(u'Medieval')
expected.set_url(u'http://www.discogs.com/Feuerschwanz-Prima-Nocte/release/2611694')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Es War Einmal')
track.set_length(172)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Das Mittelalter')
track.set_length(260)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Drachentanz')
track.set_length(224)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Das Turnier')
track.set_length(254)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Prima Nocte')
track.set_length(331)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'B\xe4rentanz')
track.set_length(232)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Herren Der Winde')
track.set_length(265)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Der Teufel')
track.set_length(290)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Schneewittchen')
track.set_length(377)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Der Traum')
track.set_length(319)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'R\xe4uber')
track.set_length(206)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Sauflied')
track.set_length(234)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Teufelsgeschenk')
track.set_length(264)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'La\xdft Die Ritter Schlafen')
track.set_length(313)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Gute Nacht')
track.set_length(420)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Feuerschwanz-Prima-Nocte/release/2611694')
r = s.get_result()
self.assertEqual(expected, r)
def test_album_with_unicode_dash_in_title(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'25 Jun 2012')
release_event.set_country(u'UK')
expected.append_release_event(release_event)
expected.set_format(u'CD, Album')
label_id = expected.create_label_id()
label_id.set_label(u'Ash International')
label_id.append_catalogue_nr(u'Ash 9.5')
expected.append_label_id(label_id)
expected.set_title(u'AUN \u2013 The Beginning And The End Of All Things')
artist = expected.create_artist()
artist.set_name(u'Christian Fennesz')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_genre(u'Stage & Screen')
expected.append_style(u'Abstract')
expected.append_style(u'Ambient')
expected.append_style(u'Modern Classical')
expected.append_style(u'Soundtrack')
expected.set_url(u'http://www.discogs.com/Christian-Fennesz-AUN-The-Beginning-And-The-End-Of-All-Things/release/2881000')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Kae')
track.set_length(131)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Aware')
track.set_length(288)
track_artist = expected.create_artist()
track_artist.set_name(u'Fennesz Sakamoto')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Haru')
track.set_length(282)
track_artist = expected.create_artist()
track_artist.set_name(u'Fennesz Sakamoto')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Sekai')
track.set_length(134)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Euclides')
track.set_length(184)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Sasazuka')
track.set_length(231)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Trace')
track.set_length(349)
track_artist = expected.create_artist()
track_artist.set_name(u'Fennesz Sakamoto')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Mori')
track.set_length(75)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'AUN40')
track.set_length(306)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Namuru')
track.set_length(170)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Himitsu')
track.set_length(166)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'AUN80')
track.set_length(217)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Nympha')
track.set_length(150)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Shinu')
track.set_length(215)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Hikari')
track.set_length(256)
track_artist = expected.create_artist()
track_artist.set_name(u'Christian Fennesz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Christian-Fennesz-AUN-The-Beginning-And-The-End-Of-All-Things/release/2881000')
r = s.get_result()
self.assertEqual(expected, r)
def test_master_release(self):
expected = ListResult()
expected.set_scraper_name(None)
item = expected.create_item()
item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burning')
item.set_info(u'10", Pic, Ltd | Love Peace And Unity Recordings | LOVE-03 | UK | 1997')
item.set_query(u'http://www.discogs.com/release/146468')
item.set_url(u'http://www.discogs.com/release/146468')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burning')
item.set_info(u'10", Pic, Ltd, S/Sided | Love Peace And Unity Recordings | LOVE-03 | UK | 1997')
item.set_query(u'http://www.discogs.com/release/1503116')
item.set_url(u'http://www.discogs.com/release/1503116')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burning')
item.set_info(u'12", Ltd, Pic | Love Peace And Unity Recordings | LOVE-06 | UK | 2006-04-18')
item.set_query(u'http://www.discogs.com/release/670448')
item.set_url(u'http://www.discogs.com/release/670448')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burnin')
item.set_info(u'12", Promo | Classic Confetti | CC 02 | UK | 2001')
item.set_query(u'http://www.discogs.com/release/2093234')
item.set_url(u'http://www.discogs.com/release/2093234')
expected.append_item(item)
item = expected.create_item()
item.set_name(u"Mystic Matt & Anthill Mob \u2013 'Burnin (Let The Music)'")
item.set_info(u'12" | Classic Confetti | CC 02 (2) | UK | 2001')
item.set_query(u'http://www.discogs.com/release/284437')
item.set_url(u'http://www.discogs.com/release/284437')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burnin')
item.set_info(u'12" | Classic Confetti | CC 02 | UK | 2001')
item.set_query(u'http://www.discogs.com/release/149302')
item.set_url(u'http://www.discogs.com/release/149302')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Mystic Matt & Anthill Mob \u2013 Burnin')
item.set_info(u'12" | Classic Confetti | CC02(3F/02) | UK & Europe | 2002')
item.set_query(u'http://www.discogs.com/release/739159')
item.set_url(u'http://www.discogs.com/release/739159')
expected.append_item(item)
s = discogs.MasterScraper.from_string('http://www.discogs.com/Mystic-Matt-Anthill-Mob-Burning/master/181860')
r = s.get_result()
self.assertEqual(expected, r)
def test_album_with_face_in_track_numbers(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'1984')
release_event.set_country(u'Sweden')
expected.append_release_event(release_event)
expected.set_format(u'Vinyl, 7"')
label_id = expected.create_label_id()
label_id.set_label(u'Mamma')
label_id.append_catalogue_nr(u'MA-501')
expected.append_label_id(label_id)
expected.set_title(u'Another Story')
artist = expected.create_artist()
artist.set_name(u'General Belgrano')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Rock')
expected.append_style(u'New Wave')
expected.set_url(u'http://www.discogs.com/General-Belgrano-Another-Story/release/2213179')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'Face I')
track.set_title(u'Another Story')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'Face II')
track.set_title("War Isn't Gold")
track.set_length(None)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/General-Belgrano-Another-Story/release/2213179')
r = s.get_result()
self.assertEqual(expected, r)
def test_album_with_roman_track_numbers(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'22 Apr 2014')
release_event.set_country(u'US')
expected.append_release_event(release_event)
expected.set_format(u'CD, Album, Deluxe Edition, Target Edition')
label_id = expected.create_label_id()
label_id.set_label(u'Goodbye Records')
label_id.append_catalogue_nr(u'GLS-0161-02')
expected.append_label_id(label_id)
label_id = expected.create_label_id()
label_id.set_label(u'Glassnote')
label_id.append_catalogue_nr(u'GLS-0161-02')
expected.append_label_id(label_id)
expected.set_title(u'The Bones Of What You Believe')
artist = expected.create_artist()
artist.set_name(u'Chvrches')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_genre(u'Pop')
expected.append_style(u'Indie Pop')
expected.append_style(u'Synth-pop')
expected.set_url(u'http://www.discogs.com/Chvrches-The-Bones-Of-What-You-Believe/release/5622231')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'I')
track.set_title(u'The Mother We Share')
track.set_length(192)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'II')
track.set_title(u'We Sink')
track.set_length(214)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'III')
track.set_title(u'Gun')
track.set_length(234)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'IV')
track.set_title(u'Tether')
track.set_length(286)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'V')
track.set_title(u'Lies')
track.set_length(221)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'VI')
track.set_title(u'Under The Tide')
track.set_length(272)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'VII')
track.set_title(u'Recover')
track.set_length(226)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'VIII')
track.set_title(u'Night Sky')
track.set_length(231)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'IX')
track.set_title(u'Science/Visions')
track.set_length(238)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'X')
track.set_title(u'Lungs')
track.set_length(183)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'XI')
track.set_title(u'By The Throat')
track.set_length(249)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'XII')
track.set_title(u'You Caught The Light')
track.set_length(337)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'XIII')
track.set_title(u'Recover (Alucard Session)')
track.set_length(252)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'XIV')
track.set_title(u'The Mother We Share (Alucard Session)')
track.set_length(198)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'XV')
track.set_title(u'Gun (Alucard Session)')
track.set_length(265)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'XVI')
track.set_title(u'Tightrope')
track.set_length(209)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Chvrches-The-Bones-Of-What-You-Believe/release/5622231')
r = s.get_result()
self.assertEqual(expected, r)
def test_featuring_track_artist_in_artist_column(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2014')
release_event.set_country(u'Portugal')
expected.append_release_event(release_event)
expected.set_format(u'3 \xd7 CD, Compilation')
label_id = expected.create_label_id()
label_id.set_label(u'Vidisco')
label_id.append_catalogue_nr(u'11.80.9534')
expected.append_label_id(label_id)
expected.set_title(u'Caribe Grande \xcaxitos 2014')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_genre(u'Latin')
expected.append_genre(u'Pop')
expected.append_genre(u'Folk')
expected.append_genre(u'World')
expected.append_genre(u'Country')
expected.append_style(u'African')
expected.append_style(u'Electro House')
expected.append_style(u'Forr\xf3')
expected.append_style(u'Latin')
expected.append_style(u'House')
expected.set_url(u'http://www.discogs.com/Various-Caribe-Grande-%C3%8Axitos-2014/release/5586877')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Show Das Poderosas')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Kelly Pink')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u"Me Agarra So' No Uhm")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Landrick')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Mi Ni\xf1a')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Pedro Amorim')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'David Miks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Menina Loka')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jey V')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Y2K')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Meu Eu Em Vo\xe7\xea')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Tayla Riddel')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'N\xe3o P\xe1ra (Ela S\xf3 Quer Dan\xe7ar)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u"Johne D'luka")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Bam Bam Bam')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Rogerinho')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Vem A Mi')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Bheaven')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Pecado')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Irm\xe3os Verdades')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'We Gonna Party')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Pedro Amorim')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Daduh King')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Gao Percussion')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Ela \xc9 Top')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Mc Bola')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mc Rodriguez')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Love Love')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'David Miks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u"R'Bros")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Vamos Zuar')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bryan Wilson')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Sebastian Crayn')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mc Bola')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Bailando Asi')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ BodySoul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Hugo Bessa')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Jay Laroye')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Stape')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Quem \xc9 Essa Mulher')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Valdemiro Jos\xe9')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Matias Dam\xe1sio')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Ensa\xf1ame (Conexi\xf3n)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Carlitos Rossy')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Pipe Calderon')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'El Cata')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Nova La Amenaza')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'M\xe1s')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'17')
track.set_title(u'Hoje N\xe3o Saio Daqui (Oh Tcha Tcharara)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'3 Beatz Muzik')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'18')
track.set_title(u'Bailando')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Mike Moonnight')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mark F')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Vic J')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Alex B')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'19')
track.set_title(u'Noche De Fiesta')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jose Delgado')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'David Miks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'20')
track.set_title(u'Beijo Bom')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Hallux')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Marcus')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Lilian Raquel')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'21')
track.set_title(u'Mexe Assim')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Bodysoul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'22')
track.set_title(u'Malandro')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jey V')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'NGA')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Trair A Minha Namorada (Hoje Eu Quero Trair) (DJ Bruno F Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Ricardo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Bruno F')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Quem Bate')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u"R'Bros")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Taty Agressivo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Bango')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jay Lima')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Otro Dia (Mastkisoul Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Gregor Salto')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Kit (Kuenta / Tambu)')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mastiksoul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Mina Loca')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bodytalk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Tiago')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Pm Akordeon')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Thiago Martins')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'I Found You (The Spacemakers Dirty Radio Edit)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bryan Wilson')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Sebastian Crayn')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Quero Bail\xe1 (Mastik Jay Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jey V')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Ademar')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Morena (Massivedrum Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Franklin Rodriques')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'William')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Massivedrum')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Stronger (DJ Bruno F Radio Edit)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Da Fonseca')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Jay Lion')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Daniela Pimenta')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Bruno Soares Sax')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Bruno F')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Xibita (DJ Mike C Radio Edit)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Meith')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Y.a.m.a')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mc Guy H.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Mike C')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Africanism')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Alvaro Corz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'M\xfasica')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bryan Dalton')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Te Voy Amar (Soul Beatz Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Pedrito')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mike Moonnight')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Vic J')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Players')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Brian Chundro')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Santos')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Marlldexx')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Vem Rebolando')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'James Noyer')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Vale A Pena 2k14')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'D-Rashid')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Praia Del Sol')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'17')
track.set_title(u'Dan\xe7a Do Tchira')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Mika G')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'18')
track.set_title(u'Bagulho')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Mike C')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'19')
track.set_title(u'Nrg')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Glowinthedark')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Chuckie')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(3)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Mila')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Canto Da Cidade')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Beleza Rara')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Chorando Se Foi')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bahia Tropical')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Amor Perfeito')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Ax\xe9 Bahia')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Ranpuzel')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Sorte Grande')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Bahia Pagode Tropical')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Saia E Bicicletinha')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Ka\xe7amba')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'T\xf4 Nem A\xed')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Daniela')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Sozinho "Dance "')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Banda Mar Azul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
# TODO: decide how to handle this weird Discogs track numbering corner case
# track = disc.create_track()
# track.set_number(u'11')
# track.set_title(u'Pout-Pourri 1')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'12')
# track.set_title(u'Pout-Pourri 2')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'13')
# track.set_title(u'Pout-Pourri 3 (Marchas)')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'14')
# track.set_title(u'Pout-Pourri 4')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'15')
# track.set_title(u'Pout-Pourri 5')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'16')
# track.set_title(u'Los Mayos- Disco Samba')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'17')
# track.set_title(u'Pout-porri 6')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
#
# track = disc.create_track()
# track.set_number(u'18')
# track.set_title(u'Pout-porri 7')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
track = disc.create_track()
track.set_number(u'19')
track.set_title(u'Marcha Do Cord\xe3o Do Bola Preta')
track.set_length(None)
disc.append_track(track)
# track = disc.create_track()
# track.set_number(u'20')
# track.set_title(u'Pout-porri 8')
# track.set_length(None)
# track_artist = expected.create_artist()
# track_artist.set_name(u'Unknown Artist')
# track_artist.set_various(False)
# track_artist.append_type(expected.ArtistTypes.MAIN)
# track.append_artist(track_artist)
# disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Caribe-Grande-%C3%8Axitos-2014/release/5586877')
r = s.get_result()
self.assertEqual(expected, r)
def test_featuring_in_artist_and_track_column(self):
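"""Featuring credits may appear both in the artist column and in the track column; either way they should be parsed as FEATURING track artists (and remix credits as REMIXER)."""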
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'20 Nov 1996')
release_event.set_country(u'Germany')
expected.append_release_event(release_event)
expected.set_format(u'2 \xd7 CD, Compilation')
label_id = expected.create_label_id()
label_id.set_label(u'Sony Music Media')
label_id.append_catalogue_nr(u'SMM 486760 2')
expected.append_label_id(label_id)
label_id = expected.create_label_id()
label_id.set_label(u'Sony Music Media')
label_id.append_catalogue_nr(u'486760 2')
expected.append_label_id(label_id)
expected.set_title(u'Dream Dance Vol. 3')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_style(u'Trance')
expected.set_url(u'http://www.discogs.com/Various-Dream-Dance-Vol-3/release/135664')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Salva Mea (Radio Edit)')
track.set_length(224)
track_artist = expected.create_artist()
track_artist.set_name(u'Faithless')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'A Neverending Dream (Dream Dance Anthem Mix)')
track.set_length(221)
track_artist = expected.create_artist()
track_artist.set_name(u'Trance X')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'7 Seconds (Radio-Video-Single)')
track.set_length(248)
track_artist = expected.create_artist()
track_artist.set_name(u'Nomansland')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'One And One (Space Edit)')
track.set_length(218)
track_artist = expected.create_artist()
track_artist.set_name(u'Ronald Snypes')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Sweet Memories (Radio Edit)')
track.set_length(234)
track_artist = expected.create_artist()
track_artist.set_name(u'Groove Solution')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Fall Down On Me (Zhi-Vago In Mission Radio Edit)')
track.set_length(245)
track_artist = expected.create_artist()
track_artist.set_name(u'Solid')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Zhi-Vago')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Cybertrance')
track.set_length(252)
track_artist = expected.create_artist()
track_artist.set_name(u'Blue Alphabet')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Beautiful Place (Airwaves Mix)')
track.set_length(204)
track_artist = expected.create_artist()
track_artist.set_name(u'Paul van Dyk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Floating (7\u2033-Mix)')
track.set_length(249)
track_artist = expected.create_artist()
track_artist.set_name(u'Terra Ferma')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'The Nighttrain (Dream Station Remix)')
track.set_length(369)
track_artist = expected.create_artist()
track_artist.set_name(u'Kadoc')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Silencer I (Extended Mix)')
track.set_length(336)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ The Crow')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title("Insomniak: I'll Be Your Nightmare (Industrial Mix)")
track.set_length(341)
track_artist = expected.create_artist()
track_artist.set_name(u'The Grooveman')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Masterpiece')
track.set_length(238)
track_artist = expected.create_artist()
track_artist.set_name(u'P-Casso')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'My Dimension (Radio Edit)')
track.set_length(205)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Panda')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Secret World (Radio Mix)')
track.set_length(222)
track_artist = expected.create_artist()
track_artist.set_name(u'Vector Mode')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Secret Love (Single Edit)')
track.set_length(234)
track_artist = expected.create_artist()
track_artist.set_name(u'Magnetic Pulstar')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'17')
track.set_title(u'Atlanta (Sunshine State Club Extravaganza)')
track.set_length(402)
track_artist = expected.create_artist()
track_artist.set_name(u'Sunshine State')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Snake Davis')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'18')
track.set_title("Walk On By (JPO's & Beam's Radio Mix)")
track.set_length(236)
track_artist = expected.create_artist()
track_artist.set_name(u'M.R.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'JPO & Beam')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Up To No Good (Radio Edit)')
track.set_length(210)
track_artist = expected.create_artist()
track_artist.set_name(u'Porn Kings')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Reality (Too Short Video Radio)')
track.set_length(231)
track_artist = expected.create_artist()
track_artist.set_name(u'RMB')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Da Beat Goes\u2026 (Radio Mix)')
track.set_length(228)
track_artist = expected.create_artist()
track_artist.set_name(u'Red 5')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title("Why Don't You Dance With Me (Phuture Mix)")
track.set_length(204)
track_artist = expected.create_artist()
track_artist.set_name(u'Futura Nostra')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Love And Fate (Part 2)')
track.set_length(296)
track_artist = expected.create_artist()
track_artist.set_name(u'Love And Fate')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Go (Woodtick Mix)')
track.set_length(323)
track_artist = expected.create_artist()
track_artist.set_name(u'Moby')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Waters (Phase 2)')
track.set_length(320)
track_artist = expected.create_artist()
track_artist.set_name(u'Taucher')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Do You See The Light (Dance 2 Trance Mix)')
track.set_length(481)
track_artist = expected.create_artist()
track_artist.set_name(u'Snap!')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Dance 2 Trance')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Right In The Night (Fall In Love With Music) (Microbots Remix)')
track.set_length(383)
track_artist = expected.create_artist()
track_artist.set_name(u'Jam & Spoon')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Plavka')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Microbots')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Te Quierro (Trance Mix)')
track.set_length(331)
track_artist = expected.create_artist()
track_artist.set_name(u'Intrance Feat. D-Sign')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Hablando (Acordeon Mix)')
track.set_length(391)
track_artist = expected.create_artist()
track_artist.set_name(u'Ramirez')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Pizarro')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Outsiders (Marusha 7\u2033 Edit)')
track.set_length(240)
track_artist = expected.create_artist()
track_artist.set_name(u'Yves Deruyter')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Marusha')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'The Wildlife')
track.set_length(217)
track_artist = expected.create_artist()
track_artist.set_name("Mijk's Magic Marble Box")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Rock Your Body (Radio Mix)')
track.set_length(221)
track_artist = expected.create_artist()
track_artist.set_name(u'The M. Experience III')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title("It's A Dream Song")
track.set_length(228)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Hooligan')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Rhapsody In E')
track.set_length(363)
track_artist = expected.create_artist()
track_artist.set_name(u'Scooter')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Dream-Dance-Vol-3/release/135664')
r = s.get_result()
self.assertEqual(expected, r)
def test_special_track_row_class(self):
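"""Tracklist rows rendered with a special row class (sub-track numbering such as 1a/1b/1c) should still be scraped as individual tracks."""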
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'1999')
release_event.set_country(u'UK')
expected.append_release_event(release_event)
expected.set_format(u'CD, Album')
label_id = expected.create_label_id()
label_id.set_label(u'Red Wharf')
label_id.append_catalogue_nr(u'RWCD004')
expected.append_label_id(label_id)
expected.set_title(u'Pilgrim')
artist = expected.create_artist()
artist.set_name(u'Graham Bowers')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_genre(u'Jazz')
expected.append_style(u'Modern Classical')
expected.set_url(u'http://www.discogs.com/Graham-Bowers-Pilgrim/release/728845')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1a')
track.set_title(u'Unconditional')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'1b')
track.set_title(u'Loss Of Innocence')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'1c')
track.set_title(u'Mechanistics')
track.set_length(None)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Graham-Bowers-Pilgrim/release/728845')
r = s.get_result()
self.assertEqual(expected, r)
def test_multiple_special_artists_for_same_track(self):
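"""When a single track carries several special credits (e.g. multiple featuring artists, or a featuring artist plus a remixer), every credit should be attached to that track."""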
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2000')
release_event.set_country(u'UK')
expected.append_release_event(release_event)
expected.set_format(u'3 \xd7 CD, Mixed')
label_id = expected.create_label_id()
label_id.set_label(u'Pure Silk Records')
label_id.append_catalogue_nr(u'PURESCD3')
expected.append_label_id(label_id)
expected.set_title(u'Pure Silk: The Third Dimension')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_style(u'UK Garage')
expected.set_url(u'http://www.discogs.com/Various-Pure-Silk-The-Third-Dimension/release/463634')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Sunshine (Wookie Main Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Gabrielle')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Wookie')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Baby Gonna Rock Dis (Original Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Chris Mack')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Nuts')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Girls Like Us')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'B-15 Project')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Crissy D')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Lady G')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Streetlife (Original 12" Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Daryl B')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Mark Yardley')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title("Don't Waste My Time (4 Beat Mix)")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'The Wideboys')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Everybody Come On (Stanton Warriors Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Skribble')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Stanton Warriors')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Get Loose')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Double G')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Right Before My Eyes (The Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name("N'n'G")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Neat')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'What Ya Gonna Do')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Artful Dodger')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title("You Don't Know (Marvel & Eli Remix)")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'702')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Marvel & Eli')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'I Keep')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name("N'n'G")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Spirit Of The Sun (Bump & Flex Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Lenny Fontana')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Bump & Flex')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Celebrate Life (Zed Bias Vocal Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Brasstooth')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Sean Mitchell')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Zed Bias')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title("Baby (You're So Sexy) (Dub)")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Dem 2')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Glad You Came To Me (Steve Gurley VIP Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'United Grooves Collective')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Steve Gurley')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Find The Path (Sweet Release Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'New Horizons')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'17')
track.set_title(u'Give Some To Me (Masterplan)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Suiceyed')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'18')
track.set_title(u'That Sound')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Michael Moog')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Sweeter Than Wine')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Dionne Rakeem')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Dreem House Productions')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Feel It')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name("London's Unique 3")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Neighbourhood')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Zed Bias')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Rumpus')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Madness On The Street')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Richie Boy')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Klasse')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title("Jump 'n' Shout (Dem 2 Mix)")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Basement Jaxx')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Dem 2')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title("Somebody Else's Guy (Stanton Warriors Vocal Mix)")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jocelyn Brown')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Stanton Warriors')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Let Me Know')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'K.C.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Zed Bias')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'For Real')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'M Dubs')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'J.P.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Tingles 2000 (Zed Bias Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Valerie M')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Zed Bias')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title("Buddy X '99 (Original Dreem Teem Dub Mix)")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Dreem Teem')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Neneh Cherry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Apparently Nothing (Artful Dodger Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'The Brand New Heavies')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Artful Dodger')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Sometimes It Snows In April (Dreem House Dub Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Amar')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Ranking')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Dreem House Productions')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Down On Me')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Wookie')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Hold Me Tight')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Lewi')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Beautiful (Dreem House Dub Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Groove Control')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Dreem House Productions')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Call It Fate')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Richie Dan')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'17')
track.set_title(u'A Little Bit Of Luck')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Luck & MC Neat')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'18')
track.set_title(u'I Want You')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Rosie Gaines')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
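# Disc 3 of 3 in this compilation fixture.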
disc = expected.create_disc()
disc.set_number(3)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Down On Me')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Wookie')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Everybody Come On (Stanton Warriors Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'DJ Skribble')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Stanton Warriors')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'I Keep')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name("N'n'G")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Celebrate Life (Zed Bias Vocal Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Brasstooth')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Sean Mitchell')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Zed Bias')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Sunshine (Wookie Main Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Gabrielle')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Wookie')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Neighbourhood')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Zed Bias')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Rumpus')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Give Some To Me (Masterplan)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Suiceyed')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title("You Don't Know (Marvel & Eli Remix)")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'702')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Marvel & Eli')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title("Somebody Else's Guy (Stanton Warriors Filthy Silk Dub)")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Jocelyn Brown')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Stanton Warriors')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Right Before My Eyes (The Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name("N'n'G")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Neat')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Baby Gonna Rock Dis')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Chris Mack')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'MC Nuts')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Girls Like Us')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'B-15 Project')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Crissy D')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Lady G')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title("Jump 'n' Shout (Dem 2 Mix)")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Basement Jaxx')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Dem 2')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Spirit Of The Sun (Bump & Flex Remix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Lenny Fontana')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Bump & Flex')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Glad You Came To Me (Steve Gurley VIP Mix)')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'United Grooves Collective')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Steve Gurley')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Call It Fate')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name(u'Richie Dan')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Various-Pure-Silk-The-Third-Dimension/release/463634')
r = s.get_result()
self.assertEqual(expected, r)
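# Discogs tracklist with odd sub-track markup (per the test name); the scraper
# should still yield a single flat disc of 20 numbered tracks.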
def test_weird_subtracks_in_tracklist(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2005')
release_event.set_country(u'Netherlands')
expected.append_release_event(release_event)
expected.set_format(u'CD, Compilation')
label_id = expected.create_label_id()
label_id.set_label(u'Stubko Entertainment')
label_id.append_catalogue_nr(u'255034')
expected.append_label_id(label_id)
expected.set_title(u'De Beste Liedjes Van Ome Henk')
artist = expected.create_artist()
artist.set_name(u'Ome Henk')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre(u'Electronic')
expected.append_genre(u'Non-Music')
expected.append_genre(u'Pop')
expected.append_style(u'Comedy')
expected.append_style(u'Euro House')
expected.append_style(u'Parody')
expected.set_url('http://www.discogs.com/Ome-Henk-De-Beste-Liedjes-Van-Ome-Henk/release/755732')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Het Is Weer Tijd Voor Ome Henk! (Radio Tune)')
track.set_length(85)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Opblaaskrokodil (Super Extra Mix)')
track.set_length(148)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Op De Camping')
track.set_length(213)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Ik Zing Dit Lied Voor Ome Henk!')
track.set_length(218)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Neem Een Ander In De Maling!')
track.set_length(198)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Mambo Nr. 6')
track.set_length(219)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Heftig!')
track.set_length(225)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Lekker Lekker (Ga Maar Met Me Mee)')
track.set_length(213)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Samba La Bamba!')
track.set_length(184)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u"'T Leven Gaat Niet Over Rozen!")
track.set_length(213)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Sieb van der Kast')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Ome Henk Viert Feest!')
track.set_length(236)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Sambal Bij?')
track.set_length(175)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Ik Ben Verkouwe!')
track.set_length(234)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Sju Tem')
track.set_length(195)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Kim Holland')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'Ploem Ploem Jenka (Hup Hop Versie Met Trea Dobbs)')
track.set_length(162)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Trea Dobbs')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Aaai Oehoe Aai')
track.set_length(191)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'17')
track.set_title(u'Oranje!! (We Worden Kampioen!) (Radio Mix)')
track.set_length(223)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'18')
track.set_title(u'Olee Olee Sinterklaas Is Here To Stay! (Single Versie)')
track.set_length(236)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'19')
track.set_title(u'Een Heel Gelukkig Kerstfeest')
track.set_length(214)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'20')
track.set_title(u'Opblaaskrokodil 2005 (Bonustrack)')
track.set_length(147)
track_artist = expected.create_artist()
track_artist.set_name(u'Ome Henk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = discogs.ReleaseScraper.from_string(u'http://www.discogs.com/Ome-Henk-De-Beste-Liedjes-Van-Ome-Henk/release/755732')
r = s.get_result()
self.assertEqual(expected, r)
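# A release id that does not exist should produce a NotFoundResult.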
def test_404(self):
expected = NotFoundResult()
expected.set_scraper_name(None)
s = discogs.ReleaseScraper.from_string('http://www.discogs.com/Various-Gothic-File-14/release/999999999')
r = s.get_result()
self.assertEqual(expected, r)
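# A plain search query ('love') should return at least one result item.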
def test_search_scraper(self):
s = discogs.SearchScraper('love')
r = s.get_result()
self.assertTrue(len(r.get_items()) > 0)
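# NOTE: illustrative sketch only; it is not used by the tests in this module.
# It shows how the repetitive track/artist construction in these fixtures could
# be factored into a helper, assuming the ReleaseResult/disc/track/artist API
# used throughout this file. 'artists' is an iterable of (name, artist_type)
# pairs, e.g. [(u'Wookie', release.ArtistTypes.MAIN)].
def _add_track(release, disc, number, title, length=None, artists=()):
    track = disc.create_track()
    track.set_number(number)
    track.set_title(title)
    track.set_length(length)
    for name, artist_type in artists:
        artist = release.create_artist()
        artist.set_name(name)
        artist.set_various(False)
        artist.append_type(artist_type)
        track.append_artist(artist)
    disc.append_track(track)
    return track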
class MusicbrainzTest(TestCase):
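# Single-disc ASP album; checks release data and per-track lengths
# (no track-level artists expected).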
def test_simple_album(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2004-09-23')
release_event.set_country('Germany')
expected.append_release_event(release_event)
expected.set_format('CD, Album')
label_id = expected.create_label_id()
label_id.set_label('Trisol')
label_id.append_catalogue_nr('TRI 070 CD')
expected.append_label_id(label_id)
expected.set_title('Hast Du mich vermisst? Der schwarze Schmetterling, Teil I')
artist = expected.create_artist()
artist.set_name('ASP')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/e008606b-a1c9-48ab-8011-5dbf8b874f1b')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Intro: In meiner Vorstellung')
track.set_length(274)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Schwarzer Schmetterling')
track.set_length(290)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Where Do the Gods Go')
track.set_length(226)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Dancing')
track.set_length(345)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'K\xfcss mich')
track.set_length(311)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Sing Child')
track.set_length(238)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Teach Me War')
track.set_length(225)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Imbecile Anthem')
track.set_length(222)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Und wir tanzten (Ungeschickte Liebesbriefe)')
track.set_length(304)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Blinded')
track.set_length(444)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Where Do the Gods Go (re-unleashed club edit)')
track.set_length(279)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/e008606b-a1c9-48ab-8011-5dbf8b874f1b')
r = s.get_result()
self.assertEqual(expected, r)
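# Four-disc ASP / Chamber box set ('Once in a Lifetime'); checks that every
# disc and its tracks are captured.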
def test_multiple_cds(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2007-05-25')
release_event.set_country('Germany')
expected.append_release_event(release_event)
expected.set_format(u'4\xd7CD, Album + Live')
label_id = expected.create_label_id()
label_id.set_label('Trisol')
label_id.append_catalogue_nr('TRI 303 CD')
expected.append_label_id(label_id)
expected.set_title('Once in a Lifetime')
artist = expected.create_artist()
artist.set_name('ASP')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
artist = expected.create_artist()
artist.set_name('Chamber')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/79de4a0c-b469-4dfd-b23c-129462b741fb')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Once in a Lifetime, Part 1')
track.set_length(351)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'A Dead Man\u2019s Song')
track.set_length(312)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Versuchung')
track.set_length(345)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Torn')
track.set_length(304)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Demon Love')
track.set_length(272)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'The Paperhearted Ghost')
track.set_length(283)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'A Tale of Real Love')
track.set_length(316)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Hunger')
track.set_length(289)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'The Truth About Snow-White')
track.set_length(240)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'She Wore Shadows')
track.set_length(276)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Und wir tanzten (Ungeschickte Liebesbriefe)')
track.set_length(317)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Once in a Lifetime, Part 2 (reprise)')
track.set_length(164)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'K\xfcss mich')
track.set_length(384)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Silence - Release')
track.set_length(225)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Solitude')
track.set_length(220)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Die Ballade von der Erweckung')
track.set_length(527)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Another Conversation')
track.set_length(201)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Sing Child')
track.set_length(449)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Ich will brennen')
track.set_length(300)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Toscana')
track.set_length(374)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Ride On')
track.set_length(222)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Hometown')
track.set_length(181)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Werben')
track.set_length(293)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Once in a Lifetime, Part 3 (Finale)')
track.set_length(608)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(3)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'H\xe4sslich')
track.set_length(145)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Backstage (All Areas)')
track.set_length(573)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Paracetamoltr\xe4ume')
track.set_length(517)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Auszug aus \u201eTremendista\u201c')
track.set_length(1473)
track_artist = expected.create_artist()
track_artist.set_name(u'Ralph M\xfcller')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Campari O')
track.set_length(159)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(4)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Sicamore Trees (ASP soundcheck out-take)')
track.set_length(94)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Demon Love')
track.set_length(275)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'The Truth About Snow-White')
track.set_length(275)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'She Wore Shadows')
track.set_length(319)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Sing Child')
track.set_length(469)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Hometown')
track.set_length(221)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Hunger')
track.set_length(274)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Silence - Release')
track.set_length(208)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'She Moved Through the Fair (ASP soundcheck out-take)')
track.set_length(120)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/79de4a0c-b469-4dfd-b23c-129462b741fb')
r = s.get_result()
self.assertEqual(expected, r)
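# Various-artists compilation ('Gothic File 11'); checks the 'various' flag on
# the release artist and MAIN/REMIXER artist types on individual tracks.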
def test_various_artists_and_track_remixer(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2010')
release_event.set_country('Germany')
expected.append_release_event(release_event)
expected.set_format('CD, Album + Compilation')
label_id = expected.create_label_id()
label_id.set_label('Batbeliever Releases')
label_id.append_catalogue_nr('BAT 065')
expected.append_label_id(label_id)
expected.set_title('Gothic File 11')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/9d78a55c-0eee-4b61-b6eb-b69765c37740')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Carrie Satan')
track.set_length(312)
track_artist = expected.create_artist()
track_artist.set_name('Spectra Paris')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Countdown')
track.set_length(253)
track_artist = expected.create_artist()
track_artist.set_name('Absurd Minds')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title(u'M\xe4dchen in Uniform (Faderhead remix)')
track.set_length(233)
track_artist = expected.create_artist()
track_artist.set_name('Nachtmahr')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Faderhead')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Fucking Invective')
track.set_length(273)
track_artist = expected.create_artist()
track_artist.set_name('Noisuf-X')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Loyal to My Hate (Solar Fake remix)')
track.set_length(264)
track_artist = expected.create_artist()
track_artist.set_name(':wumpscut:')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Solar Fake')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Melancholie (382edit)')
track.set_length(232)
track_artist = expected.create_artist()
track_artist.set_name('KiEw')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Gegen die Welt')
track.set_length(287)
track_artist = expected.create_artist()
track_artist.set_name('Mantus')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title("Ready or Not (I'm Coming)")
track.set_length(202)
track_artist = expected.create_artist()
track_artist.set_name('Oomph!')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('What?')
track.set_length(166)
track_artist = expected.create_artist()
track_artist.set_name('Rob Zombie')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Ebenbild (Die Krupps remix)')
track.set_length(343)
track_artist = expected.create_artist()
track_artist.set_name('Megaherz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Die Krupps')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Vergissmeinnicht (live)')
track.set_length(239)
track_artist = expected.create_artist()
track_artist.set_name('Eisbrecher')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Industrypeople')
track.set_length(254)
track_artist = expected.create_artist()
track_artist.set_name('Zeromancer')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Kick the Bass')
track.set_length(222)
track_artist = expected.create_artist()
track_artist.set_name('Julien-K')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Black Hole')
track.set_length(325)
track_artist = expected.create_artist()
track_artist.set_name('Nosferatu')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Swimming in Dirty Water')
track.set_length(264)
track_artist = expected.create_artist()
track_artist.set_name('Die Art')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('Wreckhouse Stomp')
track.set_length(184)
track_artist = expected.create_artist()
track_artist.set_name('Mad Sin')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/9d78a55c-0eee-4b61-b6eb-b69765c37740')
r = s.get_result()
self.assertEqual(expected, r)
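# Five-disc Isley Brothers box set in which every disc carries its own title.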
def test_disc_titles(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2008')
release_event.set_country('Europe')
expected.append_release_event(release_event)
expected.set_format(u'5\xd7CD, Album + Compilation')
label_id = expected.create_label_id()
label_id.set_label('Epic')
label_id.append_catalogue_nr('88697304842')
expected.append_label_id(label_id)
expected.set_title('Original Album Classics')
artist = expected.create_artist()
artist.set_name('The Isley Brothers')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/12c94a0f-828f-4ab3-8e0d-dfe4599dc310')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title('The Brothers: Isley')
track = disc.create_track()
track.set_number('1')
track.set_title('I Turned You On')
track.set_length(158)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Vacuum Cleaner')
track.set_length(176)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('I Got to Get Myself Together')
track.set_length(218)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Was It Good to You?')
track.set_length(164)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('The Blacker the Berry (a.k.a. Black Berries)')
track.set_length(353)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('My Little Girl')
track.set_length(221)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Get Down Off of the Train')
track.set_length(192)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Holding On')
track.set_length(156)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Feels Like the World')
track.set_length(206)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title('Get Into Something')
track = disc.create_track()
track.set_number('1')
track.set_title('Get Into Something')
track.set_length(450)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Freedom')
track.set_length(218)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Take Inventory')
track.set_length(167)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title("Keep on Doin'")
track.set_length(242)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Girls Will Be Girls')
track.set_length(171)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('I Need You So')
track.set_length(265)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('If He Can You Can')
track.set_length(225)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('I Got to Find Me One')
track.set_length(278)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Beautiful')
track.set_length(186)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Bless Your Heart')
track.set_length(183)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(3)
disc.set_title("Givin' It Back")
track = disc.create_track()
track.set_number('1')
track.set_title('Ohio - Machine Gun')
track.set_length(554)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Fire and Rain')
track.set_length(329)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Lay Lady Lay')
track.set_length(622)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Spill the Wine')
track.set_length(392)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Nothing to Do But Today')
track.set_length(219)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Cold Bologna')
track.set_length(179)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title("Love the One You're With")
track.set_length(219)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(4)
disc.set_title('Brother, Brother, Brother')
track = disc.create_track()
track.set_number('1')
track.set_title('Brother, Brother')
track.set_length(197)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Put A Little Love In Your Heart')
track.set_length(182)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title("Sweet Season / Keep On Walkin'")
track.set_length(313)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Work To Do')
track.set_length(192)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Pop That Thang')
track.set_length(174)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Lay Away')
track.set_length(203)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title("It's Too Late")
track.set_length(631)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Love Put Me On The Corner')
track.set_length(390)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(5)
disc.set_title('3 + 3')
track = disc.create_track()
track.set_number('1')
track.set_title('That Lady, Parts 1 & 2')
track.set_length(335)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title("Don't Let Me Be Lonely Tonight")
track.set_length(239)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('If You Were There')
track.set_length(203)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('You Walk Your Way')
track.set_length(186)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Listen to the Music')
track.set_length(246)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('What It Comes Down To')
track.set_length(234)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Sunshine (Go Away Today)')
track.set_length(262)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Summer Breeze')
track.set_length(372)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('The Highways of My Life')
track.set_length(293)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('That Lady (live)')
track.set_length(222)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/12c94a0f-828f-4ab3-8e0d-dfe4599dc310')
r = s.get_result()
self.assertEqual(expected, r)
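# Release page with a special sub-heading (per the test name); the two-disc EP
# should still be parsed with its format and full tracklist.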
def test_special_sub_heading(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2012-11-16')
release_event.set_country('Germany')
expected.append_release_event(release_event)
expected.set_format(u'2\xd7CD, EP')
label_id = expected.create_label_id()
label_id.set_label('Trisol')
label_id.append_catalogue_nr('TRI 460 CD')
expected.append_label_id(label_id)
expected.set_title('Die GeistErfahrer EP: Fremder-Zyklus, Teil 1.1')
artist = expected.create_artist()
artist.set_name('ASP')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/fc6ee7a8-c70a-4c8f-ab42-43a457a0731f')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'GeistErfahrer')
track.set_length(360)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'In Sack und Asche')
track.set_length(440)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'\xdcberH\xe4rte')
track.set_length(376)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Carpe noctem')
track.set_length(312)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Weichen(t)stellung (GeistErfahrer Reprise)')
track.set_length(274)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Danach')
track.set_length(516)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Sing Child')
track.set_length(404)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'Duett (Minnelied der Incubi)')
track.set_length(251)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Krabat')
track.set_length(358)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Unverwandt')
track.set_length(667)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Werben')
track.set_length(440)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/fc6ee7a8-c70a-4c8f-ab42-43a457a0731f')
r = s.get_result()
self.assertEqual(expected, r)
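# A release-group URL should yield a ListResult with one item per release.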
def test_release_group(self):
expected = ListResult()
expected.set_scraper_name(None)
item = expected.create_item()
item.set_name(u'ASP & Chamber \u2013 Humility')
item.set_info(u'CD | Tracks: 8 | Date: 2006-04-21 | Country: DE | Barcode: 4260063942730')
item.set_query('https://musicbrainz.org/release/58bad121-bfab-4dda-89f8-4b1bc092de44')
item.set_url('https://musicbrainz.org/release/58bad121-bfab-4dda-89f8-4b1bc092de44')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'ASP & Chamber \u2013 Humility: Die verschollenen Archive 2')
item.set_info(u'CD | Tracks: 7 | Date: 2012-03-09 | Country: DE | Barcode: 4260063944505')
item.set_query('https://musicbrainz.org/release/c2834b8b-77c4-4505-9b55-a31208eb98c3')
item.set_url('https://musicbrainz.org/release/c2834b8b-77c4-4505-9b55-a31208eb98c3')
expected.append_item(item)
s = musicbrainz.ReleaseGroupScraper.from_string('http://musicbrainz.org/release-group/9c8ec90f-dcef-3fc7-904d-27f535454e44')
r = s.get_result()
self.assertEqual(expected, r)
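# Vinyl release: the format should read '12" Vinyl, Album' and track numbers
# keep their A/B side prefixes.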
def test_vinyl_not_cd_in_sub_heading(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2004-02-24')
release_event.set_country('United States')
expected.append_release_event(release_event)
expected.set_format('12" Vinyl, Album')
label_id = expected.create_label_id()
label_id.set_label('Sundazed Music')
label_id.append_catalogue_nr('LP 5103')
expected.append_label_id(label_id)
expected.set_title('Four Sail')
artist = expected.create_artist()
artist.set_name('Love')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/bdb4ba37-bb4b-3d2a-bd58-c109dc4d72f0')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('A1')
track.set_title('August')
track.set_length(300)
disc.append_track(track)
track = disc.create_track()
track.set_number('A2')
track.set_title("Your Friend and Mine - Neil's Song")
track.set_length(220)
disc.append_track(track)
track = disc.create_track()
track.set_number('A3')
track.set_title("I'm With You")
track.set_length(165)
disc.append_track(track)
track = disc.create_track()
track.set_number('A4')
track.set_title('Good Times')
track.set_length(210)
disc.append_track(track)
track = disc.create_track()
track.set_number('A5')
track.set_title('Singing Cowboy')
track.set_length(270)
disc.append_track(track)
track = disc.create_track()
track.set_number('B1')
track.set_title('Dream')
track.set_length(169)
disc.append_track(track)
track = disc.create_track()
track.set_number('B2')
track.set_title('Robert Montgomery')
track.set_length(214)
disc.append_track(track)
track = disc.create_track()
track.set_number('B3')
track.set_title('Nothing')
track.set_length(284)
disc.append_track(track)
track = disc.create_track()
track.set_number('B4')
track.set_title('Talking in My Sleep')
track.set_length(170)
disc.append_track(track)
track = disc.create_track()
track.set_number('B5')
track.set_title('Always See Your Face')
track.set_length(210)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/bdb4ba37-bb4b-3d2a-bd58-c109dc4d72f0')
r = s.get_result()
self.assertEqual(expected, r)
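# Release whose sub-heading yields only 'Album' as the format; no release
# event, date or label id is expected.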
def test_medium_in_sub_heading(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
expected.set_format('Album')
expected.set_title('Welcome to the Dopehouse')
artist = expected.create_artist()
artist.set_name('The Dayton Family')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/0e3b3c85-61b6-4a07-852b-26f7e8dd0ade')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Intro')
track.set_length(93)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Big Mac 11')
track.set_length(276)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Do You Remember?')
track.set_length(200)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Welcome to Flint')
track.set_length(228)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Feds')
track.set_length(237)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Gangstarism')
track.set_length(251)
track_artist = expected.create_artist()
track_artist.set_name('Goldfish')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Young Thugs')
track.set_length(241)
track_artist = expected.create_artist()
track_artist.set_name('Ghetto E')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Lori')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Drugstore')
track.set_length(207)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Set Up')
track.set_length(257)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('We Keep It Ghetto')
track.set_length(200)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Dope House')
track.set_length(231)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Shadows')
track.set_length(242)
track_artist = expected.create_artist()
track_artist.set_name('Kalonda & Ryan')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Outlaws')
track.set_length(226)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Weed Song')
track.set_length(228)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Simple Wish')
track.set_length(206)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('Outro')
track.set_length(96)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/0e3b3c85-61b6-4a07-852b-26f7e8dd0ade')
r = s.get_result()
self.assertEqual(expected, r)
def test_digital_media_in_sub_heading(self):
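# Stiko Per Larsson, 'Järnbärarland' (Sweden, 2013): expects format 'Digital Media, Album' and a single disc of 11 tracks.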
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2013-04-08')
release_event.set_country('Sweden')
expected.append_release_event(release_event)
expected.set_format('Digital Media, Album')
expected.set_title(u'J\xe4rnb\xe4rarland')
artist = expected.create_artist()
artist.set_name('Stiko Per Larsson')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/d37fc56e-4b9b-4c4c-9e9e-5d6d5a66944c')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'Zombien f\xf6ds')
track.set_length(167)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'J\xe4rnb\xe4rarland')
track.set_length(219)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Veteraner')
track.set_length(197)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'Resande Man')
track.set_length(192)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Du h\xf6r inte hemma h\xe4r')
track.set_length(185)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Irrbloss')
track.set_length(187)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'J\xe4mtlands president')
track.set_length(218)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Gilla falla')
track.set_length(257)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Inga v\xe4gar')
track.set_length(259)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'Krus & detaljer')
track.set_length(241)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'En kosmonauts testamente')
track.set_length(375)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/d37fc56e-4b9b-4c4c-9e9e-5d6d5a66944c')
r = s.get_result()
self.assertEqual(expected, r)
def test_other_in_sub_heading(self):
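# Jack Herrera, 'Retro Futuristo' (US, 1999, Epic): expects format 'Other, Album' and a single disc of 16 tracks.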
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('1999')
release_event.set_country('United States')
expected.append_release_event(release_event)
expected.set_format('Other, Album')
label_id = expected.create_label_id()
label_id.set_label('Epic')
expected.append_label_id(label_id)
expected.set_title('Retro Futuristo')
artist = expected.create_artist()
artist.set_name('Jack Herrera')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/020fc291-af3e-45d7-a2f3-212d42fc260b')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'City Lights')
track.set_length(313)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'What U Feel')
track.set_length(295)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'Diamond in the Rough')
track.set_length(297)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'High Off You')
track.set_length(299)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'Say You Gotta Man')
track.set_length(262)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'Jack Shuffle (feat. Will.I.Am)')
track.set_length(240)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'Up Above My Head')
track.set_length(320)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'Jack Herrera for President')
track.set_length(272)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'Free to Believe')
track.set_length(329)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'For You')
track.set_length(378)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'Silver & Gold (feat. Black Thought)')
track.set_length(316)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'Revolution (interlude)')
track.set_length(78)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'Revolution')
track.set_length(315)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'Be Free')
track.set_length(341)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'15')
track.set_title(u'D\xe9ja Vu')
track.set_length(218)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'16')
track.set_title(u'Jewel')
track.set_length(339)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/020fc291-af3e-45d7-a2f3-212d42fc260b')
r = s.get_result()
self.assertEqual(expected, r)
def test_dvd_and_cd_in_sub_heading(self):
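# 'CODE GEASS COMPLETE BEST' (various artists, Japan, 2009): expects format 'CD + DVD-Video, Album + Compilation', two MusicRay'n catalogue numbers, and two discs whose tracks each carry their own main artists.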
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2009-01-14')
release_event.set_country('Japan')
expected.append_release_event(release_event)
expected.set_format('CD + DVD-Video, Album + Compilation')
label_id = expected.create_label_id()
label_id.set_label(u'MusicRay\u2019n')
label_id.append_catalogue_nr('SMCL-163')
expected.append_label_id(label_id)
label_id = expected.create_label_id()
label_id.set_label(u'MusicRay\u2019n')
label_id.append_catalogue_nr('SMCL-164')
expected.append_label_id(label_id)
expected.set_title('CODE GEASS COMPLETE BEST')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/f6fe7f52-b0c8-4bd8-af06-68af909e09ca')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 1')
track.set_length(31)
track_artist = expected.create_artist()
track_artist.set_name(u'C.C.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'\u3086\u304b\u306a')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'COLORS')
track.set_length(219)
track_artist = expected.create_artist()
track_artist.set_name(u'FLOW')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'\u52c7\u4fa0\u9752\u6625\u8b33')
track.set_length(262)
track_artist = expected.create_artist()
track_artist.set_name(u'ALI PROJECT')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 2')
track.set_length(35)
track_artist = expected.create_artist()
track_artist.set_name(u'C.C.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'\u3086\u304b\u306a')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'\u89e3\u8aad\u4e0d\u80fd')
track.set_length(186)
track_artist = expected.create_artist()
track_artist.set_name(u'\u30b8\u30f3')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'\u30e2\u30b6\u30a4\u30af\u30ab\u30b1\u30e9')
track.set_length(278)
track_artist = expected.create_artist()
track_artist.set_name(u'SunSet Swish')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'\u77b3\u30ce\u7ffc')
track.set_length(212)
track_artist = expected.create_artist()
track_artist.set_name(u'access')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 3')
track.set_length(35)
track_artist = expected.create_artist()
track_artist.set_name(u'C.C.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'\u3086\u304b\u306a')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'O2 \u301c\u30aa\u30fc\u30fb\u30c4\u30fc\u301c')
track.set_length(238)
track_artist = expected.create_artist()
track_artist.set_name(u'ORANGE RANGE')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'\u30b7\u30a2\u30ef\u30bb\u30cd\u30a4\u30ed')
track.set_length(259)
track_artist = expected.create_artist()
track_artist.set_name(u'ORANGE RANGE')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 4')
track.set_length(32)
track_artist = expected.create_artist()
track_artist.set_name(u'C.C.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'\u3086\u304b\u306a')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'WORLD END')
track.set_length(229)
track_artist = expected.create_artist()
track_artist.set_name(u'FLOW')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'\u308f\u304c\u81c8\u305f\u3057\u60aa\u306e\u83ef')
track.set_length(273)
track_artist = expected.create_artist()
track_artist.set_name(u'ALI PROJECT')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'C.C.\u30e2\u30ce\u30ed\u30fc\u30b0 5')
track.set_length(46)
track_artist = expected.create_artist()
track_artist.set_name(u'C.C.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'\u3086\u304b\u306a')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
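# Disc 2 of the two-disc set.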
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number(u'1')
track.set_title(u'COLORS type1')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'FLOW')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'2')
track.set_title(u'COLORS type2')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'FLOW')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'3')
track.set_title(u'\u52c7\u4fa0\u9752\u6625\u8b33')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'ALI PROJECT')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'4')
track.set_title(u'\u89e3\u8aad\u4e0d\u80fd type1')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'\u30b8\u30f3')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'5')
track.set_title(u'\u89e3\u8aad\u4e0d\u80fd type2')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'\u30b8\u30f3')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'6')
track.set_title(u'\u30e2\u30b6\u30a4\u30af\u30ab\u30b1\u30e9 type1')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'SunSet Swish')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'7')
track.set_title(u'\u30e2\u30b6\u30a4\u30af\u30ab\u30b1\u30e9 type2')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'SunSet Swish')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'8')
track.set_title(u'\u77b3\u30ce\u7ffc')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'access')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'9')
track.set_title(u'O2 \u301c\u30aa\u30fc\u30fb\u30c4\u30fc\u301c')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'ORANGE RANGE')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'10')
track.set_title(u'\u30b7\u30a2\u30ef\u30bb\u30cd\u30a4\u30ed')
track.set_length(96)
track_artist = expected.create_artist()
track_artist.set_name(u'ORANGE RANGE')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'11')
track.set_title(u'WORLD END type1')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'FLOW')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'12')
track.set_title(u'WORLD END type2')
track.set_length(93)
track_artist = expected.create_artist()
track_artist.set_name(u'FLOW')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'13')
track.set_title(u'\u308f\u304c\ufa1f\u305f\u3057\u60aa\u306e\u83ef type1')
track.set_length(96)
track_artist = expected.create_artist()
track_artist.set_name(u'ALI PROJECT')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number(u'14')
track.set_title(u'\u308f\u304c\ufa1f\u305f\u3057\u60aa\u306e\u83ef type2')
track.set_length(96)
track_artist = expected.create_artist()
track_artist.set_name(u'ALI PROJECT')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/f6fe7f52-b0c8-4bd8-af06-68af909e09ca')
r = s.get_result()
self.assertEqual(expected, r)
def test_cdr_in_sub_heading(self):
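# Buckethead, 'Thaw' (US, 2013, CD-R): expects nine '[untitled]' tracks on a single disc released with '[no label]'.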
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2013-06-27')
release_event.set_country('United States')
expected.append_release_event(release_event)
expected.set_format('CD-R, Album')
label_id = expected.create_label_id()
label_id.set_label('[no label]')
expected.append_label_id(label_id)
expected.set_title('Thaw')
artist = expected.create_artist()
artist.set_name('Buckethead')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/dd3f9b98-364c-4da0-b4d7-c79f1c20f1e6')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('[untitled]')
track.set_length(486)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('[untitled]')
track.set_length(147)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('[untitled]')
track.set_length(191)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('[untitled]')
track.set_length(166)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('[untitled]')
track.set_length(185)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('[untitled]')
track.set_length(413)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('[untitled]')
track.set_length(133)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('[untitled]')
track.set_length(113)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('[untitled]')
track.set_length(91)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/dd3f9b98-364c-4da0-b4d7-c79f1c20f1e6')
r = s.get_result()
self.assertEqual(expected, r)
def test_featuring_release_artists(self):
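# Madonna, "Give Me All Your Luvin'" CD single: Nicki Minaj and M.I.A. are featuring release artists, and LMFAO is both featuring and remixer on the Party Rock remix.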
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2012-03-19')
release_event.set_country('United Kingdom')
expected.append_release_event(release_event)
expected.set_format('CD, Single')
label_id = expected.create_label_id()
label_id.set_label('Interscope Records')
label_id.append_catalogue_nr('0602527974569')
expected.append_label_id(label_id)
expected.set_title("Give Me All Your Luvin'")
artist = expected.create_artist()
artist.set_name('Madonna')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
artist = expected.create_artist()
artist.set_name('Nicki Minaj')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.FEATURING)
expected.append_release_artist(artist)
artist = expected.create_artist()
artist.set_name('M.I.A.')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.FEATURING)
expected.append_release_artist(artist)
expected.set_url('http://musicbrainz.org/release/37df7664-0924-4594-8d07-9d48fa47ced0')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title("Give Me All Your Luvin'")
track.set_length(202)
track_artist = expected.create_artist()
track_artist.set_name('Nicki Minaj')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('M.I.A.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title("Give Me All Your Luvin' (Party Rock remix)")
track.set_length(243)
track_artist = expected.create_artist()
track_artist.set_name('LMFAO')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Nicki Minaj')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/37df7664-0924-4594-8d07-9d48fa47ced0')
r = s.get_result()
self.assertEqual(expected, r)
def test_404(self):
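# A release URL that does not exist should produce a NotFoundResult.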
expected = NotFoundResult()
expected.set_scraper_name(None)
s = musicbrainz.ReleaseScraper.from_string('http://musicbrainz.org/release/12345-abcdefg')
r = s.get_result()
self.assertEqual(expected, r)
def test_search_scraper(self):
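# A MusicBrainz search for 'love' should return at least one item.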
s = musicbrainz.SearchScraper('love')
r = s.get_result()
self.assertTrue(len(r.get_items()) > 0)
class BeatportTest(TestCase):
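# Tests for the Beatport release and search scrapers.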
def test_remix_track_artist(self):
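# Polygamy Boys, 'Love Spy / Love Dies' (Karatemusik KM013): the Error Error remix on track 1 is credited with a REMIXER track artist.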
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2006-04-19')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
label_id = expected.create_label_id()
label_id.set_label('Karatemusik')
label_id.append_catalogue_nr('KM013')
expected.append_label_id(label_id)
expected.set_title('Love Spy / Love Dies')
artist = expected.create_artist()
artist.set_name(u'Polygamy Boys')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Electro House')
expected.append_genre('Tech House')
expected.set_url('http://www.beatport.com/release/love-spy-love-dies/27944')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title(u'Love Spy / Love Dies [Error Error Remix]')
track.set_length(447)
track_artist = expected.create_artist()
track_artist.set_name('Error Error')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Love Spy / Love Dies')
track.set_length(427)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Reply 23')
track.set_length(418)
disc.append_track(track)
expected.append_disc(disc)
s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/love-spy-love-dies/27944')
r = s.get_result()
self.assertEqual(expected, r)
def test_various_artists(self):
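# 'DJ Tunes Compilation' (various artists, CMG117): every track carries its own main artist, several also a remixer, and five genres are expected.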
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2012-01-05')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
label_id = expected.create_label_id()
label_id.set_label('Carlo Cavalli Music Group')
label_id.append_catalogue_nr('CMG117')
expected.append_label_id(label_id)
expected.set_title('DJ Tunes Compilation')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Progressive House')
expected.append_genre('House')
expected.append_genre('Deep House')
expected.append_genre('Tech House')
expected.append_genre('Minimal')
expected.set_url('http://www.beatport.com/release/dj-tunes-compilation/851318')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Forever Loved')
track.set_length(320)
track_artist = expected.create_artist()
track_artist.set_name('Sam Be-Kay')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title(u'Sweep [Alex Faraci Remix]')
track.set_length(398)
track_artist = expected.create_artist()
track_artist.set_name('Eros Locatelli')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Alex Faraci')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title(u'Humo Y Neon [David Ahumada Remix]')
track.set_length(298)
track_artist = expected.create_artist()
track_artist.set_name('Babette Duwez')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Joel Reichert')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('David Ahumada')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title(u'Night Melody [Massimo Russo La Guitarra Remix]')
track.set_length(377)
track_artist = expected.create_artist()
track_artist.set_name('Alex Faraci')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Massimo Russo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('30 m')
track.set_length(393)
track_artist = expected.create_artist()
track_artist.set_name('Fingers Clear')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Just Begin')
track.set_length(429)
track_artist = expected.create_artist()
track_artist.set_name('Erion Gjuzi')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Achakkar')
track.set_length(388)
track_artist = expected.create_artist()
track_artist.set_name('Dany Cohiba')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title(u'Raveline [Italianbeat Guys Remix]')
track.set_length(406)
track_artist = expected.create_artist()
track_artist.set_name('Massimo Russo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Italianbeat Guys')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title(u'Grey 2 Fade feat. Babette Duwez [Jurgen Cecconi Mix]')
track.set_length(653)
track_artist = expected.create_artist()
track_artist.set_name('Jurgen Cecconi')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Beethoven Tbs')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Tanzmania')
track.set_length(420)
track_artist = expected.create_artist()
track_artist.set_name('Carlo Cavalli')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/dj-tunes-compilation/851318')
r = s.get_result()
self.assertEqual(expected, r)
def test_release_with_empty_track_length(self):
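# 'Bedrock 10: Past Present Future' (various artists, Bedrock Records): 31 tracks, with track 9 expected to have no length (None).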
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2008-10-13')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
label_id = expected.create_label_id()
label_id.set_label('Bedrock Records')
label_id.append_catalogue_nr('BEDPPF10')
expected.append_label_id(label_id)
expected.set_title('Bedrock 10: Past Present Future')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Progressive House')
expected.append_genre('House')
expected.append_genre('Breaks')
expected.append_genre('Techno')
expected.append_genre('Tech House')
expected.set_url('http://www.beatport.com/release/bedrock-10-past-present-future/138250')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title(u'Past Present Future (Part 1) [Continuous DJ Mix]')
track.set_length(4454)
track_artist = expected.create_artist()
track_artist.set_name('John Digweed')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title(u'Emerald [Seismic Dub]')
track.set_length(501)
track_artist = expected.create_artist()
track_artist.set_name('Bedrock')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Seizmic')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title(u'Imagine [Estroe Remix]')
track.set_length(383)
track_artist = expected.create_artist()
track_artist.set_name('Fortunato & Montresor')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Estroe')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title(u'Dust [Charlie Mayhem Cold Shoulder Dub]')
track.set_length(540)
track_artist = expected.create_artist()
track_artist.set_name('Pole Folder')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('CP')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Charlie May')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title(u'Chutney [Tom Mangan 2008 Remix]')
track.set_length(483)
track_artist = expected.create_artist()
track_artist.set_name('Tom Mangan')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title(u'Rise In [Steve Lawler Powder Powder Remix]')
track.set_length(712)
track_artist = expected.create_artist()
track_artist.set_name('Steve Lawler')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title(u'Santiago [Chab Remix]')
track.set_length(522)
track_artist = expected.create_artist()
track_artist.set_name('Bedrock')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Chab')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title(u'Warung Beach [Jamie Stevens Remix]')
track.set_length(516)
track_artist = expected.create_artist()
track_artist.set_name('John Digweed')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Jamie Stevens')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title(u'Emerald [Seismic Dub]')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Bedrock')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Charlie May')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title(u"Dirtbox [Bruce Aisher's Resurrection Rewind Mix]")
track.set_length(464)
track_artist = expected.create_artist()
track_artist.set_name('Gutterstylz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Bruce Aisher')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title(u'Repercussion [Danny Howells Remix]')
track.set_length(594)
track_artist = expected.create_artist()
track_artist.set_name('Science Dept.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Danny Howells')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title(u"Beautiful Strange [John Digweed & Nick Muir's Even Stranger Remix]")
track.set_length(439)
track_artist = expected.create_artist()
track_artist.set_name('Nick Muir')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('John Digweed')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title(u'Save Me [James Talk Remix]')
track.set_length(485)
track_artist = expected.create_artist()
track_artist.set_name('Guy J')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('James Talk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title(u'Heaven Scent [M.O.D.E. Remix]')
track.set_length(675)
track_artist = expected.create_artist()
track_artist.set_name('Bedrock')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('M.O.D.E.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title(u'Past Present Future (Part 2) [Continuous DJ Mix]')
track.set_length(4333)
track_artist = expected.create_artist()
track_artist.set_name('John Digweed')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title(u'Forge [Tom Middleton Remix]')
track.set_length(481)
track_artist = expected.create_artist()
track_artist.set_name('Bedrock')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Tom Middleton')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title('Empathy')
track.set_length(478)
track_artist = expected.create_artist()
track_artist.set_name('Jim Rivers')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('18')
track.set_title(u'Pushin Too Hard [Bruce Aisher Remix]')
track.set_length(460)
track_artist = expected.create_artist()
track_artist.set_name('Saints & Sinners')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Bruce Aisher')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('19')
track.set_title(u'U Get So Give [Paolo Mojo Remix]')
track.set_length(497)
track_artist = expected.create_artist()
track_artist.set_name('Moonface')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Paolo Mojo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('20')
track.set_title(u'Chilling Moments [Kazell Influx Audio Remix]')
track.set_length(422)
track_artist = expected.create_artist()
track_artist.set_name('Shmuel Flash')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Kazell')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('21')
track.set_title(u'K10 [Alan Fitzpatrick Remix]')
track.set_length(512)
track_artist = expected.create_artist()
track_artist.set_name('Misstress Barbara')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Alan Fitzpatrick')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('22')
track.set_title(u'Chutney [Size 9 Reinterpretation]')
track.set_length(506)
track_artist = expected.create_artist()
track_artist.set_name('Tom Mangan')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('WiNK')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('23')
track.set_title('Stoppage Time')
track.set_length(636)
track_artist = expected.create_artist()
track_artist.set_name('Guy Gerber')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('24')
track.set_title('Six Hours Later')
track.set_length(459)
track_artist = expected.create_artist()
track_artist.set_name('Bradler')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Dualton')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('25')
track.set_title(u'All I Know feat. Astrid Suryanto [Jim Rivers Space Disco Remix]')
track.set_length(476)
track_artist = expected.create_artist()
track_artist.set_name('Morgan Page')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Gregory Shiff')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Jim Rivers')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('26')
track.set_title('Gravitation')
track.set_length(323)
track_artist = expected.create_artist()
track_artist.set_name('Nick Muir')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('27')
track.set_title(u'Rise In [Steve Lawler Powder Powder Remix (Edit)]')
track.set_length(298)
track_artist = expected.create_artist()
track_artist.set_name('Steve Lawler')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('28')
track.set_title(u"Santiago [Guy Gerber's Hotrod Dub]")
track.set_length(512)
track_artist = expected.create_artist()
track_artist.set_name('Bedrock')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Guy Gerber')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('29')
track.set_title('Sensei')
track.set_length(441)
track_artist = expected.create_artist()
track_artist.set_name("Funk D'Void")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Toby Izui')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('30')
track.set_title('Madhouse')
track.set_length(649)
track_artist = expected.create_artist()
track_artist.set_name('Pindrop')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('31')
track.set_title(u'Lunar [Late Night Friday Remix]')
track.set_length(349)
track_artist = expected.create_artist()
track_artist.set_name('Guy J')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/bedrock-10-past-present-future/138250')
r = s.get_result()
self.assertEqual(expected, r)
def test_release_with_various_artists_in_track_artist(self):
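# '040 Recordings Ibiza 2013 Vol. 1' (various artists): 16 tracks; the closing continuous mix carries no track artist of its own.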
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2013-07-10')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
label_id = expected.create_label_id()
label_id.set_label('040 Recordings')
label_id.append_catalogue_nr('040REC012C')
expected.append_label_id(label_id)
expected.set_title('040 Recordings Ibiza 2013 Vol. 1')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Minimal')
expected.append_genre('Tech House')
expected.set_url('http://www.beatport.com/release/040-recordings-ibiza-2013-vol-1/1113652')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Japanese Floor')
track.set_length(422)
track_artist = expected.create_artist()
track_artist.set_name('Cudder & Mulder')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Persian')
track.set_length(379)
track_artist = expected.create_artist()
track_artist.set_name('Carlo Ruetz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Me And My Violin')
track.set_length(483)
track_artist = expected.create_artist()
track_artist.set_name('Andree Wischnewski')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Aurra Che')
track.set_length(432)
track_artist = expected.create_artist()
track_artist.set_name('Dompe')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Cuture Club')
track.set_length(432)
track_artist = expected.create_artist()
track_artist.set_name('Debirski')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Yaman!')
track.set_length(435)
track_artist = expected.create_artist()
track_artist.set_name('Robbe Rabone')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Flint Westwood')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title(u'Jazzy Groovie [Thomas Lizzara Remix]')
track.set_length(409)
track_artist = expected.create_artist()
track_artist.set_name('Kanzler & Wischnewski')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Thomas Lizzara')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Alright')
track.set_length(416)
track_artist = expected.create_artist()
track_artist.set_name('Dompe')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title(u'The Sun Ft. Jan Hilde [Heinrich & Heine Remix]')
track.set_length(372)
track_artist = expected.create_artist()
track_artist.set_name('NECRO & Reichmann')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Heinrich & Heine')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.REMIXER)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Libre')
track.set_length(376)
track_artist = expected.create_artist()
track_artist.set_name('Neal Porter')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Closer')
track.set_length(430)
track_artist = expected.create_artist()
track_artist.set_name('Heinrich & Heine')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Society Today')
track.set_length(343)
track_artist = expected.create_artist()
track_artist.set_name('Maurice Deek')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Music For The People')
track.set_length(428)
track_artist = expected.create_artist()
track_artist.set_name('Heinrich & Heine')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Dont Touch My Phone')
track.set_length(472)
track_artist = expected.create_artist()
track_artist.set_name('Eric Kanzler')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Glare')
track.set_length(399)
track_artist = expected.create_artist()
track_artist.set_name('Heinrich & Heine')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title(u'040 Recordings Ibiza 2013 Vol. 1 [Heinrich & Heine In The Mix]')
track.set_length(4440)
disc.append_track(track)
expected.append_disc(disc)
s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/040-recordings-ibiza-2013-vol-1/1113652')
r = s.get_result()
self.assertEqual(expected, r)
def test_404(self):
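# A Beatport release URL that does not exist should produce a NotFoundResult.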
expected = NotFoundResult()
expected.set_scraper_name(None)
s = beatport.ReleaseScraper.from_string('http://www.beatport.com/release/blubb/123')
r = s.get_result()
self.assertEqual(expected, r)
def test_search_scraper(self):
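# A Beatport search for 'love' should return at least one item.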
s = beatport.SearchScraper('love')
r = s.get_result()
self.assertTrue(len(r.get_items()) > 0)
class MetalarchivesTest(TestCase):
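# Tests for the Metal Archives (metal-archives.com) release scraper.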
def test_simple_album(self):
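# Nightwish, 'Century Child' (Spinefarm Records, 2002): single-disc full-length CD with ten tracks.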
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('June 24th, 2002')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format('Full-length, CD')
label_id = expected.create_label_id()
label_id.set_label('Spinefarm Records')
label_id.append_catalogue_nr('spi149CD / 018459-2')
expected.append_label_id(label_id)
expected.set_title('Century Child')
artist = expected.create_artist()
artist.set_name('Nightwish')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://www.metal-archives.com/albums/Nightwish/Century_Child/3719')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Bless the Child')
track.set_length(372)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('End of All Hope')
track.set_length(235)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Dead to the World')
track.set_length(260)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Ever Dream')
track.set_length(284)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Slaying the Dreamer')
track.set_length(272)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Forever Yours')
track.set_length(230)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Ocean Soul')
track.set_length(255)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Feel for You')
track.set_length(235)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('The Phantom of the Opera')
track.set_length(250)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Beauty of the Beast')
track.set_length(622)
disc.append_track(track)
expected.append_disc(disc)
s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Nightwish/Century_Child/3719')
r = s.get_result()
self.assertEqual(expected, r)
def test_multiple_cds(self):
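# Within Temptation, 'Black Symphony' (GUN Records, 2008): live album expected as two discs of 12 and 10 tracks.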
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('September 22nd, 2008')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(u'Live album, 2CD')
label_id = expected.create_label_id()
label_id.set_label('GUN Records')
label_id.append_catalogue_nr('88697 342672')
expected.append_label_id(label_id)
expected.set_title('Black Symphony')
artist = expected.create_artist()
artist.set_name('Within Temptation')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://www.metal-archives.com/albums/Within_Temptation/Black_Symphony/212779')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Ouverture')
track.set_length(463)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title("Jillian (I'd Give My Heart)")
track.set_length(279)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('The Howling')
track.set_length(391)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Stand My Ground')
track.set_length(273)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('The Cross')
track.set_length(322)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('What Have You Done?')
track.set_length(298)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Hand of Sorrow')
track.set_length(340)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('The Heart of Everything')
track.set_length(348)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Forgiven')
track.set_length(293)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Somewhere')
track.set_length(264)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('The Swan Song')
track.set_length(240)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Memories')
track.set_length(243)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Our Solemn Hour')
track.set_length(322)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('The Other Half (of Me)')
track.set_length(304)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Frozen')
track.set_length(360)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('The Promise')
track.set_length(272)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Angels')
track.set_length(495)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Mother Earth')
track.set_length(242)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('The Truth Beneath the Rose')
track.set_length(443)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Deceiver of Fools')
track.set_length(458)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('All I Need')
track.set_length(295)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Ice Queen')
track.set_length(435)
disc.append_track(track)
expected.append_disc(disc)
s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Within_Temptation/Black_Symphony/212779')
r = s.get_result()
self.assertEqual(expected, r)
def test_multiple_release_artists(self):
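# Split release: both bands must appear as MAIN release artists.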
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('April 14th, 2007')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format('Split, 7" vinyl (45 RPM)')
label_id = expected.create_label_id()
label_id.set_label('New Iron Age Records')
label_id.append_catalogue_nr('NIA 002')
expected.append_label_id(label_id)
expected.set_title('Clash of Iron Vol. I - Live at Keep It True')
artist = expected.create_artist()
artist.set_name('Manilla Road')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
artist = expected.create_artist()
artist.set_name('Brocas Helm')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://www.metal-archives.com/albums/Manilla_Road/Clash_of_Iron_Vol._I_-_Live_at_Keep_It_True/147439')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Manilla Road - Death by the Hammer')
track.set_length(None)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Brocas Helm - Ravenwreck')
track.set_length(None)
disc.append_track(track)
expected.append_disc(disc)
s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Manilla_Road/Clash_of_Iron_Vol._I_-_Live_at_Keep_It_True/147439')
r = s.get_result()
self.assertEqual(expected, r)
def test_404(self):
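# A non-existent album id should yield a NotFoundResult.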
expected = NotFoundResult()
expected.set_scraper_name(None)
s = metalarchives.ReleaseScraper.from_string('http://www.metal-archives.com/albums/Within_Temptation/Black_Symphony/999999999')
r = s.get_result()
self.assertEqual(expected, r)
def test_search_scraper(self):
s = metalarchives.SearchScraper('love')
r = s.get_result()
self.assertTrue(len(r.get_items()) > 0)
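# Tests for the Junodownload release and search scrapers.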
class JunodownloadTest(TestCase):
def test_simple_album(self):
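# Single-artist digital release: date, label, genres and track list.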
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('3 July, 2011')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
label_id = expected.create_label_id()
label_id.set_label('3 Beat')
label_id.append_catalogue_nr('3BEAT 051')
expected.append_label_id(label_id)
expected.set_title('Love')
artist = expected.create_artist()
artist.set_name('Inna')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Funky')
expected.append_genre('Club House')
expected.set_url('http://www.junodownload.com/products/love/1774811-02/')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Love (UK radio edit)')
track.set_length(151)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Love (club mix)')
track.set_length(299)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Love (eSquire radio edit)')
track.set_length(233)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Love (eSquire mix)')
track.set_length(357)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Love (7th Heaven radio edit)')
track.set_length(230)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Love (7th Heaven mix)')
track.set_length(394)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Love (Dandeej mix)')
track.set_length(315)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Love (DJ Andi mix)')
track.set_length(341)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Love (Klubfiller mix)')
track.set_length(395)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Love (Klubfiller dub mix)')
track.set_length(389)
disc.append_track(track)
expected.append_disc(disc)
s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/love/1774811-02/')
r = s.get_result()
self.assertEqual(expected, r)
def test_featuring_main_artist(self):
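# The featured artist should be captured with the FEATURING artist type alongside the MAIN artist.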
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('12 November, 2010')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
label_id = expected.create_label_id()
label_id.set_label('Staff Productions')
label_id.append_catalogue_nr('SFP 012')
expected.append_label_id(label_id)
expected.set_title('Love')
artist = expected.create_artist()
artist.set_name('Mustafa')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
artist = expected.create_artist()
artist.set_name('Tasita D mour')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.FEATURING)
expected.append_release_artist(artist)
expected.append_genre('Broken Beat')
expected.append_genre('Nu Jazz')
expected.set_url('http://www.junodownload.com/products/love/1662955-02/')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Love (original Miami mix)')
track.set_length(301)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title("Love (Mustafa's Deep Piano mix)")
track.set_length(308)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Love (D-Malice Afro-edit vocal)')
track.set_length(381)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Love (RY meets Mustafa vocal mix)')
track.set_length(365)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Love (Ospina & Oscar P remix)')
track.set_length(365)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Love (Ospina & Oscar P Drum dub)')
track.set_length(365)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Love (Steven Stone remix)')
track.set_length(389)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Love (David Mateo & Rafix club mix)')
track.set_length(297)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Love (Rafael Yapudjian Meets RyB remix)')
track.set_length(449)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Love (acoustic mix)')
track.set_length(232)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Love (D-Malice Afro edit instrumental)')
track.set_length(381)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Love (Ospina & Oscar P intru-mental)')
track.set_length(365)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Love (Steven Stone instrumental remix)')
track.set_length(388)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Love (David Mateo & Rafix radio club mix instrumental)')
track.set_length(297)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Love (Rafael Yapudjian Meets RyB dub remix)')
track.set_length(449)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('Love (RY Meets Mustafa instrumental mix)')
track.set_length(365)
disc.append_track(track)
expected.append_disc(disc)
s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/love/1662955-02/')
r = s.get_result()
self.assertEqual(expected, r)
def test_mixed_various_main_artists(self):
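# Release credited to a single MAIN artist; collaborating artists stay embedded in the track titles.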
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('25 July, 2011')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
label_id = expected.create_label_id()
label_id.set_label('Vacation Australia')
label_id.append_catalogue_nr('VRCD 003')
expected.append_label_id(label_id)
expected.set_title('Bass Kleph Presents')
artist = expected.create_artist()
artist.set_name('Bass Kleph')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Funky')
expected.append_genre('Club House')
expected.set_url('http://www.junodownload.com/products/bass-kleph-bass-kleph-presents/1789514-02/')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Bass Kleph & Filthy Rich - These Mornings')
track.set_length(368)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Bass Kleph & Alex Kenji - Melocoton')
track.set_length(370)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Bass Kleph - Hey Ya')
track.set_length(380)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Bass Kleph & Chris Arnott & BKCA - We Feel Love')
track.set_length(360)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Bass Kleph - Oh Yeah')
track.set_length(403)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Stella MC & Bass Kleph - $pend My Money (original club mix)')
track.set_length(490)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title("Bass Kleph - I'll Be OK")
track.set_length(434)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Danny T & Oh Snap!! - Whine Ya Waistline (Bass Kleph remix)')
track.set_length(404)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Joan Reyes - Shakedown (Bass Kleph remix)')
track.set_length(438)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Bass Kleph & Prok & Fitch - Disco Ate My Baby')
track.set_length(362)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Moguai & Westbam - Original Hardcore (Bass Kleph remix)')
track.set_length(420)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Jesse Vorn - Somewhere (Bass Kleph remix)')
track.set_length(376)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Bass Kleph & Chris Arnott & Kid Massive - All Right (Bass Kleph remix)')
track.set_length(456)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Bass Kleph & Joan Reyes - Salida (original club mix)')
track.set_length(427)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Bass Kleph & D Ramirez - Pulse')
track.set_length(396)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('Various - Bass Kleph Presents (continuous DJ mix by Bass Kleph)')
track.set_length(4439)
disc.append_track(track)
expected.append_disc(disc)
s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/bass-kleph-bass-kleph-presents/1789514-02/')
r = s.get_result()
self.assertEqual(expected, r)
def test_various_artists(self):
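# Compilation: the release artist is flagged as various with no name.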
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('30 December, 2008')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
label_id = expected.create_label_id()
label_id.set_label('NuZone Tools')
label_id.append_catalogue_nr('NZT 015')
expected.append_label_id(label_id)
expected.set_title('2008 MOST USEFUL TOOLS')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Progressive House')
expected.set_url('http://www.junodownload.com/products/2008-most-useful-tools/1384246-02/')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Sygma - Nightlights')
track.set_length(522)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title("Adolfo Morrone - I'm Nervhouse")
track.set_length(455)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Jonathan Carey - The Science Of Music')
track.set_length(354)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Lorenzo Venturini - New Era')
track.set_length(415)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('E-Mark - Anthem For Deejays Part 2')
track.set_length(420)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Alex Spadoni - Sunset')
track.set_length(451)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Jordan Baxxter feat Aedo - What It Feels Like For A Girl?')
track.set_length(470)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Hildebrand - Raindrops')
track.set_length(519)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Dario Maffia - Phaelon')
track.set_length(545)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Emerald Coast - Exhausted')
track.set_length(338)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Sygma - Children')
track.set_length(539)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('GoldSaint - Tonight')
track.set_length(405)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Peter Santos - Back To You')
track.set_length(454)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Oscar Burnside - Dark Side')
track.set_length(334)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('GoldSaint - Recharge')
track.set_length(510)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('Luca Lux - Wildest Dream')
track.set_length(428)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title('SimoX DJ - Star')
track.set_length(317)
disc.append_track(track)
track = disc.create_track()
track.set_number('18')
track.set_title('Greek S - The Sound (09 mix)')
track.set_length(517)
disc.append_track(track)
track = disc.create_track()
track.set_number('19')
track.set_title('Various - Mixed Tools 2008 (Part 1 - mixed by Sygma)')
track.set_length(2494)
disc.append_track(track)
track = disc.create_track()
track.set_number('20')
track.set_title('Various - Mixed Tools 2008 (Part 2 - mixed by Peter Santos)')
track.set_length(2334)
disc.append_track(track)
expected.append_disc(disc)
s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/2008-most-useful-tools/1384246-02/')
r = s.get_result()
self.assertEqual(expected, r)
def test_404(self):
expected = NotFoundResult()
expected.set_scraper_name(None)
s = junodownload.ReleaseScraper.from_string('http://www.junodownload.com/products/2008-most-useful-tools/99999999/')
r = s.get_result()
self.assertEqual(expected, r)
def test_search_scraper(self):
s = junodownload.SearchScraper('love')
r = s.get_result()
self.assertTrue(len(r.get_items()) > 0)
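# Tests for the iTunes Store release and search scrapers.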
class ITunesTest(TestCase):
def test_simple_album(self):
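# Single-disc album with multiple genres taken from the US store page.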
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('1985')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
expected.set_title('Love (Remastered)')
artist = expected.create_artist()
artist.set_name('The Cult')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Rock')
expected.append_genre('Adult Alternative')
expected.append_genre('Hard Rock')
expected.append_genre('Alternative')
expected.append_genre('Goth Rock')
expected.append_genre('College Rock')
expected.set_url('http://itunes.apple.com/us/album/love-remastered/id3022929?ign-mpt=uo%3D4')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Nirvana')
track.set_length(326)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Big Neon Glitter')
track.set_length(291)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Love')
track.set_length(329)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Brother Wolf, Sister Moon')
track.set_length(407)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Rain')
track.set_length(236)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Phoenix')
track.set_length(306)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Hollow Man')
track.set_length(285)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Revolution')
track.set_length(326)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('She Sells Sanctuary')
track.set_length(263)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Black Angel')
track.set_length(322)
disc.append_track(track)
expected.append_disc(disc)
s = itunes.ReleaseScraper.from_string('http://itunes.apple.com/us/album/love-remastered/id3022929?ign-mpt=uo%3D4')
r = s.get_result()
self.assertEqual(expected, r)
def test_multiple_cds(self):
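# Double-disc version: both discs and their track lists should be scraped.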
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('Aug 15, 2007')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
expected.set_title('Dark Passion Play (Double Disc Version)')
artist = expected.create_artist()
artist.set_name('Nightwish')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Rock')
expected.append_genre('Metal')
expected.append_genre('Alternative')
expected.append_genre('Goth Rock')
expected.append_genre('Death Metal/Black Metal')
expected.set_url('https://itunes.apple.com/us/album/dark-passion-play-double-disc/id264697038?uo=4')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('The Poet and the Pendulum')
track.set_length(834)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Bye Bye Beautiful')
track.set_length(254)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Amaranth')
track.set_length(231)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Cadence of Her Last Breath')
track.set_length(254)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Master Passion Greed')
track.set_length(362)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Eva')
track.set_length(265)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Sahara')
track.set_length(347)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Whoever Brings the Night')
track.set_length(257)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('For the Heart I Once Had')
track.set_length(235)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('The Islander')
track.set_length(305)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Last of the Wilds')
track.set_length(340)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('7 Days to the Wolves')
track.set_length(423)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Meadows of Heaven')
track.set_length(430)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Reach (Amaranth Demo Version) [Bonus Track]')
track.set_length(232)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('The Poet and the Pendulum (Instrumental)')
track.set_length(834)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Bye Bye Beautiful (Instrumental)')
track.set_length(254)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Amaranth (Instrumental)')
track.set_length(231)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Cadence of Her Last Breath (Instrumental)')
track.set_length(254)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Master Passion Greed (Instrumental)')
track.set_length(362)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Eva (Instrumental)')
track.set_length(265)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Sahara (Instrumental)')
track.set_length(347)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Whoever Brings the Night (Instrumental)')
track.set_length(257)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('For the Heart I Once Had (Instrumental)')
track.set_length(236)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('The Islander (Instrumental)')
track.set_length(305)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Last of the Wilds (Instrumental)')
track.set_length(340)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('7 Days to the Wolves (Instrumental)')
track.set_length(424)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Meadows of Heaven (Instrumental)')
track.set_length(429)
disc.append_track(track)
expected.append_disc(disc)
s = itunes.ReleaseScraper.from_string('https://itunes.apple.com/us/album/dark-passion-play-double-disc/id264697038?uo=4')
r = s.get_result()
self.assertEqual(expected, r)
def test_various_artists(self):
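# Soundtrack compilation: the release artist is various and each track carries its own artist.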
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('Oct 28, 2008')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
expected.set_title('Twilight (Original Motion Picture Soundtrack)')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Soundtrack')
expected.set_url('https://itunes.apple.com/us/album/twilight-original-motion-picture/id294342468?ign-mpt=uo%3D4')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Supermassive Black Hole')
track.set_length(209)
track_artist = expected.create_artist()
track_artist.set_name('Muse')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Decode')
track.set_length(261)
track_artist = expected.create_artist()
track_artist.set_name('Paramore')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Full Moon')
track.set_length(230)
track_artist = expected.create_artist()
track_artist.set_name('The Black Ghosts')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Leave Out All the Rest')
track.set_length(199)
track_artist = expected.create_artist()
track_artist.set_name('LINKIN PARK')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Spotlight (Twilight Mix)')
track.set_length(200)
track_artist = expected.create_artist()
track_artist.set_name('MuteMath')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Go All the Way (Into the Twilight)')
track.set_length(207)
track_artist = expected.create_artist()
track_artist.set_name('Perry Farrell')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Tremble for My Beloved')
track.set_length(233)
track_artist = expected.create_artist()
track_artist.set_name('Collective Soul')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('I Caught Myself')
track.set_length(235)
track_artist = expected.create_artist()
track_artist.set_name('Paramore')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Eyes On Fire')
track.set_length(301)
track_artist = expected.create_artist()
track_artist.set_name('Blue Foundation')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Never Think')
track.set_length(269)
track_artist = expected.create_artist()
track_artist.set_name('Rob Pattinson')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Flightless Bird, American Mouth')
track.set_length(240)
track_artist = expected.create_artist()
track_artist.set_name('Iron & Wine')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title("Bella's Lullaby")
track.set_length(138)
track_artist = expected.create_artist()
track_artist.set_name('Carter Burwell')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Let Me Sign (Bonus Track)')
track.set_length(138)
track_artist = expected.create_artist()
track_artist.set_name('Rob Pattinson')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('La Traviata (Bonus Track)')
track.set_length(185)
track_artist = expected.create_artist()
track_artist.set_name('Royal Philharmonic Orchestra')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Clair de Lune (Bonus Track)')
track.set_length(358)
track_artist = expected.create_artist()
track_artist.set_name('The APM Orchestra')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = itunes.ReleaseScraper.from_string('https://itunes.apple.com/us/album/twilight-original-motion-picture/id294342468?ign-mpt=uo%3D4')
r = s.get_result()
self.assertEqual(expected, r)
def test_non_us_store(self):
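# Same album on the French store: genre names come back localized.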
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('1985')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
expected.set_title('Love (Remastered)')
artist = expected.create_artist()
artist.set_name('The Cult')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Rock')
expected.append_genre('Musique')
expected.append_genre('Alternative adulte')
expected.append_genre('Hard rock')
expected.append_genre('Alternative')
expected.append_genre('Rock gothique')
expected.append_genre('College rock')
expected.set_url('http://itunes.apple.com/fr/album/love-remastered/id3022929?ign-mpt=uo%3D4')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Nirvana')
track.set_length(326)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Big Neon Glitter')
track.set_length(291)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Love')
track.set_length(329)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Brother Wolf, Sister Moon')
track.set_length(407)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Rain')
track.set_length(236)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Phoenix')
track.set_length(306)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Hollow Man')
track.set_length(285)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Revolution')
track.set_length(326)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('She Sells Sanctuary')
track.set_length(263)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Black Angel')
track.set_length(322)
disc.append_track(track)
expected.append_disc(disc)
s = itunes.ReleaseScraper.from_string('http://itunes.apple.com/fr/album/love-remastered/id3022929?ign-mpt=uo%3D4')
r = s.get_result()
self.assertEqual(expected, r)
def test_tracknum_in_name_column(self):
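# Classical multi-disc release whose track numbers appear in the name column; per-track artists are still extracted.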
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('Jun 01, 2005')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format(None)
expected.set_title('Chopin: Piano Works')
artist = expected.create_artist()
artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Classical')
expected.set_url('https://itunes.apple.com/us/album/chopin-piano-works/id77261376')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('12 Etudes, Op. 10: No. 1. in C')
track.set_length(136)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('12 Etudes, Op.10: No. 2. in A Minor "chromatique"')
track.set_length(84)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('12 Etudes, Op.10: No. 3. in E "Tristesse"')
track.set_length(243)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('12 Etudes, Op.10: No. 4. in C-Sharp Minor')
track.set_length(128)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('12 Etudes, Op.10: No. 5. in G-Flat "Black Keys"')
track.set_length(102)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('12 Etudes, Op.10: No. 6. in E-Flat Minor')
track.set_length(195)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('12 Etudes, Op.10: No. 7. in C')
track.set_length(92)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('12 Etudes, Op.10: No. 8. in F')
track.set_length(163)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('12 Etudes, Op.10: No. 9. in F Minor')
track.set_length(137)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('12 Etudes, Op.10: No. 10. in A-Flat')
track.set_length(140)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('12 Etudes, Op.10: No. 11. in E-Flat')
track.set_length(135)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('12 Etudes, Op.10: No. 12. in C Minor "Revolutionary"')
track.set_length(173)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('12 Etudes, Op. 25: No. 1 in A-Flat - "Harp Study"')
track.set_length(171)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('12 Etudes, Op.25: No. 2 in F Minor')
track.set_length(92)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('12 Etudes, Op.25: No. 3 in F Major')
track.set_length(103)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('12 Etudes, Op.25: No. 4 in A Minor')
track.set_length(88)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title('12 Etudes, Op.25: No. 5 in E Minor')
track.set_length(198)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('18')
track.set_title('12 Etudes, Op.25: No. 6 in G-Sharp Minor')
track.set_length(117)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('19')
track.set_title('12 Etudes, Op.25: No. 7 in C-Sharp Minor')
track.set_length(312)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('20')
track.set_title('12 Etudes, Op.25: No. 8 in D-Flat')
track.set_length(66)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('21')
track.set_title('12 Etudes, Op.25: No. 9 in G-Flat, "Butterfly Wings"')
track.set_length(62)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('22')
track.set_title('12 Etudes, Op.25: No. 10 in B Minor')
track.set_length(244)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('23')
track.set_title('12 Etudes, Op.25: No. 11 in A Minor "Winter Wind"')
track.set_length(215)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('24')
track.set_title('12 Etudes, Op. 25: No. 12 in C Minor')
track.set_length(168)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('25')
track.set_title('Impromptu No. 1 in A-Flat, Op.29')
track.set_length(233)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('26')
track.set_title('Impromptu No. 2 in F-Sharp, Op.36')
track.set_length(351)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('27')
track.set_title('Impromptu No. 3 in G-Flat, Op.51')
track.set_length(284)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('28')
track.set_title('Impromptu No. 4 in C-Sharp Minor, Op. 66 "Fantaisie-Impromptu"')
track.set_length(291)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Piano Sonata No. 2 in B-Flat Minor, Op. 35: I. Grave - Doppio Movimento')
track.set_length(331)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title(u'Piano Sonata No. 2 in B-Flat Minor, Op. 35: II. Scherzo - Pi\xf9 Lento - Tempo I')
track.set_length(397)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title(u'Piano Sonata No. 2 in B-Flat Minor, Op. 35: III. Marche Fun\xe8bre (Lento)')
track.set_length(503)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Piano Sonata No. 2 in B-Flat Minor, Op. 35: IV. Finale (Presto)')
track.set_length(97)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Piano Sonata No. 3 in B Minor, Op. 58: I. Allegro Maestoso')
track.set_length(533)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Piano Sonata No. 3 in B Minor, Op. 58: II. Scherzo (Molto Vivace)')
track.set_length(170)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Piano Sonata No. 3 in B Minor, Op. 58: III. Largo')
track.set_length(561)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Piano Sonata No. 3 in B Minor, Op. 58: IV. Finale (Presto Non Tanto)')
track.set_length(309)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Mazurka No. 54 in D: Allegro Non Troppo')
track.set_length(71)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Mazurka No. 46 in C Op.67 No.3: Allegretto')
track.set_length(88)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Mazurka No. 49 in A Minor Op. 68, No. 2: Lento')
track.set_length(155)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Mazurka No. 5 in B-Flat Op. 7, No. 1: Vivace')
track.set_length(140)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Introduction and Variations On a German National Air Op.posth. (KK 925-927)')
track.set_length(387)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Mazurka No. 58 in A-Flat: Poco Mosso')
track.set_length(77)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Berceuse in D-Flat, Op. 57: Andante')
track.set_length(316)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('Polonaise No. 6 in A-Flat, Op. 53 -"Heroic": Maestoso')
track.set_length(413)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(3)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Piano Concerto No. 1 in E Minor, Op. 11: I. Allegro Maestoso')
track.set_length(1215)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Jerzy Semkow')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Berlin Philharmonic')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Piano Concerto No. 1 in E Minor, Op. 11: II. Romance (Larghetto)')
track.set_length(636)
track_artist = expected.create_artist()
track_artist.set_name(u'Jerzy Semkow')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Berlin Philharmonic')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Piano Concerto No. 1 in E Minor, Op. 11: III. Rondo (Vivace)')
track.set_length(619)
track_artist = expected.create_artist()
track_artist.set_name(u'Jerzy Semkow')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Berlin Philharmonic')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Piano Concerto No. 2 in F Minor, Op. 21: I. Maestoso')
track.set_length(901)
track_artist = expected.create_artist()
track_artist.set_name(u'Janos Kulka')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Berlin Philharmonic')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Piano Concerto No. 2 in F Minor, Op. 21: II. Larghetto')
track.set_length(583)
track_artist = expected.create_artist()
track_artist.set_name(u'Janos Kulka')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Berlin Philharmonic')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Piano Concerto No. 2 in F Minor, Op. 21: III. Allegro Vivace')
track.set_length(524)
track_artist = expected.create_artist()
track_artist.set_name(u'Janos Kulka')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Berlin Philharmonic')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name(u'Tam\xe1s V\xe1s\xe1ry')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = itunes.ReleaseScraper.from_string('https://itunes.apple.com/us/album/chopin-piano-works/id77261376')
r = s.get_result()
self.assertEqual(expected, r)
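# Requesting a non-existent iTunes album ID should yield a NotFoundResult.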
def test_404(self):
expected = NotFoundResult()
expected.set_scraper_name(None)
s = itunes.ReleaseScraper.from_string('http://itunes.apple.com/us/album/blubb/id999999999999')
r = s.get_result()
self.assertEqual(expected, r)
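# The same 404 handling should apply to non-US iTunes storefronts (here the French store).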
def test_non_us_404(self):
expected = NotFoundResult()
expected.set_scraper_name(None)
s = itunes.ReleaseScraper.from_string('http://itunes.apple.com/fr/album/blubb/id999999999999')
r = s.get_result()
self.assertEqual(expected, r)
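# A search for a common term should return at least one result item.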
def test_search_scraper(self):
s = itunes.SearchScraper('love')
r = s.get_result()
self.assertTrue(len(r.get_items()) > 0)
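# Tests for the Bandcamp release and discography scrapers.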
class BandcampTest(TestCase):
def test_album_with_band_name(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2012')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format('WEB release')
expected.set_title(u'Love Sex Machine')
artist = expected.create_artist()
artist.set_name(u'Love Sex Machine')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://music.throatruinerrecords.com/album/love-sex-machine')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title(u'Anal On Deceased Virgin')
track.set_length(335)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title(u'Deafening Peepshow')
track.set_length(270)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title(u'Fucking Battle')
track.set_length(157)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title(u'Antagonism Can STFU')
track.set_length(179)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title(u'Plenty Of Feelings')
track.set_length(147)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title(u'Vagina Curse')
track.set_length(320)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title(u'Killed With A Monster Cock')
track.set_length(284)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title(u'Warstrike Takes The Piss')
track.set_length(275)
disc.append_track(track)
expected.append_disc(disc)
s = bandcamp.ReleaseScraper.from_string('http://music.throatruinerrecords.com/album/love-sex-machine')
r = s.get_result()
self.assertEqual(expected, r)
# A test named test_album_without_band_name existed here at some point;
# its purpose is no longer known, so only this placeholder remains.
# def test_album_without_band_name(self):
# pass
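# A compilation should be detected as a various-artists release, with an artist credit on every track.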
def test_album_with_various_artists(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2013')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format('WEB release')
expected.set_title(u'Indietracks Compilation 2013')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://indietracks.bandcamp.com/album/indietracks-compilation-2013')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title(u'If You Still Want Him')
track.set_length(250)
track_artist = expected.create_artist()
track_artist.set_name(u'The French Defence')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title(u'Is Anybody Out There?')
track.set_length(246)
track_artist = expected.create_artist()
track_artist.set_name(u'The Ballet')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title(u'Rulers And The States')
track.set_length(165)
track_artist = expected.create_artist()
track_artist.set_name(u'bis')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title(u'Temporary Tattoo')
track.set_length(171)
track_artist = expected.create_artist()
track_artist.set_name(u'Lardpony')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title(u'Always Want Us To')
track.set_length(192)
track_artist = expected.create_artist()
track_artist.set_name(u'EXPENSIVE')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title(u'Stockport')
track.set_length(328)
track_artist = expected.create_artist()
track_artist.set_name(u'The Wake')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title(u'Secret')
track.set_length(132)
track_artist = expected.create_artist()
track_artist.set_name(u'Frozy')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title(u'Jackie')
track.set_length(218)
track_artist = expected.create_artist()
track_artist.set_name(u'The Understudies')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title(u'Ticket Machine')
track.set_length(184)
track_artist = expected.create_artist()
track_artist.set_name(u'Making Marks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title(u'Echoing Days')
track.set_length(204)
track_artist = expected.create_artist()
track_artist.set_name(u'Monnone Alone')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title(u'Swanwick Junction')
track.set_length(172)
track_artist = expected.create_artist()
track_artist.set_name(u'Northern Spies')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title(u'Terrible Things')
track.set_length(141)
track_artist = expected.create_artist()
track_artist.set_name(u'Owl & Mouse')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title(u"She'll Come Back for Indian Summer")
track.set_length(218)
track_artist = expected.create_artist()
track_artist.set_name(u'Alpaca Sports')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title(u'Glockelbar')
track.set_length(137)
track_artist = expected.create_artist()
track_artist.set_name(u'Haiku Salut')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title(u'Astronaut')
track.set_length(190)
track_artist = expected.create_artist()
track_artist.set_name(u'Woog Riots')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title(u'Tut Tut Tut')
track.set_length(150)
track_artist = expected.create_artist()
track_artist.set_name(u'The Tuts')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title(u'Mosaic')
track.set_length(161)
track_artist = expected.create_artist()
track_artist.set_name(u'Fear Of Men')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('18')
track.set_title(u'Only You')
track.set_length(194)
track_artist = expected.create_artist()
track_artist.set_name(u'Big Wave')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('19')
track.set_title(u'The Things That You Said')
track.set_length(200)
track_artist = expected.create_artist()
track_artist.set_name(u'The Fireworks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('20')
track.set_title(u'Glue')
track.set_length(276)
track_artist = expected.create_artist()
track_artist.set_name(u'Fever Dream')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('21')
track.set_title(u'Slackjawed')
track.set_length(175)
track_artist = expected.create_artist()
track_artist.set_name(u'Tunabunny')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('22')
track.set_title(u'Lie')
track.set_length(224)
track_artist = expected.create_artist()
track_artist.set_name(u'Cars Can Be Blue')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('23')
track.set_title(u'Br\xe4nn\xf6')
track.set_length(223)
track_artist = expected.create_artist()
track_artist.set_name(u'Finnmark!')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('24')
track.set_title(u'Sorry')
track.set_length(166)
track_artist = expected.create_artist()
track_artist.set_name(u'The Art Club')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('25')
track.set_title(u'Food')
track.set_length(181)
track_artist = expected.create_artist()
track_artist.set_name(u'The Lovely Eggs')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('26')
track.set_title(u'Clean Up Yr Own Shit, Pal')
track.set_length(132)
track_artist = expected.create_artist()
track_artist.set_name(u'Good Grief')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('27')
track.set_title(u'Sycamore')
track.set_length(162)
track_artist = expected.create_artist()
track_artist.set_name(u'Martha')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('28')
track.set_title(u'Disappear')
track.set_length(147)
track_artist = expected.create_artist()
track_artist.set_name(u'Bloomer')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('29')
track.set_title(u'You Held My Hand')
track.set_length(158)
track_artist = expected.create_artist()
track_artist.set_name(u'Flowers')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('30')
track.set_title(u'J.K.')
track.set_length(139)
track_artist = expected.create_artist()
track_artist.set_name(u'David Leach')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('31')
track.set_title(u'Always Thought')
track.set_length(294)
track_artist = expected.create_artist()
track_artist.set_name(u'Jupiter In Jars')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('32')
track.set_title(u'My Old Friend')
track.set_length(164)
track_artist = expected.create_artist()
track_artist.set_name(u"Enderby's Room")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('33')
track.set_title(u'I Got The Answer')
track.set_length(172)
track_artist = expected.create_artist()
track_artist.set_name(u'The Magic Theatre')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('34')
track.set_title(u'I Love You')
track.set_length(178)
track_artist = expected.create_artist()
track_artist.set_name(u'The Wave Pictures')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('35')
track.set_title(u'Pilot Light')
track.set_length(234)
track_artist = expected.create_artist()
track_artist.set_name(u'Pete Green')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('36')
track.set_title(u"Let's Go Surfing")
track.set_length(181)
track_artist = expected.create_artist()
track_artist.set_name(u'Helen Love')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('37')
track.set_title(u'Summer, You And Me')
track.set_length(180)
track_artist = expected.create_artist()
track_artist.set_name(u'When Nalda Became Punk')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('38')
track.set_title(u'Secret Wish')
track.set_length(89)
track_artist = expected.create_artist()
track_artist.set_name(u'The McTells')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('39')
track.set_title(u'Better Than Love')
track.set_length(163)
track_artist = expected.create_artist()
track_artist.set_name(u'Pale Spectres')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('40')
track.set_title(u'Without You')
track.set_length(147)
track_artist = expected.create_artist()
track_artist.set_name(u'Milky Wimpshake')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('41')
track.set_title(u"Let's Stay Undecided")
track.set_length(181)
track_artist = expected.create_artist()
track_artist.set_name(u'The Soulboy Collective mit Antenne Danger')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('42')
track.set_title(u'Age Of Victoria')
track.set_length(261)
track_artist = expected.create_artist()
track_artist.set_name(u'The Secret History')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('43')
track.set_title(u'Eating Me, Eating You')
track.set_length(202)
track_artist = expected.create_artist()
track_artist.set_name(u'The Beautiful Word')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('44')
track.set_title(u'Scared And Worried')
track.set_length(142)
track_artist = expected.create_artist()
track_artist.set_name(u'Without Feathers')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('45')
track.set_title(u'Save Me')
track.set_length(155)
track_artist = expected.create_artist()
track_artist.set_name(u'The Choo Choo Trains')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('46')
track.set_title(u'Evil/Shy (Acoustic Version)')
track.set_length(187)
track_artist = expected.create_artist()
track_artist.set_name(u'The Mini Skips')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('47')
track.set_title(u'Slow Trains')
track.set_length(201)
track_artist = expected.create_artist()
track_artist.set_name(u'anaesthetics')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = bandcamp.ReleaseScraper.from_string('http://indietracks.bandcamp.com/album/indietracks-compilation-2013')
r = s.get_result()
self.assertEqual(expected, r)
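# A track credited to someone other than the release artist should carry that artist at track level.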
def test_album_with_track_artist(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2012')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format('Free WEB release')
expected.set_title(u'Love Everyday EP')
artist = expected.create_artist()
artist.set_name(u'Dumbfoundead')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://music.dumbfoundead.com/album/love-everyday-ep')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title(u'For You')
track.set_length(91)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title(u'Love Everyday')
track.set_length(211)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title(u'Stole the Show')
track.set_length(177)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title(u'Love is a Song')
track.set_length(292)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title(u'Body High ft. Breezy Lovejoy & Jose Rios')
track.set_length(267)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title(u'Not Right Now ft. Wax')
track.set_length(173)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title(u'Breezy Lovejoy - Paradise')
track.set_length(202)
track_artist = expected.create_artist()
track_artist.set_name(u'Breezy Lovejoy')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = bandcamp.ReleaseScraper.from_string('http://music.dumbfoundead.com/album/love-everyday-ep')
r = s.get_result()
self.assertEqual(expected, r)
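# Non-ASCII characters in track titles (e.g. 'Cancún') must be preserved.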
def test_album_with_utf8_characters(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(u'2012')
release_event.set_country(None)
expected.append_release_event(release_event)
expected.set_format('WEB release')
expected.set_title(u'Illusions')
artist = expected.create_artist()
artist.set_name(u'Approaching Nirvana')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.set_url('http://music.approachingnirvana.com/album/illusions')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title(u'Sugar High')
track.set_length(162)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title(u'Illusion (feat. Brenton Mattheus)')
track.set_length(267)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title(u'Beer Remastered')
track.set_length(281)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title(u'Snowfall')
track.set_length(270)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title(u'Love Theory')
track.set_length(157)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title(u'Canc\xfan')
track.set_length(257)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title(u'South Side')
track.set_length(268)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title(u'Illusion (Instrumental)')
track.set_length(267)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title(u'Love Theory (Instrumental)')
track.set_length(157)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title(u'Illusion (Extended Mix) [feat. Brenton Mattheus]')
track.set_length(372)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title(u'Beer Remastered (Extended Mix)')
track.set_length(420)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title(u'Snowfall (Extended Mix)')
track.set_length(424)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title(u'Love Theory (Extended Mix)')
track.set_length(299)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title(u'Canc\xfan (Extended Mix)')
track.set_length(374)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title(u'South Side (Extended Mix)')
track.set_length(374)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title(u'Illusions Continuous Mix')
track.set_length(2018)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title(u'Illusions Continuous Instrumental Mix')
track.set_length(2018)
disc.append_track(track)
expected.append_disc(disc)
s = bandcamp.ReleaseScraper.from_string('http://music.approachingnirvana.com/album/illusions')
r = s.get_result()
self.assertEqual(expected, r)
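# Scraping a Bandcamp artist page should yield a ListResult covering the releases in its discography.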
def test_band_discography(self):
expected = ListResult()
expected.set_scraper_name(None)
item = expected.create_item()
item.set_name(u'Amanda Palmer \u2013 Who Killed Amanda Palmer [Alternate Tracks]')
item.set_info('Release date: 2008-12-24')
item.set_query(u'http://amandapalmer.bandcamp.com/album/who-killed-amanda-palmer-alternate-tracks?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/who-killed-amanda-palmer-alternate-tracks?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'8in8 \u2013 Nighty Night')
item.set_info('Release date: 2011-04-26')
item.set_query(u'http://amandapalmer.bandcamp.com/album/nighty-night?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/nighty-night?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Amanda Palmer & The Grand Theft Orchestra \u2013 Theatre Is Evil')
item.set_info('Release date: 2012-09-11')
item.set_query(u'http://amandapalmer.bandcamp.com/album/theatre-is-evil-2?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/theatre-is-evil-2?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Amanda Palmer \u2013 The Art of Asking Playlist')
item.set_info('Release date: 2014-11-11')
item.set_query(u'http://amandapalmer.bandcamp.com/album/the-art-of-asking-playlist?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/the-art-of-asking-playlist?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Amanda Palmer \u2013 Amanda Palmer Performs The Popular Hits Of Radiohead On Her Magical Ukulele')
item.set_info('Release date: 2010-07-20')
item.set_query(u'http://amandapalmer.bandcamp.com/album/amanda-palmer-performs-the-popular-hits-of-radiohead-on-her-magical-ukulele?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/amanda-palmer-performs-the-popular-hits-of-radiohead-on-her-magical-ukulele?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Tristan Allen \u2013 Tristan Allen')
item.set_info('Release date: 2010-12-12')
item.set_query(u'http://amandapalmer.bandcamp.com/album/tristan-allen?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/tristan-allen?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u"Amanda Palmer & Friends \u2013 AFP's Webcastacular NYC Extravaganzaca!")
item.set_info('Release date: 2010-09-23')
item.set_query(u'http://amandapalmer.bandcamp.com/album/afps-webcastacular-nyc-extravaganzaca?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/afps-webcastacular-nyc-extravaganzaca?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Amanda Palmer \u2013 Who Killed Amanda Palmer')
item.set_info('Release date: 2008-09-16')
item.set_query(u'http://amandapalmer.bandcamp.com/album/who-killed-amanda-palmer?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/who-killed-amanda-palmer?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Amanda Palmer & Murder By Death \u2013 7 Series (Part 3)')
item.set_info('Release date: 2009-05-15')
item.set_query(u'http://amandapalmer.bandcamp.com/album/7-series-part-3?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/7-series-part-3?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Amanda Palmer \u2013 Amanda Palmer Goes Down Under')
item.set_info('Release date: 2011-01-21')
item.set_query(u'http://amandapalmer.bandcamp.com/album/amanda-palmer-goes-down-under?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/amanda-palmer-goes-down-under?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Amanda Palmer, The Young Punx, and Peaches \u2013 Map of Tasmania: The Remix Project')
item.set_info('Release date: 2011-04-10')
item.set_query(u'http://amandapalmer.bandcamp.com/album/map-of-tasmania-the-remix-project?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/map-of-tasmania-the-remix-project?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Neil Gaiman and Amanda Palmer \u2013 An Evening With Neil Gaiman and Amanda Palmer')
item.set_info('Release date: 2013-11-19')
item.set_query(u'http://amandapalmer.bandcamp.com/album/an-evening-with-neil-gaiman-and-amanda-palmer?pk=459')
item.set_url(u'http://amandapalmer.bandcamp.com/album/an-evening-with-neil-gaiman-and-amanda-palmer?pk=459')
expected.append_item(item)
s = bandcamp.DiscographyScraper.from_string('http://amandapalmer.bandcamp.com')
r = s.get_result()
self.assertEqual(expected, r)
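# Bandcamp discographies hosted on a custom (non-bandcamp.com) domain should scrape the same way.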
def test_band_discography_non_bandcamp_url(self):
expected = ListResult()
expected.set_scraper_name(None)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 All Delighted People EP')
item.set_info('Release date: 2010-08-20')
item.set_query(u'http://music.sufjan.com/album/all-delighted-people-ep?pk=459')
item.set_url(u'http://music.sufjan.com/album/all-delighted-people-ep?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 Illinois')
item.set_info('Release date: 2005-07-05')
item.set_query(u'http://music.sufjan.com/album/illinois?pk=459')
item.set_url(u'http://music.sufjan.com/album/illinois?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 Songs for Christmas')
item.set_info('Release date: 2006-11-21')
item.set_query(u'http://music.sufjan.com/album/songs-for-christmas?pk=459')
item.set_url(u'http://music.sufjan.com/album/songs-for-christmas?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 A Sun Came')
item.set_info('Release date: 2004-07-20')
item.set_query(u'http://music.sufjan.com/album/a-sun-came?pk=459')
item.set_url(u'http://music.sufjan.com/album/a-sun-came?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 The Avalanche')
item.set_info('Release date: 2006-07-11')
item.set_query(u'http://music.sufjan.com/album/the-avalanche?pk=459')
item.set_url(u'http://music.sufjan.com/album/the-avalanche?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 The BQE')
item.set_info('Release date: 2009-10-20')
item.set_query(u'http://music.sufjan.com/album/the-bqe?pk=459')
item.set_url(u'http://music.sufjan.com/album/the-bqe?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 Silver & Gold')
item.set_info('Release date: 2012-11-13')
item.set_query(u'http://music.sufjan.com/album/silver-gold?pk=459')
item.set_url(u'http://music.sufjan.com/album/silver-gold?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 Michigan')
item.set_info('Release date: 2003-07-01')
item.set_query(u'http://music.sufjan.com/album/michigan?pk=459')
item.set_url(u'http://music.sufjan.com/album/michigan?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 Carrie & Lowell')
item.set_info('Release date: 2015-03-31')
item.set_query(u'http://music.sufjan.com/album/carrie-lowell?pk=459')
item.set_url(u'http://music.sufjan.com/album/carrie-lowell?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 Enjoy Your Rabbit')
item.set_info('Release date: 2002-04-16')
item.set_query(u'http://music.sufjan.com/album/enjoy-your-rabbit?pk=459')
item.set_url(u'http://music.sufjan.com/album/enjoy-your-rabbit?pk=459')
expected.append_item(item)
item = expected.create_item()
item.set_name(u'Sufjan Stevens \u2013 The Age of Adz')
item.set_info('Release date: 2010-10-12')
item.set_query(u'http://music.sufjan.com/album/the-age-of-adz?pk=459')
item.set_url(u'http://music.sufjan.com/album/the-age-of-adz?pk=459')
expected.append_item(item)
s = bandcamp.DiscographyScraper('http://music.sufjan.com')
r = s.get_result()
self.assertEqual(expected, r)
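# A non-existent Bandcamp album URL should yield a NotFoundResult.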
def test_404(self):
expected = NotFoundResult()
expected.set_scraper_name(None)
s = bandcamp.ReleaseScraper.from_string('http://blubb.bla.com/album/blubb')
r = s.get_result()
self.assertEqual(expected, r)
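# Tests for the Musik-Sammler release scraper; the whole class is skipped until the scraper is fixed (see decorator).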
@unittest.skip("skipping Musik-Sammler tests until scraper is fixed")
class MusikSammlerTest(TestCase):
def test_simple_album(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('1994')
release_event.set_country('Niederlande')
expected.append_release_event(release_event)
expected.set_format(u'CD, Re-Release, Remastered')
label_id = expected.create_label_id()
label_id.set_label('EMI Records Ltd.')
label_id.append_catalogue_nr('7243 8 29752 2 9')
expected.append_label_id(label_id)
expected.set_title('Dark Side Of The Moon')
artist = expected.create_artist()
artist.set_name('Pink Floyd')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Rock')
expected.append_genre('Progressive Rock')
expected.append_genre('Psychedelic Rock')
expected.set_url('http://www.musik-sammler.de/media/830798/')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('(a) Speak To Me (b) Breathe')
track.set_length(237)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('On The Run')
track.set_length(215)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Time')
track.set_length(424)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('The Great Gig In The Sky')
track.set_length(287)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Money')
track.set_length(382)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Us And Them')
track.set_length(470)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Any Colour You Like')
track.set_length(205)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Brain Damage')
track.set_length(230)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Eclipse')
track.set_length(121)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/830798/')
r = s.get_result()
self.assertEqual(expected, r)
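# A two-disc release should produce a separate disc object for each disc.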
def test_multiple_discs(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2011')
release_event.set_country('Japan')
expected.append_release_event(release_event)
expected.set_format(u'2-CD, Pappschuber, Re-Release, Remastered, Digisleeve')
label_id = expected.create_label_id()
label_id.set_label('EMI Japan')
label_id.append_catalogue_nr('TOCP 71163 64')
expected.append_label_id(label_id)
expected.set_title('The Dark Side Of The Moon')
artist = expected.create_artist()
artist.set_name('Pink Floyd')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Rock')
expected.append_genre('Psychedelic Rock')
expected.set_url('http://www.musik-sammler.de/media/883773')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Speak To Me')
track.set_length(67)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Breathe (In The Air)')
track.set_length(169)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('On The Run')
track.set_length(225)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Time')
track.set_length(413)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('The Great Gig In The Sky')
track.set_length(284)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Money')
track.set_length(383)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Us And Them')
track.set_length(469)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Any Colour You Like')
track.set_length(206)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Brain Damage')
track.set_length(226)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Eclipse')
track.set_length(132)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Speak To Me')
track.set_length(165)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Breathe (In The Air)')
track.set_length(170)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('On The Run')
track.set_length(308)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Time')
track.set_length(391)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('The Great Gig In The Sky')
track.set_length(410)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Money')
track.set_length(521)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Us And Them')
track.set_length(489)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Any Colour You Like')
track.set_length(490)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Brain Damage')
track.set_length(223)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Eclipse')
track.set_length(139)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/883773')
r = s.get_result()
self.assertEqual(expected, r)
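# On a split release every track should carry its own artist credit.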
def test_track_artist(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2002')
release_event.set_country(u'\xd6sterreich')
expected.append_release_event(release_event)
expected.set_format(u'Split-CD, Cardsleeve')
label_id = expected.create_label_id()
label_id.set_label('Din Records')
label_id.append_catalogue_nr('din cds 2 / EFA 51665-2')
expected.append_label_id(label_id)
expected.set_title('Icol Diston')
artist = expected.create_artist()
artist.set_name('Arovane')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
artist = expected.create_artist()
artist.set_name('Dynamo')
artist.set_various(False)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Techno')
expected.append_genre('Electronic')
expected.append_genre('Ambient')
expected.append_genre('Electro')
expected.append_genre('Freeform')
expected.set_url('http://www.musik-sammler.de/media/512755')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('I.O.')
track.set_length(374)
track_artist = expected.create_artist()
track_artist.set_name('Arovane')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Parf')
track.set_length(374)
track_artist = expected.create_artist()
track_artist.set_name('Arovane')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Torn')
track.set_length(417)
track_artist = expected.create_artist()
track_artist.set_name('Arovane')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Andar')
track.set_length(464)
track_artist = expected.create_artist()
track_artist.set_name('Arovane')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Icol Diston')
track.set_length(19)
track_artist = expected.create_artist()
track_artist.set_name('Arovane')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Yua:E')
track.set_length(491)
track_artist = expected.create_artist()
track_artist.set_name('Arovane')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Icol Vern')
track.set_length(303)
track_artist = expected.create_artist()
track_artist.set_name('Arovane')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Nacrath')
track.set_length(298)
track_artist = expected.create_artist()
track_artist.set_name('Arovane')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Acval')
track.set_length(306)
track_artist = expected.create_artist()
track_artist.set_name('Arovane')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title(u'Au\xdfen Vor Amx')
track.set_length(560)
track_artist = expected.create_artist()
track_artist.set_name('Dynamo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('No. 8 Amx')
track.set_length(825)
track_artist = expected.create_artist()
track_artist.set_name(None)
track_artist.set_various(True)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/512755')
r = s.get_result()
self.assertEqual(expected, r)
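# A various-artists release without per-track credits should only flag the release artist as various.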
def test_various_artists(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date(None)
release_event.set_country('Deutschland')
expected.append_release_event(release_event)
expected.set_format(u'2-CD, Erstauflage')
label_id = expected.create_label_id()
label_id.set_label('BCM Records GmbH')
label_id.append_catalogue_nr('55359')
expected.append_label_id(label_id)
expected.set_title('Grooves Loops & Patterns Vol.1 + Vol.2')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Techno')
expected.append_genre('Electronic')
expected.append_genre('Breakbeat')
expected.append_genre('Electro')
expected.set_url('http://www.musik-sammler.de/media/313881')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Latin Disco [130 Bpm]')
track.set_length(174)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Straight Disco [131 Bpm]')
track.set_length(187)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Medium Disco [116 Bpm]')
track.set_length(195)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Slow Disco [87 Bpm]')
track.set_length(215)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('UK Happy Disco I [118 Bpm]')
track.set_length(238)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('UK Happy Disco II [116 Bpm]')
track.set_length(242)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('UK Happy Disco III [121 Bpm]')
track.set_length(250)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Sexy Disco [107 Bpm]')
track.set_length(288)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Ethno Disco [98 Bpm]')
track.set_length(275)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Us Disco [120 Bpm]')
track.set_length(160)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Cuba Disco [122 Bpm]')
track.set_length(169)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Dance Floor Disco I [125 Bpm]')
track.set_length(242)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Dance Floor Disco II [122,5 Bpm]')
track.set_length(240)
disc.append_track(track)
expected.append_disc(disc)
disc = expected.create_disc()
disc.set_number(2)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('Straight Rock [120 Bpm]')
track.set_length(175)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Medium Rock [132 Bpm]')
track.set_length(158)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Fast Rock [160 Bpm]')
track.set_length(162)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Rock Ballad [71 Bpm]')
track.set_length(238)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Medium Rock Balad [106 Bpm]')
track.set_length(195)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Funk Rock [108 Bpm]')
track.set_length(191)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Latin Rock [122 Bpm]')
track.set_length(175)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Hard Rock Shuffle [132 Bpm]')
track.set_length(158)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Medium Rock Shuffle [99 Bpm]')
track.set_length(170)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Rhythm & Blues [118 Bpm]')
track.set_length(159)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('5/4 Freak Rock [165 Bpm]')
track.set_length(140)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Rockabilly [123 Bpm]')
track.set_length(154)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('Country Rock [92 Bpm]')
track.set_length(204)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/313881')
r = s.get_result()
self.assertEqual(expected, r)
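# A various-artists soundtrack should list a main artist for every track.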
def test_va_album(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('1986')
release_event.set_country('USA')
expected.append_release_event(release_event)
expected.set_format(u'LP')
label_id = expected.create_label_id()
label_id.set_label('Capitol Records, Inc.')
label_id.append_catalogue_nr('SV-12499')
expected.append_label_id(label_id)
expected.set_title('Iron Eagle - Original Motion Picture Soundtrack')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Soundtrack')
expected.set_url('http://www.musik-sammler.de/media/43567')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('One Vision')
track.set_length(240)
track_artist = expected.create_artist()
track_artist.set_name('Queen')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Iron Eagle (Never Say Die)')
track.set_length(208)
track_artist = expected.create_artist()
track_artist.set_name('King Kobra')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('These Are The Good Times')
track.set_length(225)
track_artist = expected.create_artist()
track_artist.set_name('Eric Martin')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('Maniac House')
track.set_length(294)
track_artist = expected.create_artist()
track_artist.set_name('Katrina & The Waves')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Intense')
track.set_length(270)
track_artist = expected.create_artist()
track_artist.set_name('George Clinton')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Hide The Rainbow')
track.set_length(235)
track_artist = expected.create_artist()
track_artist.set_name('Dio')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title("It's Too Late")
track.set_length(186)
track_artist = expected.create_artist()
track_artist.set_name('Helix')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title('Road Of The Gypsy')
track.set_length(268)
track_artist = expected.create_artist()
track_artist.set_name('Adrenalin')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Love Can Make You Cry')
track.set_length(258)
track_artist = expected.create_artist()
track_artist.set_name('Urgent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('This Raging Fire')
track.set_length(246)
track_artist = expected.create_artist()
track_artist.set_name('Jon Butcher Axis')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/43567')
r = s.get_result()
self.assertEqual(expected, r)
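# Featured guests should be typed as ArtistTypes.FEATURING rather than MAIN.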
def test_featuring_track_artist(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2008')
release_event.set_country('Deutschland')
expected.append_release_event(release_event)
expected.set_format(u'CD, Heftbeilage, Digipak')
label_id = expected.create_label_id()
label_id.set_label('Batbeliever Releases')
label_id.append_catalogue_nr('BAT 048')
expected.append_label_id(label_id)
expected.set_title('Gothic File 05')
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Dark Wave')
expected.append_genre('Gothic')
expected.set_url('http://www.musik-sammler.de/media/257802')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title('O Varium Fortune')
track.set_length(352)
track_artist = expected.create_artist()
track_artist.set_name('Corvus Corax')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('Zaubererbruder [EP-Version]')
track.set_length(285)
track_artist = expected.create_artist()
track_artist.set_name('ASP')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Eric Fish')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Mein Gral')
track.set_length(236)
track_artist = expected.create_artist()
track_artist.set_name('Megaherz')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title(u'Komm S\xfc\xdfer Tod')
track.set_length(275)
track_artist = expected.create_artist()
track_artist.set_name('Eisbrecher')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('Get Some Sleep [Exclusive Version]')
track.set_length(252)
track_artist = expected.create_artist()
track_artist.set_name('Mono Inc.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Prayer Before Birth [Underwaterpilots Remix]')
track.set_length(263)
track_artist = expected.create_artist()
track_artist.set_name('Anne Clark')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('Haufenweise Scheisse (XL)')
track.set_length(320)
track_artist = expected.create_artist()
track_artist.set_name(u'Grossstadtgefl\xfcster')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title(u'D\xe9cadence')
track.set_length(191)
track_artist = expected.create_artist()
track_artist.set_name('Charles De Goal')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Burning Up')
track.set_length(248)
track_artist = expected.create_artist()
track_artist.set_name('Ladytron')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Horizon [Remastered]')
track.set_length(326)
track_artist = expected.create_artist()
track_artist.set_name('Black Orchid')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Play Games')
track.set_length(212)
track_artist = expected.create_artist()
track_artist.set_name('The Rorschach Garden')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('All Ends')
track.set_length(256)
track_artist = expected.create_artist()
track_artist.set_name('Imatem')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('All About The Now')
track.set_length(289)
track_artist = expected.create_artist()
track_artist.set_name('Miserylab')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title('Hymn Of The Shades')
track.set_length(202)
track_artist = expected.create_artist()
track_artist.set_name('Descendants Of Cain')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Bleed')
track.set_length(244)
track_artist = expected.create_artist()
track_artist.set_name('ELA')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('Never Stop Crying')
track.set_length(254)
track_artist = expected.create_artist()
track_artist.set_name("Jennie Tebler's Out Of Oblivion")
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title('Killhoney')
track.set_length(299)
track_artist = expected.create_artist()
track_artist.set_name('End Of Green')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/257802')
r = s.get_result()
self.assertEqual(expected, r)
def test_album_with_multiple_unsplit_artist_names(self):
expected = ReleaseResult()
expected.set_scraper_name(None)
release_event = expected.create_release_event()
release_event.set_date('2005')
release_event.set_country('USA')
expected.append_release_event(release_event)
expected.set_format(u'CD')
label_id = expected.create_label_id()
label_id.set_label('G Unit / Interscope Records')
expected.append_label_id(label_id)
expected.set_title("Get Rich Or Die Tryin' (Music From And Inspired By The Motion Picture)")
artist = expected.create_artist()
artist.set_name(None)
artist.set_various(True)
artist.append_type(expected.ArtistTypes.MAIN)
expected.append_release_artist(artist)
expected.append_genre('Soundtrack')
expected.set_url('http://www.musik-sammler.de/media/154887')
disc = expected.create_disc()
disc.set_number(1)
disc.set_title(None)
track = disc.create_track()
track.set_number('1')
track.set_title("Hustler's Ambition")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('2')
track.set_title('What If')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('3')
track.set_title('Things Change')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Spider Loc')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent & Lloyd Banks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('4')
track.set_title('You Already Know')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Lloyd Banks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent & Young Buck')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('5')
track.set_title('When Death Becomes You')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('M.O.P.')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('6')
track.set_title('Have A Party')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Mobb Deep')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent & Nate Dogg')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('7')
track.set_title('We Both Think Alike')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Olivia')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('8')
track.set_title("Don't Need No Help")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Young Buck')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('9')
track.set_title('Get Low')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Lloyd Banks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('10')
track.set_title('Fake Love')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Tony Yayo')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('11')
track.set_title('Window Shopper')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('12')
track.set_title('Born Alone, Die Alone')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Lloyd Banks')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('13')
track.set_title('You A Shooter')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('Mobb Deep')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('14')
track.set_title("I Don't Know Officer")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
track_artist = expected.create_artist()
track_artist.set_name('Lloyd Banks, Prodigy, Spider Loc & Mase')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.FEATURING)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('15')
track.set_title('Talk About Me')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('16')
track.set_title('When It Rains It Pours')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('17')
track.set_title('Best Friend')
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
track = disc.create_track()
track.set_number('18')
track.set_title("I'll Whip Ya Head Boy")
track.set_length(None)
track_artist = expected.create_artist()
track_artist.set_name('50 Cent & Young Buck')
track_artist.set_various(False)
track_artist.append_type(expected.ArtistTypes.MAIN)
track.append_artist(track_artist)
disc.append_track(track)
expected.append_disc(disc)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/154887')
r = s.get_result()
self.assertEqual(expected, r)
def test_404(self):
expected = NotFoundResult()
expected.set_scraper_name(None)
s = musiksammler.ReleaseScraper.from_string('http://www.musik-sammler.de/media/99999999999999')
r = s.get_result()
self.assertEqual(expected, r)
def test_search_scraper(self):
s = musiksammler.SearchScraper('love')
r = s.get_result()
self.assertTrue(len(r.get_items()) > 0)
|
Slack06/yadg
|
descgen/tests.py
|
Python
|
mit
| 429,432
|
# -*- coding: utf-8 -*-
#
# -----------------------------------------------------------------------------------
# Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# -----------------------------------------------------------------------------------
import sys
sys.path.append("..")
from hackathon import (
RequiredFeature,
Component,
Context,
)
from hackathon.database.models import (
Experiment,
DockerContainer,
HackathonAzureKey,
PortBinding,
DockerHostServer,
)
from hackathon.constants import (
EStatus,
PortBindingType,
VEStatus,
HEALTH,
)
from compiler.ast import (
flatten,
)
from threading import (
Lock,
)
from hackathon.template.docker_template_unit import (
DockerTemplateUnit,
)
from hackathon.azureformation.endpoint import (
Endpoint
)
from docker_formation_base import (
DockerFormationBase,
)
from hackathon.azureformation.service import (
Service,
)
from hackathon.hackathon_response import (
internal_server_error
)
from hackathon.constants import (
HEALTH_STATUS,
)
import json
import requests
from datetime import timedelta
class HostedDockerFormation(DockerFormationBase, Component):
template_manager = RequiredFeature("template_manager")
hackathon_manager = RequiredFeature("hackathon_manager")
scheduler = RequiredFeature("scheduler")
"""
Docker resource management based on docker remote api v1.18
Host resources are required. An Azure key is required in the case of Azure.
"""
application_json = {'content-type': 'application/json'}
host_ports = []
host_port_max_num = 30
docker_host_manager = RequiredFeature("docker_host_manager")
def __init__(self):
self.lock = Lock()
def report_health(self):
"""Report health of DockerHostServers
:rtype: dict
:return: health status item of docker: OK when all servers are running, Warning if only some are running, Error if no server is running
"""
try:
hosts = self.db.find_all_objects(DockerHostServer)
alive = 0
for host in hosts:
if self.ping(host):
alive += 1
if alive == len(hosts):
return {
HEALTH.STATUS: HEALTH_STATUS.OK
}
elif alive > 0:
return {
HEALTH.STATUS: HEALTH_STATUS.WARNING,
HEALTH.DESCRIPTION: 'at least one docker host server is down'
}
else:
return {
HEALTH.STATUS: HEALTH_STATUS.ERROR,
HEALTH.DESCRIPTION: 'all docker host servers are down'
}
except Exception as e:
return {
HEALTH.STATUS: HEALTH_STATUS.ERROR,
HEALTH.DESCRIPTION: e.message
}
def get_available_host_port(self, docker_host, private_port):
"""
We use a double check to ensure ports do not conflict: first we query the ports in use from the host
machine, but with multiple threads the interval between two requests can be so short that the host machine
has not yet registered the port handed out to the first thread, so a second thread may be given the same port.
To avoid this, we use the static variable host_ports to cache the latest host_port_max_num assigned ports.
Every thread's access to host_ports is synchronized.
To save space, we release the cached ports once their number exceeds host_port_max_num.
:param docker_host:
:param private_port:
:return:
"""
self.log.debug("try to assign docker port %d on server %r" % (private_port, docker_host))
containers = self.__containers_info(docker_host)
host_ports = flatten(map(lambda p: p['Ports'], containers))
# todo if azure return -1
def sub(port):
return port["PublicPort"] if "PublicPort" in port else -1
host_public_ports = map(lambda x: sub(x), host_ports)
return self.__get_available_host_port(host_public_ports, private_port)
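# Illustrative note (assumption, not from the original code): each entry in the "Ports"
# list returned by the docker remote API (GET /containers/json) typically looks like
# {"PrivatePort": 8080, "PublicPort": 18080, "Type": "tcp"}; entries without a host
# binding omit "PublicPort", which is why sub() above falls back to -1.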
def stop(self, name, **kwargs):
"""
stop a container
:param name: container's name
:param docker_host: host machine where container running
:return:
"""
container = kwargs["container"]
expr_id = kwargs["expr_id"]
docker_host = self.docker_host_manager.get_host_server_by_id(container.host_server_id)
if self.__get_container(name, docker_host) is not None:
containers_url = '%s/containers/%s/stop' % (self.get_vm_url(docker_host), name)
req = requests.post(containers_url)
self.log.debug(req.content)
self.__stop_container(expr_id, container, docker_host)
def delete(self, name, **kwargs):
"""
delete a container
:param name:
:param docker_host:
:return:
"""
container = kwargs["container"]
expr_id = kwargs["expr_id"]
docker_host = self.docker_host_manager.get_host_server_by_id(container.host_server_id)
containers_url = '%s/containers/%s?force=1' % (self.get_vm_url(docker_host), name)
req = requests.delete(containers_url)
self.log.debug(req.content)
self.__stop_container(expr_id, container, docker_host)
def start(self, unit, **kwargs):
"""
In this function, we create a container and then start a container
:param unit: docker template unit
:param docker_host:
:return:
"""
virtual_environment = kwargs["virtual_environment"]
hackathon = kwargs["hackathon"]
experiment = kwargs["experiment"]
container_name = unit.get_name()
host_server = self.docker_host_manager.get_available_docker_host(1, hackathon)
container = DockerContainer(experiment,
name=container_name,
host_server_id=host_server.id,
virtual_environment=virtual_environment,
image=unit.get_image_with_tag())
self.db.add_object(container)
self.db.commit()
# port binding
ps = map(lambda p:
[p.port_from, p.port_to],
self.__assign_ports(experiment, host_server, virtual_environment, unit.get_ports()))
# guacamole config
guacamole = unit.get_remote()
port_cfg = filter(lambda p:
p[DockerTemplateUnit.PORTS_PORT] == guacamole[DockerTemplateUnit.REMOTE_PORT],
unit.get_ports())
if len(port_cfg) > 0:
gc = {
"displayname": container_name,
"name": container_name,
"protocol": guacamole[DockerTemplateUnit.REMOTE_PROTOCOL],
"hostname": host_server.public_ip,
"port": port_cfg[0]["public_port"]
}
if DockerTemplateUnit.REMOTE_USERNAME in guacamole:
gc["username"] = guacamole[DockerTemplateUnit.REMOTE_USERNAME]
if DockerTemplateUnit.REMOTE_PASSWORD in guacamole:
gc["password"] = guacamole[DockerTemplateUnit.REMOTE_PASSWORD]
# save guacamole config into DB
virtual_environment.remote_paras = json.dumps(gc)
exist = self.__get_container(container_name, host_server)
if exist is not None:
container.container_id = exist["Id"]
host_server.container_count += 1
self.db.commit()
else:
container_config = unit.get_container_config()
# create container
try:
container_create_result = self.__create(host_server, container_config, container_name)
except Exception as e:
self.log.error(e)
self.log.error("container %s fail to create" % container_name)
return None
container.container_id = container_create_result["Id"]
# start container
try:
self.__start(host_server, container_create_result["Id"])
host_server.container_count += 1
self.db.commit()
except Exception as e:
self.log.error(e)
self.log.error("container %s fail to start" % container["Id"])
return None
# check
if self.__get_container(container_name, host_server) is None:
self.log.error(
"container %s has started, but can not find it in containers' info, maybe it exited again."
% container_name)
return None
self.log.debug("starting container %s is ended ... " % container_name)
virtual_environment.status = VEStatus.RUNNING
self.db.commit()
return container
def get_vm_url(self, docker_host):
return 'http://%s:%d' % (docker_host.public_dns, docker_host.public_docker_api_port)
def pull_image(self, context):
docker_host, image_name, tag = context.docker_host, context.image_name, context.tag
pull_image_url = self.get_vm_url(docker_host) + "/images/create?fromImage=" + image_name + '&tag=' + tag
self.log.debug(" send request to pull image:" + pull_image_url)
return requests.post(pull_image_url)
def get_pulled_images(self, docker_host):
get_images_url = self.get_vm_url(docker_host) + "/images/json?all=0"
current_images_info = json.loads(self.util.get_remote(get_images_url)) # [{},{},{}]
current_images_tags = map(lambda x: x['RepoTags'], current_images_info) # [[],[],[]]
return flatten(current_images_tags)  # [image:tag, image:tag, ...]
def ensure_images(self):
hackathons = self.hackathon_manager.get_online_hackathons()
map(lambda h: self.__ensure_images_for_hackathon(h), hackathons)
def check_container_status_is_normal(self, docker_container):
"""check container's running status on docker host
if status is Running or Restarting returns True , else returns False
:type docker_container: DockerContainer
:param docker_container: the container that you want to check
:type boolean
:return True: the container running status is running or restarting , else returns False
"""
docker_host = self.db.find_first_object_by(DockerHostServer, id=docker_container.host_server_id)
if docker_host is not None:
container_info = self.__get_container_info_by_container_id(docker_host, docker_container.container_id)
if container_info is None:
return False
return container_info['State']['Running'] or container_info['State']['Restarting']
else:
return False
def ping(self, docker_host):
"""Ping docker host to check running status
:type docker_host : DockerHostServer
:param docker_host: the hots that you want to check docker service running status
:type boolean
:return: True: running status is OK, else return False
"""
try:
ping_url = '%s/_ping' % self.__get_vm_url(docker_host)
req = requests.get(ping_url)
self.log.debug(req.content)
return req.status_code == 200 and req.content == 'OK'
except Exception as e:
self.log.error(e)
return False
# --------------------------------------------- helper function ---------------------------------------------#
def __name_match(self, id, lists):
for list in lists:
if id in list:
return True
return False
def __get_schedule_job_id(self, hackathon):
return "pull_images_for_hackathon_%s" % hackathon.id
def __ensure_images_for_hackathon(self, hackathon):
# only ensure images for hackathons whose alauda integration is disabled
if hackathon.is_alauda_enabled():
self.log.debug("schedule job of hackathon '%s(%d)' removed for alauda enabled" %
(hackathon.name, hackathon.id))
self.scheduler.remove_job(self.__get_schedule_job_id(hackathon))
return
self.log.debug("adding schedule job to ensure images for hackathon [%d]%s" % (hackathon.id, hackathon.name))
next_run_time = self.util.get_now() + timedelta(seconds=3)
context = Context(hackathon_id=hackathon.id)
self.scheduler.add_interval(feature="template_manager",
method="pull_images_for_hackathon",
id=self.__get_schedule_job_id(hackathon),
context=context,
next_run_time=next_run_time,
minutes=60)
def __get_vm_url(self, docker_host):
return 'http://%s:%d' % (docker_host.public_dns, docker_host.public_docker_api_port)
def __clear_ports_cache(self):
"""
Cache assigned ports; once their number exceeds host_port_max_num, release them.
But if a thread is currently applying for new ports, we postpone the release to the next loop:
the host machine has not updated its port information yet,
so releasing the cache now would lose the newly assigned ports.
:return:
"""
num = self.db.count(Experiment, Experiment.status == EStatus.STARTING)
if num > 0:
self.log.debug("there are %d experiment is starting, host ports will updated in next loop" % num)
return
self.log.debug("-----release ports cache successfully------")
self.host_ports = []
def __stop_container(self, expr_id, container, docker_host):
self.__release_ports(expr_id, docker_host)
docker_host.container_count -= 1
if docker_host.container_count < 0:
docker_host.container_count = 0
self.db.commit()
def __containers_info(self, docker_host):
containers_url = '%s/containers/json' % self.get_vm_url(docker_host)
req = requests.get(containers_url)
self.log.debug(req.content)
return self.util.convert(json.loads(req.content))
def __get_available_host_port(self, port_bindings, port):
"""
Simple lock mechanism: access to the static host_ports variable is synchronized. Because port_bindings
is not real-time, we cache the most recently assigned ports; once the cache holds more than
host_port_max_num entries, we release it to save space.
:param port_bindings:
:param port:
:return:
"""
self.lock.acquire()
try:
host_port = port + 10000
while host_port in port_bindings or host_port in self.host_ports:
host_port += 1
if host_port >= 65535:
self.log.error("port used up on this host server")
raise Exception("no port available")
if len(self.host_ports) >= self.host_port_max_num:
self.__clear_ports_cache()
self.host_ports.append(host_port)
self.log.debug("host_port is %d " % host_port)
return host_port
finally:
self.lock.release()
def __get_container(self, name, docker_host):
containers = self.__containers_info(docker_host)
return next((c for c in containers if name in c["Names"] or '/' + name in c["Names"]), None)
def __create(self, docker_host, container_config, container_name):
"""
only create a container, in this step, we cannot start a container.
:param docker_host:
:param container_config:
:param container_name:
:return:
"""
containers_url = '%s/containers/create?name=%s' % (self.get_vm_url(docker_host), container_name)
req = requests.post(containers_url, data=json.dumps(container_config), headers=self.application_json)
self.log.debug(req.content)
container = json.loads(req.content)
if container is None:
raise AssertionError("container is none")
return container
def __start(self, docker_host, container_id):
"""
start a container
:param docker_host:
:param container_id:
:return:
"""
url = '%s/containers/%s/start' % (self.get_vm_url(docker_host), container_id)
req = requests.post(url, headers=self.application_json)
self.log.debug(req.content)
def __get_available_public_ports(self, expr_id, host_server, host_ports):
self.log.debug("starting to get azure ports")
ep = Endpoint(Service(self.load_azure_key_id(expr_id)))
host_server_name = host_server.vm_name
host_server_dns = host_server.public_dns.split('.')[0]
public_endpoints = ep.assign_public_endpoints(host_server_dns, 'Production', host_server_name, host_ports)
if not isinstance(public_endpoints, list):
self.log.debug("failed to get public ports")
return internal_server_error('cannot get public ports')
self.log.debug("public ports : %s" % public_endpoints)
return public_endpoints
def load_azure_key_id(self, expr_id):
expr = self.db.get_object(Experiment, expr_id)
hak = self.db.find_first_object_by(HackathonAzureKey, hackathon_id=expr.hackathon_id)
return hak.azure_key_id
def __assign_ports(self, expr, host_server, ve, port_cfg):
"""
assign ports from host server
:param expr:
:param host_server:
:param ve:
:param port_cfg:
:return:
"""
# get 'host_port'
map(lambda p:
p.update(
{DockerTemplateUnit.PORTS_HOST_PORT: self.get_available_host_port(host_server, p[
DockerTemplateUnit.PORTS_PORT])}
),
port_cfg)
# get 'public' cfg
public_ports_cfg = filter(lambda p: DockerTemplateUnit.PORTS_PUBLIC in p, port_cfg)
host_ports = [u[DockerTemplateUnit.PORTS_HOST_PORT] for u in public_ports_cfg]
if self.util.safe_get_config("environment", "prod") == "local":
map(lambda cfg: cfg.update({DockerTemplateUnit.PORTS_PUBLIC_PORT: cfg[DockerTemplateUnit.PORTS_HOST_PORT]}),
public_ports_cfg)
else:
public_ports = self.__get_available_public_ports(expr.id, host_server, host_ports)
for i in range(len(public_ports_cfg)):
public_ports_cfg[i][DockerTemplateUnit.PORTS_PUBLIC_PORT] = public_ports[i]
binding_dockers = []
# update port binding
for public_cfg in public_ports_cfg:
binding_cloud_service = PortBinding(name=public_cfg[DockerTemplateUnit.PORTS_NAME],
port_from=public_cfg[DockerTemplateUnit.PORTS_PUBLIC_PORT],
port_to=public_cfg[DockerTemplateUnit.PORTS_HOST_PORT],
binding_type=PortBindingType.CLOUD_SERVICE,
binding_resource_id=host_server.id,
virtual_environment=ve,
experiment=expr,
url=public_cfg[DockerTemplateUnit.PORTS_URL]
if DockerTemplateUnit.PORTS_URL in public_cfg else None)
binding_docker = PortBinding(name=public_cfg[DockerTemplateUnit.PORTS_NAME],
port_from=public_cfg[DockerTemplateUnit.PORTS_HOST_PORT],
port_to=public_cfg[DockerTemplateUnit.PORTS_PORT],
binding_type=PortBindingType.DOCKER,
binding_resource_id=host_server.id,
virtual_environment=ve,
experiment=expr)
binding_dockers.append(binding_docker)
self.db.add_object(binding_cloud_service)
self.db.add_object(binding_docker)
self.db.commit()
local_ports_cfg = filter(lambda p: DockerTemplateUnit.PORTS_PUBLIC not in p, port_cfg)
for local_cfg in local_ports_cfg:
port_binding = PortBinding(name=local_cfg[DockerTemplateUnit.PORTS_NAME],
port_from=local_cfg[DockerTemplateUnit.PORTS_HOST_PORT],
port_to=local_cfg[DockerTemplateUnit.PORTS_PORT],
binding_type=PortBindingType.DOCKER,
binding_resource_id=host_server.id,
virtual_environment=ve,
experiment=expr)
binding_dockers.append(port_binding)
self.db.add_object(port_binding)
self.db.commit()
return binding_dockers
def __release_ports(self, expr_id, host_server):
"""
release the specified experiment's ports
"""
self.log.debug("Begin to release ports: expr_id: %d, host_server: %r" % (expr_id, host_server))
ports_binding = self.db.find_all_objects_by(PortBinding, experiment_id=expr_id)
if ports_binding is not None:
docker_binding = filter(
lambda u: self.util.safe_get_config("environment", "prod") != "local" and u.binding_type == 1,
ports_binding)
ports_to = [d.port_to for d in docker_binding]
if len(ports_to) != 0:
self.__release_public_ports(expr_id, host_server, ports_to)
for port in ports_binding:
self.db.delete_object(port)
self.db.commit()
self.log.debug("End to release ports: expr_id: %d, host_server: %r" % (expr_id, host_server))
def __release_public_ports(self, expr_id, host_server, host_ports):
ep = Endpoint(Service(self.load_azure_key_id(expr_id)))
host_server_name = host_server.vm_name
host_server_dns = host_server.public_dns.split('.')[0]
self.log.debug("starting to release ports ... ")
ep.release_public_endpoints(host_server_dns, 'Production', host_server_name, host_ports)
def __get_container_info_by_container_id(self, docker_host, container_id):
"""get a container info by container_id from a docker host
:type docker_host: str|unicode
:param: the docker host which you want to search container from
:type container_id: str|unicode
:param as a parameter that you want to search container though docker remote API
:return dic object of the container info if not None
"""
try:
get_container_url = self.get_vm_url(docker_host) + "/containers/%s/json" % container_id  # docker remote API: inspect a container
req = requests.get(get_container_url)
if 200 <= req.status_code < 300:
container_info = json.loads(req.content)
return container_info
return None
except Exception as ex:
self.log.error(ex)
return None
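# A minimal standalone sketch (not part of the original module; all names below are
# illustrative) of the port selection strategy documented in get_available_host_port
# and __get_available_host_port above: start at private_port + 10000 and walk upwards
# past ports that are already bound on the host or cached from previous assignments.
def _pick_host_port_sketch(private_port, bound_ports, cached_ports, upper_bound=65535):
    candidate = private_port + 10000
    while candidate in bound_ports or candidate in cached_ports:
        candidate += 1
    if candidate >= upper_bound:
        raise Exception("no port available")
    return candidate
# Example: with private port 8080 requested, 18080 already bound and 18081 cached,
# the sketch yields 18082.
# assert _pick_host_port_sketch(8080, {18080}, {18081}) == 18082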
|
xunxunzgq/open-hackathon-bak_01
|
open-hackathon-server/src/hackathon/docker/hosted_docker.py
|
Python
|
mit
| 24,562
|
# Human friendly input/output in Python.
#
# Author: Peter Odding <peter@peterodding.com>
# Last Change: March 2, 2020
# URL: https://humanfriendly.readthedocs.io
"""
Support for deprecation warnings when importing names from old locations.
When software evolves, things tend to move around. This is usually detrimental
to backwards compatibility (in Python this primarily manifests itself as
:exc:`~exceptions.ImportError` exceptions).
While backwards compatibility is very important, it should not get in the way
of progress. It would be great to have the agility to move things around
without breaking backwards compatibility.
This is where the :mod:`humanfriendly.deprecation` module comes in: It enables
the definition of backwards compatible aliases that emit a deprecation warning
when they are accessed.
The way it works is that it wraps the original module in a :class:`DeprecationProxy`
object that defines a :func:`~DeprecationProxy.__getattr__()` special method to
override attribute access of the module.
"""
# Standard library modules.
import collections
import functools
import importlib
import inspect
import sys
import types
import warnings
# Modules included in our package.
from humanfriendly.text import format
# Registry of known aliases (used by humanfriendly.sphinx).
REGISTRY = collections.defaultdict(dict)
# Public identifiers that require documentation.
__all__ = ("DeprecationProxy", "define_aliases", "deprecated_args", "get_aliases", "is_method")
def define_aliases(module_name, **aliases):
"""
Update a module with backwards compatible aliases.
:param module_name: The ``__name__`` of the module (a string).
:param aliases: Each keyword argument defines an alias. The values
are expected to be "dotted paths" (strings).
The behavior of this function depends on whether the Sphinx documentation
generator is active, because the use of :class:`DeprecationProxy` to shadow the
real module in :data:`sys.modules` has the unintended side effect of
breaking autodoc support for ``:data:`` members (module variables).
To avoid breaking Sphinx the proxy object is omitted and instead the
aliased names are injected into the original module namespace, to make sure
that imports can be satisfied when the documentation is being rendered.
If you run into cyclic dependencies caused by :func:`define_aliases()` when
running Sphinx, you can try moving the call to :func:`define_aliases()` to
the bottom of the Python module you're working on.
"""
module = sys.modules[module_name]
proxy = DeprecationProxy(module, aliases)
# Populate the registry of aliases.
for name, target in aliases.items():
REGISTRY[module.__name__][name] = target
# Avoid confusing Sphinx.
if "sphinx" in sys.modules:
for name, target in aliases.items():
setattr(module, name, proxy.resolve(target))
else:
# Install a proxy object to raise DeprecationWarning.
sys.modules[module_name] = proxy
def get_aliases(module_name):
"""
Get the aliases defined by a module.
:param module_name: The ``__name__`` of the module (a string).
:returns: A dictionary with string keys and values:
1. Each key gives the name of an alias
created for backwards compatibility.
2. Each value gives the dotted path of
the proper location of the identifier.
An empty dictionary is returned for modules that
don't define any backwards compatible aliases.
"""
return REGISTRY.get(module_name, {})
def deprecated_args(*names):
"""
Deprecate positional arguments without dropping backwards compatibility.
:param names:
The positional arguments to :func:`deprecated_args()` give the names of
the positional arguments that the to-be-decorated function should warn
about being deprecated and translate to keyword arguments.
:returns: A decorator function specialized to `names`.
The :func:`deprecated_args()` decorator function was created to make it
easy to switch from positional arguments to keyword arguments [#]_ while
preserving backwards compatibility [#]_ and informing call sites
about the change.
.. [#] Increased flexibility is the main reason why I find myself switching
from positional arguments to (optional) keyword arguments as my code
evolves to support more use cases.
.. [#] In my experience positional argument order implicitly becomes part
of API compatibility whether intended or not. While this makes sense
for functions that over time adopt more and more optional arguments,
at a certain point it becomes an inconvenience to code maintenance.
Here's an example of how to use the decorator::
@deprecated_args('text')
def report_choice(**options):
print(options['text'])
When the decorated function is called with positional arguments
a deprecation warning is given::
>>> report_choice('this will give a deprecation warning')
DeprecationWarning: report_choice has deprecated positional arguments, please switch to keyword arguments
this will give a deprecation warning
But when the function is called with keyword arguments no deprecation
warning is emitted::
>>> report_choice(text='this will not give a deprecation warning')
this will not give a deprecation warning
"""
def decorator(function):
def translate(args, kw):
# Raise TypeError when too many positional arguments are passed to the decorated function.
if len(args) > len(names):
raise TypeError(
format(
"{name} expected at most {limit} arguments, got {count}",
name=function.__name__,
limit=len(names),
count=len(args),
)
)
# Emit a deprecation warning when positional arguments are used.
if args:
warnings.warn(
format(
"{name} has deprecated positional arguments, please switch to keyword arguments",
name=function.__name__,
),
category=DeprecationWarning,
stacklevel=3,
)
# Translate positional arguments to keyword arguments.
for name, value in zip(names, args):
kw[name] = value
if is_method(function):
@functools.wraps(function)
def wrapper(*args, **kw):
"""Wrapper for instance methods."""
args = list(args)
self = args.pop(0)
translate(args, kw)
return function(self, **kw)
else:
@functools.wraps(function)
def wrapper(*args, **kw):
"""Wrapper for module level functions."""
translate(args, kw)
return function(**kw)
return wrapper
return decorator
def is_method(function):
"""Check if the expected usage of the given function is as an instance method."""
try:
# Python 3.3 and newer.
signature = inspect.signature(function)
return "self" in signature.parameters
except AttributeError:
# Python 3.2 and older.
metadata = inspect.getargspec(function)
return "self" in metadata.args
class DeprecationProxy(types.ModuleType):
"""Emit deprecation warnings for imports that should be updated."""
def __init__(self, module, aliases):
"""
Initialize a :class:`DeprecationProxy` object.
:param module: The original module object.
:param aliases: A dictionary of aliases.
"""
# Initialize our superclass.
super(DeprecationProxy, self).__init__(name=module.__name__)
# Store initializer arguments.
self.module = module
self.aliases = aliases
def __getattr__(self, name):
"""
Override module attribute lookup.
:param name: The name to look up (a string).
:returns: The attribute value.
"""
# Check if the given name is an alias.
target = self.aliases.get(name)
if target is not None:
# Emit the deprecation warning.
warnings.warn(
format("%s.%s was moved to %s, please update your imports", self.module.__name__, name, target),
category=DeprecationWarning,
stacklevel=2,
)
# Resolve the dotted path.
return self.resolve(target)
# Look up the name in the original module namespace.
value = getattr(self.module, name, None)
if value is not None:
return value
# Fall back to the default behavior.
raise AttributeError(format("module '%s' has no attribute '%s'", self.module.__name__, name))
def resolve(self, target):
"""
Look up the target of an alias.
:param target: The fully qualified dotted path (a string).
:returns: The value of the given target.
"""
module_name, _, member = target.rpartition(".")
module = importlib.import_module(module_name)
return getattr(module, member)
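# A minimal usage sketch (not part of the original module; the module name
# "mypackage_compat_demo" is made up): register a throwaway module, give it a
# backwards compatible alias for humanfriendly.text.format and then access it.
def _define_aliases_sketch():
    """Demonstrate define_aliases() / DeprecationProxy on a throwaway module."""
    demo = types.ModuleType("mypackage_compat_demo")
    sys.modules[demo.__name__] = demo
    define_aliases(demo.__name__, format="humanfriendly.text.format")
    # get_aliases() now reports {"format": "humanfriendly.text.format"} and the
    # attribute access below emits a DeprecationWarning before resolving to
    # humanfriendly.text.format via DeprecationProxy.resolve().
    return sys.modules[demo.__name__].format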
|
xolox/python-humanfriendly
|
humanfriendly/deprecation.py
|
Python
|
mit
| 9,499
|
import sys
sys.path.append("..")
from data_mining.association_rule.base import rules, lift, support
from data_mining.association_rule.apriori import apriori
from data_mining.association_rule.liftmin import apriorilift
from pat_data_association_rules import compare
LE = "leite"
PA = "pao"
SU = "suco"
OV = "ovos"
CA = "cafe"
BI = "biscoito"
AR = "arroz"
FE = "feijao"
CE = "cerveja"
MA = "manteiga"
data = [[CA, PA, MA], [LE, CE, PA, MA], [CA, PA, MA], [LE, CA, PA, MA],
[CE], [MA], [PA], [FE], [AR, FE], [AR]]
compare(data, 0.0000000001, 5.0, 0)
|
JoaoFelipe/data-mining-algorithms
|
examples/custom_association_rules.py
|
Python
|
mit
| 560
|
from django.conf import settings
BACKEND_CLASS = getattr(
settings, "COURRIERS_BACKEND_CLASS", "courriers.backends.simple.SimpleBackend"
)
MAILCHIMP_API_KEY = getattr(settings, "COURRIERS_MAILCHIMP_API_KEY", "")
MAILJET_API_KEY = getattr(settings, "COURRIERS_MAILJET_API_KEY", "")
MAILJET_CONTACTSLIST_LIMIT = getattr(
settings, "COURRIERS_MAILJET_CONTACTSLIST_LIMIT", 1000
)
MAILJET_CONTACTFILTER_LIMIT = getattr(
settings, "COURRIERS_MAILJET_CONTACTFILTER_LIMIT", 1000
)
MAILJET_API_SECRET_KEY = getattr(settings, "COURRIERS_MAILJET_API_SECRET_KEY", "")
DEFAULT_FROM_EMAIL = getattr(
settings, "COURRIERS_DEFAULT_FROM_EMAIL", settings.DEFAULT_FROM_EMAIL
)
DEFAULT_FROM_NAME = getattr(settings, "COURRIERS_DEFAULT_FROM_NAME", "")
ALLOWED_LANGUAGES = getattr(settings, "COURRIERS_ALLOWED_LANGUAGES", settings.LANGUAGES)
PRE_PROCESSORS = getattr(settings, "COURRIERS_PRE_PROCESSORS", ())
PAGINATE_BY = getattr(settings, "COURRIERS_PAGINATE_BY", 9)
FAIL_SILENTLY = getattr(settings, "COURRIERS_FAIL_SILENTLY", False)
NEWSLETTERLIST_MODEL = getattr(
settings,
"COURRIERS_NEWSLETTERLIST_MODEL",
"courriers.models.newsletterlist.NewsletterList",
)
NEWSLETTER_MODEL = getattr(
settings, "COURRIERS_NEWSLETTER_MODEL", "courriers.models.newsletter.Newsletter"
)
NEWSLETTERITEM_MODEL = getattr(
settings,
"COURRIERS_NEWSLETTERITEM_MODEL",
"courriers.models.newsletteritem.NewsletterItem",
)
NEWSLETTERSEGMENT_MODEL = getattr(
settings,
"COURRIERS_NEWSLETTERSEGMENT_MODEL",
"courriers.models.newslettersegment.NewsletterSegment",
)
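# Usage note (illustrative values, not part of the original file): every setting above
# is resolved through a COURRIERS_-prefixed override looked up on the Django project
# settings, so a project can customise them from its own settings module, e.g.:
#
#     # project settings.py (hypothetical)
#     COURRIERS_BACKEND_CLASS = "courriers.backends.simple.SimpleBackend"
#     COURRIERS_PAGINATE_BY = 20
#     COURRIERS_DEFAULT_FROM_EMAIL = "newsletter@example.com"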
|
ulule/django-courriers
|
courriers/settings.py
|
Python
|
mit
| 1,595
|
import numpy as np
import logic
from unittest import TestCase
import graphs
import sympy
from collections import namedtuple
import random
from attractors import find_num_attractors_onestage, \
vertex_model_impact_scores, stochastic_vertex_model_impact_scores, find_num_steady_states, \
find_attractors_dubrova, find_attractors_onestage_enumeration, ImpactType, \
vertex_state_impact_scores, stochastic_vertex_state_impact_scores, graph_model_impact_score, \
graph_state_impact_score, stochastic_graph_model_impact_score, stochastic_graph_state_impact_score
import attractors
dubrova_path = "../" + attractors.dubrova_path
ILPAttractorExperimentParameters = namedtuple("AttractorExperimentParameters", "G T P n_attractors")
VertexModelImpactExperimentParameters = namedtuple("VertexModelImpactExperimentParameters", "G current_attractors T P "
"impact_types relative_basins "
"maximal_bits "
"impacts")
VertexStateImpactExperimentParameters = namedtuple("VertexStateImpactExperimentParameters", "G current_attractors "
"relative_basins "
"max_transient_len "
"impacts")
StochasticVertexModelImpactExperimentParameters = namedtuple(
"StochasticVertexModelImpactExperimentParameters", "G current_attractors "
"bits_of_change relative_basins impact_type impacts")
StochasticVertexStateImpactExperimentParameters = namedtuple(
"StochasticVertexStateImpactExperimentParameters", "G impacts")
GraphModelImpactExperimentParameters = namedtuple("GraphModelImpactExperimentParameters", "G current_attractors T P "
"impact_types relative_basins "
"maximal_bits "
"impact")
GraphStateImpactExperimentParameters = namedtuple("GraphStateImpactExperimentParameters", "G current_attractors "
"relative_basins "
"max_transient_len maximal_bits "
"impact")
StochasticGraphModelImpactExperimentParameters = namedtuple(
"StochasticGraphModelImpactExperimentParameters", "G current_attractors "
"bits_of_change relative_basins impact_type impact")
StochasticGraphStateImpactExperimentParameters = namedtuple(
"StochasticGraphStateImpactExperimentParameters", "G bits_of_change impact")
DubrovaExperimentParameters = namedtuple("DubrovaExperimentParameters", "G mutate n_attractors")
class TestAttractors(TestCase):
def test_num_attractors_onestage(self):
experiments = []
"""test on known toy models"""
# 0, 1
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=1, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=1))
# 2, 3
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1)])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=1, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=1))
# 4, 5
G = graphs.Network(vertex_names=["A"], edges=[],
vertex_functions=[None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=3, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=2))
# 6, 7
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[logic.SymmetricThresholdFunction(signs=[1], threshold=1),
None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=5, n_attractors=4))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=5, n_attractors=4))
# 8, 9
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1),
None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=1, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=2))
# 10, 11
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=2, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=1, n_attractors=1))
# 12, 13
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=3, n_attractors=2))
# 14, 15
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[None, None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=5, n_attractors=4))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=6, n_attractors=4))
# 16, 17
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[None, True])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=5, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=6, n_attractors=2))
# 18, 19, 20
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.Nand, sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=2, n_attractors=1))
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=1, n_attractors=1))
# 21, 22, 23
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.Nand, sympy.Nand])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=3, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=15, P=15, n_attractors=3))
# 24, 25
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[lambda x: True, lambda x: False])
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=2, n_attractors=1))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=2, n_attractors=1))
# 26, 27
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[None, sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=4, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=4, n_attractors=2))
# 28, 29
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[None, lambda _: True])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=1))
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=2, n_attractors=1))
# 30, 31
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[None, None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=6, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=6, n_attractors=2))
# 32, 33, 34, 35, 36
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 0)])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=3, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=4, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=3, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=4, n_attractors=4))
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=4, n_attractors=4))
# 37
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=3, n_attractors=3))
# 38, 39, 40
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand]*3)
experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=2, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=10, P=10, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=5, P=10, n_attractors=1))
# 41, 42
# acyclic, should have 2**#input_nodes attractors of length 1
G = graphs.Network(vertex_names=["v1", "v2", "v3", "v4", "v5", "v6"],
edges=[("v1", "v4"), ("v2", "v4"), ("v1", "v5"), ("v4", "v6")],
vertex_functions=[sympy.Nand]*6)
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=10, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=10, n_attractors=8))
# 43, 44, 45
G = graphs.Network(vertex_names=["A1", "B1", "B2", "C1", "C2"],
edges=[("A1", "A1"), ("B1", "B2"), ("B2", "B1"), ("C1", "C2"), ("C2", "C1")],
vertex_functions=[sympy.And]*5)
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=10, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=18, n_attractors=18))
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=40, n_attractors=20)) # offsets!
# 46, 47, 48
# a failed random graph added as a constant test
G = graphs.Network(
vertex_names=['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16',
'17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31',
'32', '33', '34'],
edges=[('1', '2'), ('2', '16'), ('3', '17'), ('5', '15'), ('6', '29'), ('7', '28'), ('8', '22'),
('9', '28'), ('10', '18'), ('11', '15'), ('12', '24'), ('13', '14'), ('15', '18'), ('16', '26'),
('17', '27'), ('18', '20'), ('19', '23'), ('20', '27'), ('23', '26'), ('24', '29'), ('25', '33'),
('26', '30'), ('27', '32'), ('28', '32'), ('30', '32'), ('31', '34'), ('32', '33'), ('33', '34')],
vertex_functions=[None, None, sympy.Nand, None, None, None, None, None, None, None, None, None, None, None,
sympy.Or, sympy.Nand,
sympy.Nand, sympy.Nand, sympy.Nand, None, sympy.Xor, None, sympy.And, sympy.Nand,
sympy.Xor, None, sympy.And, sympy.Nand, sympy.And, sympy.Xor, sympy.Or, None, sympy.Or,
sympy.And, sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=6, n_attractors=6))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=10, n_attractors=10))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=10, n_attractors=10))
# 49, 50, 51
# G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
# "\\Attractors - for Ariel\\BNS_Dubrova_2011\\MAPK_large2.cnet")
# experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=15, n_attractors=12))
# experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=15, n_attractors=14))
# experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=15, n_attractors=14))
G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
"\\Attractors - for Ariel\\BNS_Dubrova_2011\\tcr.cnet")
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=15, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=15, n_attractors=9))
experiments.append(ILPAttractorExperimentParameters(G=G, T=7, P=15, n_attractors=9))
# for _ in range(5):
# size = 35
# G = graphs.Network(vertex_names=list(range(size)),
# edges=[(i, random.choice(list(range(i+1, size)))) for i in range(size)
# if random.random() < 0.8 and i != size-1],
# vertex_functions=[random.choice([sympy.And, sympy.Nand, sympy.Or, sympy.Xor])
# for _ in range(size)])
# input_nodes = 0
# for v in G.vertices:
# is_input = True
# for e in G.edges:
# if e[1] == v:
# is_input = False
# break
# if is_input:
# input_nodes += 1
# attractor_number = 2**input_nodes
# experiments.append(ExperimentParameters(G=G, T=1, P=3, n_attractors=min(3, attractor_number)))
# experiments.append(ExperimentParameters(G=G, T=2, P=10, n_attractors=min(10, attractor_number)))
# experiments.append(ExperimentParameters(G=G, T=10, P=3, n_attractors=min(3, attractor_number)))
# TODO: figure out how disjoint long attractors work together (multiplying doesn't account for offsets)
# """test on basic semi-random networks: create connectivity components of acyclis networks and simple cycles"""
# n_random_experiment = 0
# while n_random_experiment < 10:
# n_components = random.randint(1, 3)
# attractor_number = 1
# max_attractor_len = 0
# cur_graph = None
# for n_component in range(n_components): # TODO: change to graph union method
# comp_size = random.randint(1, 5)
# V = [i for i in range(comp_size)]
# E = []
# comp_type =random.choice(["cycle", "acyclic"])
# if comp_type == "acyclic":
# for i in range(len(V) - 1): # create only forward facing edges
# for j in range(i+1, len(V)):
# if random.random() <= 0.8:
# E.append((V[i], V[j]))
# component_graph = graphs.Network(vertex_names=V, edges=E)
# restriction_level = random.choice([graphs.FunctionTypeRestriction.NONE,
# graphs.FunctionTypeRestriction.SYMMETRIC_THRESHOLD,
# graphs.FunctionTypeRestriction.SIMPLE_GATES])
# component_graph.randomize_functions(function_type_restriction=restriction_level)
# input_nodes = 0
# for v in V:
# is_input = True
# for e in E:
# if e[1] == v:
# is_input = False
# break
# if is_input:
# input_nodes += 1
# attractor_number *= 2**input_nodes
# max_attractor_len = max(max_attractor_len, 1)
# elif comp_type == "cycle":
# """currently supports only a cycle of identity function, using a group theory theorem from
# https://www.quora.com/How-many-unique-binary-matrices-are-there-up-to-rotations-translations-and-flips
# , can later add negation cycles"""
# for i in range(len(V)):
# E.append((V[i], V[(i + 1) % len(V)]))
# component_graph = graphs.Network(vertex_names=V, edges=E, vertex_functions=[sympy.And]*len(V))
# attractor_number *= binary_necklaces(len(V))
# max_attractor_len = max(max_attractor_len, len(V))
# cur_graph = component_graph if cur_graph is None else cur_graph + component_graph
# if attractor_number * len(cur_graph.vertices) * max_attractor_len <= 250:
# experiments.append(ExperimentParameters(G=cur_graph, T=max_attractor_len,
# P=attractor_number + 1,
# n_attractors=attractor_number))
# n_random_experiment += 1
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, T={}, P={}, expected_n_attractors={}".format(len(experiment.G.vertices),
experiment.T, experiment.P, experiment.n_attractors)
# continue
use_sampling = bool(random.randint(0, 1))
use_sampling_for_mip_start = bool(random.randint(0, 1))
simplify = bool(random.randint(0, 1))
key_slice_size = random.randint(1, 15)
print "key_slice_size={}".format(key_slice_size)
n_attractors = find_num_attractors_onestage(G=experiment.G, max_len=experiment.T, max_num=experiment.P,
use_sat=False, verbose=False,
sampling_bounds=(3, 3) if use_sampling else None,
use_sampling_for_mip_start=use_sampling_for_mip_start,
simplify_general_boolean=simplify,
key_slice_size=key_slice_size)
try:
self.assertEqual(n_attractors, experiment.n_attractors)
except AssertionError as e:
print e
print experiment.G
raise e
except Exception as e:
raise e
# print "number of experiments (without keys)={}".format(len(experiments))
# for i, experiment in enumerate(experiments):
# print "experiment #{}".format(i)h
# print "n={}, T={}, P={}, expected_n_attractors={}".format(len(experiment.G.vertices),
# experiment.T, experiment.P, experiment.n_attractors)
# # continue
# n_attractors = find_num_attractors_onestage(G=experiment.G, max_len=experiment.T, max_num=experiment.P,
# use_sat=False, verbose=False,
# use_state_keys=False, require_result=experiment.n_attractors)
# try:
# self.assertEqual(n_attractors, experiment.n_attractors)
# except AssertionError as e:
# print e
# print experiment.G
# raise e
def test_vertex_degeneracy_scores(self):
self.assertTrue(False) # TODO: write...
def test_graph_state_impact_scores(self):
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
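        # The Nand self-loop has a single attractor, the two-state cycle 0 <-> 1; flipping its only
        # bit just lands on the other phase of the same attractor, so the expected impact is 0 in all
        # of the experiments for this graph, regardless of transient length or bit budget.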
# experiment #0
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
maximal_bits=1,
impact=0))
# experiment #1
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=1,
impact=0))
# experiment #2
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
maximal_bits=1,
impact=0))
# experiment #3
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
maximal_bits=10,
impact=0))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.Nand, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #4
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
maximal_bits=1,
impact=0))
# experiment #5
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=1,
impact=0))
# experiment #6
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
maximal_bits=1,
impact=0))
# experiment #7
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
maximal_bits=10,
impact=0))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
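        # The And self-loop has two fixed points, 0 and 1; flipping the single bit moves the system
        # directly into the other attractor, so the expected impact below is 1.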
# experiment #8
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=5,
maximal_bits=1,
impact=1))
# experiment #9
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=1,
impact=1))
# experiment #10
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=5,
maximal_bits=5,
impact=1))
# experiment #11
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
max_transient_len=5,
maximal_bits=5,
impact=1))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #12
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
max_transient_len=5,
maximal_bits=5,
impact=1))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #13
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=1,
impact=1))
# experiment #14
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
max_transient_len=5,
maximal_bits=5,
impact=1))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #15
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=1,
impact=1))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #16
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=1,
impact=0))
# experiment #17
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=3,
impact=0))
# experiment #18
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=5,
maximal_bits=2,
impact=0))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("B", "A"), ("C", "A"), ("D", "A"),
("A", "B"), ("C", "B"), ("D", "B"),
("A", "C"), ("B", "C"), ("D", "C"),
("A", "D"), ("B", "D"), ("C", "D")],
vertex_functions=[lambda a, b, c: a + b + c > 1 for _ in range(4)])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # 0000 and 1111 are stable points and attract everything within Hamming distance <= 1,
        # while 2 bits of change land directly in another attractor.
        # The other three two-state attractors are unstable under a one-bit change, with a transient
        # length of 1, or they can be switched between each other / the stable points with 2 bits of
        # change (same as for the 0000/1111 attractors, if needed).
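        # A short worked sketch of the expected impacts in experiments #19-#25 below, assuming the
        # attractor set is exactly as described above (two fixed points plus three two-state
        # attractors, five in total):
        #   * 1 bit of change with max_transient_len = 0: nothing escapes immediately, impact 0;
        #   * 1 bit of change with max_transient_len >= 1: only the three cyclic attractors escape,
        #     giving 3 / 5.0 unweighted, or 3 * (2 / 16.0) = 6 / 16.0 with the basins defined below;
        #   * 2 bits of change: every attractor can be moved directly into another one, impact 1.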
# experiment #19
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=1,
impact=0))
# experiment #20
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
maximal_bits=1,
impact=3 / 5.0))
# experiment #21
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=5,
maximal_bits=1,
impact=3 / 5.0))
# experiment #22
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=2,
impact=1))
# experiment #23
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=3,
maximal_bits=2,
impact=1))
relative_basins = [5 / float(16) if len(attractor) == 1 else 2 / float(16) for
attractor in current_attractors]
# experiment #24
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=5,
maximal_bits=1,
impact=6 / 16.0))
# experiment #25
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=0,
maximal_bits=2,
impact=1))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "C")],
vertex_functions=[None, sympy.And, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # 000, 110 and 111 are the steady states. The first is stable; the others can change when the
        # right vertex is flipped: B within one step and C immediately.
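        # A worked sketch of the expectations in experiments #26-#30 below, assuming the attractors
        # are exactly 000, 110 and 111 as stated above: only 110 and 111 can be pushed into a
        # different attractor, so the unweighted impact is 2 / 3.0 and, with the relative basins
        # defined below (0.5, 3/8, 1/8), the weighted impact is 3 / 8.0 + 1 / 8.0 = 0.5.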
# experiment #26
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=1,
impact=2 / 3.0))
# experiment #27
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=2,
impact=2 / 3.0))
# experiment #28
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=5,
maximal_bits=5,
impact=2 / 3.0))
relative_len_decider = lambda attractor: 0.5 if [
int(s) for s in attractor[0]] == [0, 0, 0] else 3 / float(8) if [
int(s) for s in attractor[0]] == [1, 1, 0] else 1 / float(8)
relative_basins = [relative_len_decider(att) for att in current_attractors]
# experiment #29
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=5,
maximal_bits=2,
impact=0.5))
# experiment #30
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=0,
maximal_bits=1,
impact=0.5))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"),
("D", "D")],
vertex_functions=[None, sympy.And, sympy.And, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # Now 0000 is stable, 1110 changes immediately when the last vertex is flipped, and 1111 can
        # change in 2, 1, or 0 steps when the second, third or last vertex is flipped, respectively.
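        # Hence only 1110 and 1111 (two of the three attractors) can be escaped, which is why both
        # experiments below expect an impact of 2 / 3.0.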
# experiment #31
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
maximal_bits=1,
impact=2 / 3.0))
        # experiment #32
experiments.append(GraphStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=3,
maximal_bits=3,
impact=2 / 3.0))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "attractors:"
print experiment.current_attractors
print "n={}, relative_basins={}, expected_impacts={}".\
format(len(experiment.G.vertices), experiment.relative_basins, experiment.impact)
impact = graph_state_impact_score(G=experiment.G, current_attractors=experiment.current_attractors,
max_transient_len=experiment.max_transient_len,
relative_attractor_basin_sizes=experiment.relative_basins,
key_slice_size=15, maximal_bits_of_change=experiment.maximal_bits)
# (from vertex version) got numeric problems with test #16 regardless of key_slice
impact = round(impact, 5)
experiment_impact = round(experiment.impact, 5)
print "expected impact:"
print experiment_impact
print "got impact:"
print impact
try:
self.assertEqual(impact, experiment_impact)
except AssertionError as e:
print e
print experiment.G
raise e
def test_vertex_state_impact_scores(self):
# TODO: test stochastic kind
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #0
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
impacts=[0]))
# experiment #1
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[0]))
# experiment #2
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[0]))
# experiment #3
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=[1],
max_transient_len=30,
impacts=[0]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.Nand, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #4
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[0, np.nan]))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #5
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[1]))
# experiment #6
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[1]))
# experiment #7
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[1]))
# experiment #8
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
max_transient_len=1,
impacts=[1]))
# experiment #9
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
max_transient_len=0,
impacts=[1]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #10
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[1, np.nan]))
# experiment #11
experiments.append(VertexStateImpactExperimentParameters(G=G,
current_attractors=current_attractors,
relative_basins=[0.1, 0.4, 0.4, 0.1],
max_transient_len=0,
impacts=[1, np.nan]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #12
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[1] * 3))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #13
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[1, 1, 1]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #14
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[0, 0, 0]))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("B", "A"), ("C", "A"), ("D", "A"),
("A", "B"), ("C", "B"), ("D", "B"),
("A", "C"), ("B", "C"), ("D", "C"),
("A", "D"), ("B", "D"), ("C", "D")],
vertex_functions=[lambda a, b, c: a + b + c > 1 for _ in range(4)])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #15
# 0000 and 1111 are stable points, and attract everything with hamming distance <= 1.
# Other three two-state attractors are unstable under one bit change, with transient length of 1.
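        # A worked sketch of the per-vertex expectations in experiments #15-#18 below, assuming the
        # attractor set described above: a single-vertex flip escapes only the three cyclic
        # attractors and only if at least one transient step is allowed, so every vertex scores 0 at
        # max_transient_len = 0, 3 / 5.0 unweighted afterwards, and 3 * (2 / 16.0) = 6 / 16.0 with
        # the relative basins defined below.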
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[0] * 4))
# experiment #16
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
impacts=[3 / 5.0] * 4))
# experiment #17
relative_basins = [5 / float(16) if len(attractor) == 1 else 2 / float(16) for
attractor in current_attractors]
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=1,
impacts=[6 / 16.0, 6 / 16.0,
6 / 16.0, 6 / 16.0]))
# experiment #18
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=2,
impacts=[6 / 16.0, 6 / 16.0,
6 / 16.0, 6 / 16.0]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "C")],
vertex_functions=[None, sympy.And, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #19
        # 000, 110 and 111 are the steady states. The first is stable; the others can change when the
        # right vertex is flipped: B within one step and C immediately.
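        # A worked sketch of the per-vertex expectations in experiments #19-#21 below: A is an input
        # node (np.nan); flipping C escapes both 110 and 111 immediately (2 / 3.0 unweighted, or
        # 3 / 8.0 + 1 / 8.0 = 0.5 with the basins defined below); flipping B escapes only 111 and
        # needs one transient step (0 at max_transient_len = 0, otherwise 1 / 3.0 unweighted or
        # 1 / 8.0 weighted).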
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[np.nan, 0, 2 / 3.0]))
# experiment #20
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
impacts=[np.nan, 1 / 3.0, 2/ 3.0]))
relative_len_decider = lambda attractor: 0.5 if [
int(s) for s in attractor[0]] == [0, 0, 0] else 3 / float(8) if [
int(s) for s in attractor[0]] == [1, 1, 0] else 1 / float(8)
relative_basins = [relative_len_decider(att) for att in current_attractors]
# experiment #21
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=relative_basins,
max_transient_len=1,
impacts=[np.nan, 1 / 8.0, 0.5]))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"),
("D", "D")],
vertex_functions=[None, sympy.And, sympy.And, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
        # Now 0000 is stable, 1110 changes immediately when the last vertex is flipped, and 1111 can
        # change in 2, 1, or 0 steps when the second, third or last vertex is flipped, respectively.
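        # A worked sketch of the per-vertex expectations in experiments #22-#26 below: A is an input
        # node (np.nan); flipping D escapes both 1110 and 1111 immediately (2 / 3.0); flipping C
        # escapes only 1111 and needs one transient step (1 / 3.0 once max_transient_len >= 1);
        # flipping B escapes only 1111 and needs two transient steps (1 / 3.0 once
        # max_transient_len >= 2).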
# experiment #22
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=0,
impacts=[np.nan, 0, 0, 2 / float(3)]))
# experiment #23
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=1,
impacts=[np.nan, 0, 1 / float(3),
2 / float(3)]))
# experiment #24
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=2,
impacts=[np.nan, 1 / float(3), 1 / float(3),
2 / float(3)]))
# experiment #25
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=3,
impacts=[np.nan, 1 / float(3), 1 / float(3),
2 / float(3)]))
# experiment #26
experiments.append(VertexStateImpactExperimentParameters(G=G, current_attractors=current_attractors,
relative_basins=None,
max_transient_len=30,
impacts=[np.nan, 1 / float(3), 1 / float(3),
2 / float(3)]))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "attractors:"
print experiment.current_attractors
print "n={}, relative_basins={}, expected_impacts={}".\
format(len(experiment.G.vertices), experiment.relative_basins, experiment.impacts)
impacts = vertex_state_impact_scores(G=experiment.G, current_attractors=experiment.current_attractors,
max_transient_len=experiment.max_transient_len,
relative_attractor_basin_sizes=experiment.relative_basins,
key_slice_size=15)
# got numeric problems with test #16 regardless of key_slice
impacts = [round(x, 5) if not np.isnan(x) else x for x in impacts]
experiment_impacts = [round(x, 5) if not np.isnan(x) else x for x in experiment.impacts]
print "expected impacts:"
print impacts
print "got impacts:"
print experiment_impacts
try:
self.assertEqual(impacts, experiment_impacts)
except AssertionError as e:
print e
print experiment.G
raise e
def test_graph_model_impact_scores(self):
# TODO: also test the resulting models (assure they have the correct number of attractors)
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
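        # A hedged note on the expected values in this test: the ImpactType.Both expectations are
        # consistent with averaging the corresponding Invalidation and Addition scores, e.g. for this
        # graph experiment #3 expects 1.5, the average of an invalidation score of 1 (cf. experiment
        # #0, already 1 with a single bit) and the addition score of 2 from experiment #2.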
# experiment #0
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #1
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #2
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=2))
# experiment #3
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=1.5))
# experiment #4
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #5
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[1],
impact=1.5))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #6
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impact=0.5))
# experiment #7
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impact=0.9))
# experiment #8
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #9
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impact=0.75))
# experiment #10
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=0.5))
# experiment #11
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=0))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #12
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #13
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.4, 0.4, 0.1],
impact=0.75))
# experiment #14
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=0.5))
# experiment #15
experiments.append(GraphModelImpactExperimentParameters(G=G, T=3, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=0.25))
# experiment #16
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=0))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #17
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #18
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #19
experiments.append(GraphModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=2))
# experiment #20
experiments.append(GraphModelImpactExperimentParameters(G=G, T=6, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impact=1.25))
# experiment #21
experiments.append(GraphModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impact=1.5))
# experiment #22
experiments.append(GraphModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Addition,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impact=0.5))
# experiment #23
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=0.5))
# experiment #24
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #25
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impact=0.75))
# experiment #26
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #27
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=0.5))
# experiment #28
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=0.75))
# experiment #29
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
maximal_bits=3,
current_attractors=current_attractors,
relative_basins=None,
impact=0.5))
# experiment #30
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
maximal_bits=4,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #31
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=5, impact_types=ImpactType.Addition,
maximal_bits=4,
current_attractors=current_attractors,
relative_basins=None,
impact=0.5))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #32
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
# experiment #33
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=3, impact_types=ImpactType.Addition,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impact=3))
# experiment #34
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=6, impact_types=ImpactType.Addition,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impact=3))
# experiment #35
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=6, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=3))
# experiment #36
experiments.append(GraphModelImpactExperimentParameters(G=G, T=1, P=6, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impact=1))
        # experiment #37
experiments.append(GraphModelImpactExperimentParameters(G=G, T=7, P=6, impact_types=ImpactType.Addition,
maximal_bits=3,
current_attractors=current_attractors,
relative_basins=None,
impact=4))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, T={}, P={}, maximal_bits={}, relative_basins={}, expected_impact={}".\
format(len(experiment.G.vertices),
experiment.T, experiment.P, experiment.maximal_bits, experiment.relative_basins,
experiment.impact)
print experiment.current_attractors
impact = graph_model_impact_score(G=experiment.G, current_attractors=experiment.current_attractors,
max_len=experiment.T,
max_num=experiment.P,
impact_types=experiment.impact_types,
relative_attractor_basin_sizes=experiment.relative_basins,
maximal_bits_of_change=experiment.maximal_bits)
try:
self.assertEqual(impact, experiment.impact)
except AssertionError as e:
print e
print experiment.G
raise e
def test_vertex_model_impact_scores(self):
# TODO: also test the resulting models (assure they have the correct number of attractors)
# TODO: test stochastic kind
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #0
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1]))
# experiment #1
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1]))
# experiment #2
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[2]))
# experiment #3
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1.5]))
# experiment #4
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1]))
# experiment #5
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[1],
impacts=[1.5]))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #6
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5]))
# experiment #7
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impacts=[0.9]))
# experiment #8
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1]))
# experiment #9
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impacts=[0.75]))
# experiment #10
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5]))
# experiment #11
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #12
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, np.nan]))
# experiment #13
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.4, 0.4, 0.1],
impacts=[0.75, np.nan]))
# experiment #14
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=3, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5, np.nan]))
# experiment #15
experiments.append(VertexModelImpactExperimentParameters(G=G, T=3, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.25, np.nan]))
# experiment #16
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0, np.nan]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #17
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1] * 3))
# experiment #18
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1] * 3))
# experiment #19
experiments.append(VertexModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[2] * 3))
# experiment #20
experiments.append(VertexModelImpactExperimentParameters(G=G, T=6, P=3, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impacts=[1.25] * 3))
# experiment #21
experiments.append(VertexModelImpactExperimentParameters(G=G, T=6, P=5, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impacts=[1.5] * 3))
# experiment #22
experiments.append(VertexModelImpactExperimentParameters(G=G, T=6, P=2, impact_types=ImpactType.Addition,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5] * 3))
# experiment #23
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=1, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5] * 3))
# experiment #24
experiments.append(VertexModelImpactExperimentParameters(G=G, T=1, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1] * 3))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #25
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.75, 0.75, 0.75]))
# experiment #26
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, 1, 1]))
# experiment #27
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.5, 0.5, 0.5]))
# experiment #28
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Both,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[0.75, 0.75, 0.75]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #29
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Invalidation,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, 1, 1]))
# experiment #30
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
maximal_bits=1,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, 1, 3]))
# experiment #31
experiments.append(VertexModelImpactExperimentParameters(G=G, T=7, P=5, impact_types=ImpactType.Addition,
maximal_bits=2,
current_attractors=current_attractors,
relative_basins=None,
impacts=[1, 1, 3]))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, T={}, P={}, maximal_bits={}, relative_basins={}, expected_impacts={}".\
format(len(experiment.G.vertices),
experiment.T, experiment.P, experiment.maximal_bits, experiment.relative_basins,
experiment.impacts)
print experiment.current_attractors
impacts = vertex_model_impact_scores(G=experiment.G, current_attractors=experiment.current_attractors,
max_len=experiment.T,
max_num=experiment.P,
impact_types=experiment.impact_types,
relative_attractor_basin_sizes=experiment.relative_basins,
maximal_bits_of_change=experiment.maximal_bits)
try:
self.assertEqual(impacts, experiment.impacts)
except AssertionError as e:
print e
print experiment.G
raise e
def test_stochastic_graph_state_impact_scores(self):
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
# experiment #0
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=0))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.Nand, None])
# experiment #1
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=0))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
# experiment #2
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=1))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
# experiment #3
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=1))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
# experiment #4
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=0.5))
# experiment #5
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=0.5))
# experiment #6
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=3, impact=0))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
# experiment #7
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=1))
# experiment #8
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=0.5))
# experiment #9
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=3, impact=1))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
# experiment #10
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=0))
# experiment #11
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=0))
# experiment #12
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=3, impact=0))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("B", "A"), ("C", "A"), ("D", "A"),
("A", "B"), ("C", "B"), ("D", "B"),
("A", "C"), ("B", "C"), ("D", "C"),
("A", "D"), ("B", "D"), ("C", "D")],
vertex_functions=[lambda a, b, c: a + b + c > 1 for _ in range(4)])
# experiment #13
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1, impact=3 / 8.0))
# experiment #14
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=1))
# experiment #15
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=3, impact=1))
# experiment #16
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=4, impact=10 / 16.0))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "C")],
vertex_functions=[None, sympy.And, sympy.And])
        # 000, 110 and 111 are the steady states. The first is stable; the others can change when the
        # right vertex is flipped: B within one step and C immediately.
# experiment #17
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1,
impact=(3 / 8.0 * 0) + (3 / 8.0 * 0.5) +
(1 / 8.0 * 0.5) + (1 / 8.0 * 0)))
# experiment #18
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=2, impact=1 / 16.0))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"),
("D", "D")],
vertex_functions=[None, sympy.And, sympy.And, sympy.And])
        # Now 0000 is stable, 1110 changes immediately when the last vertex is flipped, and 1111 can
        # change in 2, 1, or 0 steps when the second, third or last vertex is flipped, respectively.
# experiment #19
experiments.append(StochasticGraphStateImpactExperimentParameters(G=G, bits_of_change=1,
impact=0.20833333333))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, expected_impact={}".\
format(len(experiment.G.vertices), experiment.impact)
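            # Each experiment is re-estimated 10 times with a randomized iteration budget. A rough,
            # hedged bound (assuming the estimator averages [0, 1]-bounded samples): with
            # n_iter >= 700 the standard error is at most 0.5 / sqrt(700) ~= 0.019, so the 0.1
            # tolerance used below corresponds to roughly five standard errors.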
for iteration in range(10):
n_iter = random.randint(700, 1400)
parallel_n_jobs = random.choice([None, 1, 2, 3])
estimated_impact = stochastic_graph_state_impact_score(G=experiment.G, n_iter=n_iter,
bits_of_change=experiment.bits_of_change,
parallel_n_jobs=parallel_n_jobs)
print "estimated_impact={}".format(estimated_impact)
self.assertTrue(abs(estimated_impact - experiment.impact) < 0.1)
def test_stochastic_vertex_state_impact_scores(self):
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
# experiment #0
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[0]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.Nand, None])
# experiment #1
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[0, np.nan]))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
# experiment #2
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[1]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
# experiment #3
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[1, np.nan]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
# experiment #4
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[0.5] * 3))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
# experiment #5
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[1, 1, 1]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
# experiment #6
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[0, 0, 0]))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("B", "A"), ("C", "A"), ("D", "A"),
("A", "B"), ("C", "B"), ("D", "B"),
("A", "C"), ("B", "C"), ("D", "C"),
("A", "D"), ("B", "D"), ("C", "D")],
vertex_functions=[lambda a, b, c: a + b + c > 1 for _ in range(4)])
# experiment #7
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G, impacts=[3 / 8.0] * 4))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "C")],
vertex_functions=[None, sympy.And, sympy.And])
# experiment #8
        # 000, 110 and 111 are the steady states. The first is stable; the others can change when the
        # right vertex is flipped: B within one step and C immediately.
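        # A hedged sketch of the expectation below, assuming the stochastic score samples initial
        # states uniformly: the basins are 000 -> 4/8, 110 -> 3/8 and 111 -> 1/8; flipping C escapes
        # 110 and 111 (probability 0.5), flipping B escapes only 111 (1 / 8.0), and A is an input
        # node (np.nan).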
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G,
impacts=[np.nan, 1/8.0, 0.5]))
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"),
("D", "D")],
vertex_functions=[None, sympy.And, sympy.And, sympy.And])
        # Now 0000 is stable, 1110 changes immediately when the last vertex is flipped, and 1111 can
        # change in 2, 1, or 0 steps when the second, third or last vertex is flipped, respectively.
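        # A hedged sketch of the expectation below, assuming uniform sampling of initial states: the
        # basins are 0000 -> 8/16, 1110 -> 7/16 and 1111 -> 1/16; flipping D escapes both 1110 and
        # 1111 (probability 0.5), while flipping B or C escapes only 1111 (1 / 16.0 each), and A is
        # an input node (np.nan).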
# experiment #9
experiments.append(StochasticVertexStateImpactExperimentParameters(G=G,
impacts=[np.nan,
1/16.0, 1/16.0,
0.5]))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, expected_impacts={}".\
format(len(experiment.G.vertices), experiment.impacts)
for iteration in range(10):
n_iter = random.randint(700, 1400)
parallel_n_jobs = random.choice([None, 1, 2, 3])
estimated_impacts = stochastic_vertex_state_impact_scores(G=experiment.G, n_iter=n_iter,
parallel_n_jobs=parallel_n_jobs)
print "estimated_impacts={}".format(estimated_impacts)
self.assertTrue(len(experiment.impacts) == len(estimated_impacts))
for calculated_impact, estimated_impact in zip(experiment.impacts, estimated_impacts):
if np.isnan(calculated_impact):
self.assertTrue(np.isnan(estimated_impact))
else:
self.assertTrue(abs(estimated_impact - calculated_impact) < 0.1)
def test_stochastic_graph_model_impact_scores(self):
# TODO: also test the resulting models (assure they have the correct number of attractors)
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #0
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=1))
# experiment #1
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=1))
# experiment #2
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=1))
# experiment #3
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=2))
# experiment #4
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impact=1.5))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #5
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=0.5))
# experiment #6
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impact_type=ImpactType.Invalidation,
impact=0.5))
# experiment #7
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=1))
# experiment #8
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=0))
# experiment #9
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=0.5))
# experiment #10
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impact=0.75))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #11
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=0.5))
# experiment #12
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=1))
# experiment #13
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=0.5))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #14
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=1))
# experiment #15
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=1))
# experiment #16
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=0.5))
# experiment #17
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=(3 / 15.0) * 2 + (12 / 15.0) * 0.5))
# experiment #18
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=3,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=0.5))
# experiment #19
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=4,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=(3 / 15.0) * 1 + (12 / 15.0) * 0.5))
# experiment #20
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impact=0.75))
# experiment #21
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impact=(3 / 15.0) * 1.5 + (12 / 15.0) * 0.75))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #22
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=3 / 4.0))
# experiment #23
basin_sizes = [3 / 8.0 if len(att) > 1 else 1 / 8.0 for att in current_attractors]
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=basin_sizes,
impact_type=ImpactType.Invalidation,
impact=7 / 8.0))
# experiment #24
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=(3 / 15.0) * 1 + (12 / 15.0) *
(0.5 * 3 / 4.0 + 0.5 * 1)))
# experiment #25
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=0))
# experiment #26
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=(3 / 15.0) * 0.5 + (12 / 15.0) *
(0.5 * 0 + 0.5 * 0.25)))
# experiment #27
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impact=7 / 16.0))
# experiment #28
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impact=(3 / 15.0) * 0.75 + (12 / 15.0) *
(0.5 * (3/8.0 + 0) + 0.5 * (3/8.0 + 0.125))))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #29
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=0.5))
# experiment #30
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=(3 / 15.0) * 1 + (12 / 15.0) * 3 / 4.0))
# experiment #31
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=(2 / 3.0 * 0.5 + 1 / 3.0 * 2.5)))
# experiment #32
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impact=(2 / 3.0 * 0.5 + 1 / 3.0 * (
0.5 * 1.5 + 0.5 * 1.5))))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A"), ("B", "B")],
vertex_functions=[sympy.And, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #33
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=(1 / 3.0 * 0.5 + 2 / 3.0 * 0.25)))
# experiment #34
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impact_type=ImpactType.Invalidation,
impact=(1 / 3.0 * 0.5 + 2 / 3.0 * 0.25)))
# experiment #35
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impact=(1 / 15.0 * 1 +
6 / 15.0 * 3.5 / 6.0 +
8 / 15.0 * 5 / 8.0)))
# experiment #36
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=(1 / 3.0 * 0.25 +
2 / 3.0 * 1 / 8.0)))
# experiment #37
experiments.append(StochasticGraphModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impact=(1 / 15.0 * 0.5 +
6 / 15.0 * 1 / 4.0 +
8 / 15.0 * 2 * 0.5 / 8.0)))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, bits_of_change={}, relative_basins={}, impact_type={}, expected_impact={}".\
format(len(experiment.G.vertices),
experiment.bits_of_change, experiment.relative_basins, experiment.impact_type,
experiment.impact)
print experiment.current_attractors
for use_dubrova in [False, True]:
n_iter = random.randint(800, 880)
attractor_estimation_n_iter = random.randint(50, 55)
parallel_n_jobs = random.choice([None, 1, 2, 3])
estimated_impact = stochastic_graph_model_impact_score(
G=experiment.G, current_attractors=experiment.current_attractors, n_iter=n_iter, use_dubrova=use_dubrova,
bits_of_change=experiment.bits_of_change,
relative_attractor_basin_sizes=experiment.relative_basins,
attractor_estimation_n_iter=attractor_estimation_n_iter,
impact_type=experiment.impact_type,
cur_dubrova_path=dubrova_path,
parallel_n_jobs=parallel_n_jobs)
print "estimated_impact={}".format(estimated_impact)
print "expected_impacts={}".format(experiment.impact)
self.assertTrue(abs(estimated_impact - experiment.impact) < 0.15)
def test_stochastic_vertex_model_impact_scores(self):
# TODO: also test the resulting models (assure they have the correct number of attractors)
experiments = []
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #0
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[1]))
# experiment #1
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[1]))
# experiment #2
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[1]))
# experiment #3
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[2]))
# experiment #4
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G,
bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impacts=[1.5]))
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #5
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[0.5]))
# experiment #6
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impact_type=ImpactType.Invalidation,
impacts=[0.5]))
# experiment #7
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[1]))
# experiment #8
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[0]))
# experiment #9
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[0.5]))
# experiment #10
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impacts=[0.75]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[sympy.And, None])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #11
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[0.5, np.nan]))
# experiment #12
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[1, np.nan]))
# experiment #13
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[0.5, np.nan]))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.Nand])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #14
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[1] * 3))
# experiment #15
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[1] * 3))
# experiment #16
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[0.5] * 3))
# experiment #17
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[2] * 3))
# experiment #18
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impacts=[0.75] * 3))
# experiment #19
        experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impacts=[1.5] * 3))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #20
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[3 / 4.0] * 3))
# experiment #21
basin_sizes = [3 / 8.0 if len(att) > 1 else 1 / 8.0 for att in current_attractors]
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=basin_sizes,
impact_type=ImpactType.Invalidation,
impacts=[7 / 8.0] * 3))
# experiment #22
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[1, 1, 1]))
# experiment #23
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[0] * 3))
# experiment #24
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[0.5, 0.5, 0.5]))
# experiment #25
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impacts=[7 / 16.0] * 3))
# experiment #26
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impacts=[0.75] * 3))
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand, sympy.Nand, lambda _: True])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #27
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[0.5] * 3))
# experiment #28
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[1] * 3))
# experiment #29
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[0.5, 0.5, 2.5]))
# experiment #30
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[1, 1, 1]))
# experiment #31
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Both,
impacts=[0.5, 0.5, 1.5]))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A"), ("B", "B")],
vertex_functions=[sympy.And, sympy.And])
current_attractors = find_attractors_dubrova(G, dubrova_path, mutate_input_nodes=True)
# experiment #32
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[0.5, 0.25]))
# experiment #33
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=[0.1, 0.9],
impact_type=ImpactType.Invalidation,
impacts=[0.5, 0.25]))
# experiment #34
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Invalidation,
impacts=[1, 0.5]))
# experiment #35
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=1,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[0.25, 1 / 8.0]))
# experiment #36
experiments.append(StochasticVertexModelImpactExperimentParameters(G=G, bits_of_change=2,
current_attractors=current_attractors,
relative_basins=None,
impact_type=ImpactType.Addition,
impacts=[0.5, 1 / 4.0]))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, bits_of_change={}, relative_basins={}, impact_type={}, expected_impacts={}".\
format(len(experiment.G.vertices),
experiment.bits_of_change, experiment.relative_basins, experiment.impact_type,
experiment.impacts)
print experiment.current_attractors
for use_dubrova in [False, True]:
n_iter = random.randint(400, 440)
attractor_estimation_n_iter = random.randint(30, 35)
parallel_n_jobs = random.choice([None, 1, 2, 3])
estimated_impacts = stochastic_vertex_model_impact_scores(
G=experiment.G, current_attractors=experiment.current_attractors, n_iter=n_iter, use_dubrova=use_dubrova,
bits_of_change=experiment.bits_of_change,
relative_attractor_basin_sizes=experiment.relative_basins,
attractor_estimation_n_iter=attractor_estimation_n_iter,
impact_type=experiment.impact_type,
cur_dubrova_path=dubrova_path,
parallel_n_jobs=parallel_n_jobs)
self.assertTrue(len(experiment.impacts) == len(estimated_impacts))
print "estimated_impacts={}".format(estimated_impacts)
for calculated_impact, estimated_impact in zip(experiment.impacts, estimated_impacts):
if np.isnan(calculated_impact):
self.assertTrue(np.isnan(estimated_impact))
else:
self.assertTrue(abs(estimated_impact - calculated_impact) < 0.15)
def test_find_num_steady_states(self):
"""test on known toy models"""
# 0, 1
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 0)
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 0)
G = graphs.Network(vertex_names=["A"], edges=[],
vertex_functions=[None])
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 2)
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 2)
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 2)
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 2)
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.Nand, sympy.And])
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 0)
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 0)
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.Nand, sympy.Nand])
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 2)
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[lambda x: True, lambda x: False])
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 1)
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=True), 1)
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand]*3)
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 0)
G = graphs.Network(vertex_names=["A", "B", "C", "D"], edges=[("A", "B"), ("B", "C"), ("C", "D"), ("D", "A")],
vertex_functions=[sympy.Nand]*4)
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 2)
# acyclic, should have 2**#input_nodes attractors of length 1
G = graphs.Network(vertex_names=["v1", "v2", "v3", "v4", "v5", "v6"],
edges=[("v1", "v4"), ("v2", "v4"), ("v1", "v5"), ("v4", "v6")],
vertex_functions=[sympy.Nand]*6)
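        # here v1, v2 and v3 have no incoming edges, so 2**3 = 8 steady states are expected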
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 8)
G = graphs.Network(vertex_names=["A1", "B1", "B2", "C1", "C2"],
edges=[("A1", "A1"), ("B1", "B2"), ("B2", "B1"), ("C1", "C2"), ("C2", "C1")],
vertex_functions=[sympy.And]*5)
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand]*3)
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 0)
G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
"\\Attractors - for Ariel\\BNS_Dubrova_2011\\tcr.cnet")
self.assertEqual(find_num_steady_states(G, verbose=False, simplify_general_boolean=False), 8)
def test_find_attractors_dubrova(self):
        """test on known toy models"""
        experiments = []
# 0, 1
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=1))
# 2
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1)])
experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
# 3, 4
G = graphs.Network(vertex_names=["A"], edges=[],
vertex_functions=[None])
experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=2))
# 5, 6
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.Nand, sympy.And])
experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=1))
# 7, 8
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[lambda x: True, lambda x: False])
experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=1))
# 9, 10
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
True])
experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=3))
experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=3))
# 11, 12
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
False])
experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=1))
# 13, 14
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
None])
experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=1))
experiments.append(DubrovaExperimentParameters(G=G, mutate=True, n_attractors=4))
# 15
G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
"\\Attractors - for Ariel\\BNS_Dubrova_2011\\tcr.cnet")
# G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
# "\\Attractors - for Ariel\\BNS_Dubrova_2011\\MAPK_large.cnet")
experiments.append(DubrovaExperimentParameters(G=G, mutate=False, n_attractors=9))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, mutate={}, expected_n_attractors={}".format(len(experiment.G.vertices),
experiment.mutate, experiment.n_attractors)
# continue
attractors = find_attractors_dubrova(G=experiment.G,
dubrova_path="../bns_dubrova.exe",
mutate_input_nodes=experiment.mutate)
n_attractors = len(attractors)
try:
self.assertEqual(n_attractors, experiment.n_attractors)
except AssertionError as e:
print e
print experiment.G
raise e
except Exception as e:
raise e
print "testing state order in attractor"
# TODO: expand? random graphs, compare ILP attractors with Dubrova's
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.And, sympy.Nand, True])
desired_attractor = [[0, 0, 1], [0, 1, 1], [1, 1, 1], [1, 0, 1]]
# repeat manually, (otherwise there's mutual dependence of tests).
possible_attractors = [desired_attractor[shift:] + desired_attractor[:shift] for shift in range(4)]
# print possible_attractors
found_attractors = find_attractors_dubrova(G, dubrova_path="../bns_dubrova.exe", mutate_input_nodes=True)
self.assertTrue(len(found_attractors) == 1)
found_attractor = [[int(v) for v in state] for state in found_attractors[0]]
# print found_attractor
self.assertTrue(any(found_attractor == possible_attractors[i] for i in range(len(possible_attractors))))
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.And, sympy.Nand])
desired_attractor = [[0, 0], [0, 1], [1, 1], [1, 0]]
# repeat manually, (otherwise there's mutual dependence of tests).
possible_attractors = [desired_attractor[shift:] + desired_attractor[:shift] for shift in range(4)]
# print possible_attractors
found_attractors = find_attractors_dubrova(G, dubrova_path="../bns_dubrova.exe", mutate_input_nodes=True)
self.assertTrue(len(found_attractors) == 1)
found_attractor = [[int(v) for v in state] for state in found_attractors[0]]
# print found_attractor
self.assertTrue(any(found_attractor == possible_attractor for possible_attractor in possible_attractors))
def test_find_attractors_enumerate(self):
        """test on known toy models"""
        experiments = []
# 0, 1
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.Nand])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=1))
# 2, 3
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1)])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=1))
# 4, 5
G = graphs.Network(vertex_names=["A"], edges=[],
vertex_functions=[None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=2))
# 6, 7
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "A")],
vertex_functions=[logic.SymmetricThresholdFunction(signs=[-1], threshold=1),
None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=2))
# 8, 9
G = graphs.Network(vertex_names=["A"], edges=[("A", "A")],
vertex_functions=[sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=None, n_attractors=2))
# 10, 11
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.Nand, sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=0))
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=None, n_attractors=1))
# 12, 13, 14
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[sympy.Nand, sympy.Nand])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=15, P=None, n_attractors=3))
# 15, 16
G = graphs.Network(vertex_names=["A", "B"], edges=[("A", "B"), ("B", "A")],
vertex_functions=[lambda x: True, lambda x: False])
experiments.append(ILPAttractorExperimentParameters(G=G, T=4, P=None, n_attractors=1))
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=1))
# 17, 18, 19
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 0)])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=3))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=4))
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=None, n_attractors=4))
# 20
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "A"), ("C", "A")],
vertex_functions=[logic.SymmetricThresholdFunction.from_function(sympy.Nand, 2),
logic.SymmetricThresholdFunction.from_function(sympy.Nand, 1),
None])
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=None, n_attractors=4))
# 21, 22, 23
G = graphs.Network(vertex_names=["A", "B", "C"], edges=[("A", "B"), ("B", "C"), ("C", "A")],
vertex_functions=[sympy.Nand]*3)
experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=None, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=10, P=None, n_attractors=2))
experiments.append(ILPAttractorExperimentParameters(G=G, T=5, P=None, n_attractors=1))
# 24, 25
# acyclic, should have 2**#input_nodes attractors of length 1
G = graphs.Network(vertex_names=["v1", "v2", "v3", "v4", "v5", "v6"],
edges=[("v1", "v4"), ("v2", "v4"), ("v1", "v5"), ("v4", "v6")],
vertex_functions=[sympy.Nand]*6)
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=None, n_attractors=8))
# 26, 27
G = graphs.Network(vertex_names=["A1", "B1", "B2", "C1", "C2"],
edges=[("A1", "A1"), ("B1", "B2"), ("B2", "B1"), ("C1", "C2"), ("C2", "C1")],
vertex_functions=[sympy.And]*5)
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=3, P=None, n_attractors=20)) # offsets!
# 28, 29
# a failed random graph added as a constant test
G = graphs.Network(
vertex_names=['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16',
'17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31',
'32', '33', '34'],
edges=[('1', '2'), ('2', '16'), ('3', '17'), ('5', '15'), ('6', '29'), ('7', '28'), ('8', '22'),
('9', '28'), ('10', '18'), ('11', '15'), ('12', '24'), ('13', '14'), ('15', '18'), ('16', '26'),
('17', '27'), ('18', '20'), ('19', '23'), ('20', '27'), ('23', '26'), ('24', '29'), ('25', '33'),
('26', '30'), ('27', '32'), ('28', '32'), ('30', '32'), ('31', '34'), ('32', '33'), ('33', '34')],
vertex_functions=[None, None, sympy.Nand, None, None, None, None, None, None, None, None, None, None, None,
sympy.Or, sympy.Nand,
sympy.Nand, sympy.Nand, sympy.Nand, None, sympy.Xor, None, sympy.And, sympy.Nand,
sympy.Xor, None, sympy.And, sympy.Nand, sympy.And, sympy.Xor, sympy.Or, None, sympy.Or,
sympy.And, sympy.And])
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=2**17))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=2**17))
# 30, 31, 32, 33
G = graphs.Network.parse_cnet("C:\\Users\\ariel\\Downloads\\Attractors - for Ariel"
"\\Attractors - for Ariel\\BNS_Dubrova_2011\\tcr.cnet")
experiments.append(ILPAttractorExperimentParameters(G=G, T=1, P=None, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=2, P=None, n_attractors=8))
experiments.append(ILPAttractorExperimentParameters(G=G, T=6, P=None, n_attractors=9))
experiments.append(ILPAttractorExperimentParameters(G=G, T=8, P=None, n_attractors=9))
print "number of experiments (with keys)={}".format(len(experiments))
for i, experiment in enumerate(experiments):
print "experiment #{}".format(i)
print "n={}, T={}, expected_n_attractors={}".format(len(experiment.G.vertices),
experiment.T, experiment.n_attractors)
# continue
simplify = bool(random.randint(0, 1))
key_slice_size = random.randint(1, 15)
print "key_slice_size={}".format(key_slice_size)
n_attractors = len(find_attractors_onestage_enumeration(G=experiment.G, max_len=experiment.T,
verbose=False,
simplify_general_boolean=simplify,
key_slice_size=key_slice_size))
try:
self.assertEqual(n_attractors, experiment.n_attractors)
except AssertionError as e:
print e
print experiment.G
raise e
except Exception as e:
raise e
# TODO: add dubrova v.s. ILP testing again.
|
arielbro/attractor_learning
|
testing/test_attractors.py
|
Python
|
mit
| 179,258
|
import sys
import os
import warnings
import ruamel.yaml as yaml
from fnmatch import fnmatch
__author__ = "Pymatgen Development Team"
__email__ = "pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ = "shyuep@gmail.com"
__version__ = "2019.7.2"
SETTINGS_FILE = os.path.join(os.path.expanduser("~"), ".pmgrc.yaml")
def _load_pmg_settings():
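    # Settings come from ~/.pmgrc.yaml when it is readable; on IOError the
    # PMG_*-prefixed environment variables (plus a few legacy names such as
    # VASP_PSP_DIR) are used as a fallback.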
try:
with open(SETTINGS_FILE, "rt") as f:
d = yaml.safe_load(f)
except IOError:
# If there are any errors, default to using environment variables
# if present.
d = {}
for k, v in os.environ.items():
if k.startswith("PMG_"):
d[k] = v
elif k in ["VASP_PSP_DIR", "MAPI_KEY", "DEFAULT_FUNCTIONAL"]:
d["PMG_" + k] = v
return dict(d)
SETTINGS = _load_pmg_settings()
# Order of imports is important on some systems to avoid
# failures when loading shared libraries.
# import spglib
# from . import optimization, util
# del(spglib, optimization, util)
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
from pymatgen.core import *
from .electronic_structure.core import Spin, Orbital
from .ext.matproj import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable
def get_structure_from_mp(formula):
"""
Convenience method to get a crystal from the Materials Project database via
the API. Requires PMG_MAPI_KEY to be set.
Args:
formula (str): A formula
Returns:
(Structure) The lowest energy structure in Materials Project with that
formula.
"""
m = MPRester()
entries = m.get_entries(formula, inc_structure="final")
if len(entries) == 0:
raise ValueError("No structure with formula %s in Materials Project!" %
formula)
elif len(entries) > 1:
warnings.warn("%d structures with formula %s found in Materials "
"Project. The lowest energy structure will be returned." %
(len(entries), formula))
return min(entries, key=lambda e: e.energy_per_atom).structure
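# A minimal, hedged usage sketch (requires PMG_MAPI_KEY to be configured; the
# formula "Fe2O3" is only an illustrative example):
#   fe2o3 = get_structure_from_mp("Fe2O3")
#   print(fe2o3.formula)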
if sys.version_info < (3, 5):
warnings.warn("""
Pymatgen will drop Py2k support from v2019.1.1. Pls consult the documentation
at https://www.pymatgen.org for more details.""")
def loadfn(fname):
"""
Convenience method to perform quick loading of data from a filename. The
type of object returned depends the file type.
Args:
fname (string): A filename.
    Returns:
        (Structure) if *POSCAR*/*CONTCAR*/*.cif/*.vasp
        (Vasprun) if *vasprun*
        (obj) if *json* (passthrough to monty.serialization.loadfn)
        Note that fname is matched using unix-style patterns, i.e., fnmatch.
    """
if (fnmatch(fname, "*POSCAR*") or fnmatch(fname, "*CONTCAR*") or
".cif" in fname.lower()) or fnmatch(fname, "*.vasp"):
return Structure.from_file(fname)
elif fnmatch(fname, "*vasprun*"):
from pymatgen.io.vasp import Vasprun
return Vasprun(fname)
elif fnmatch(fname, "*.json*"):
from monty.serialization import loadfn
return loadfn(fname)
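# Hedged usage sketch: the return type is selected purely by filename pattern,
# e.g. (the paths below are hypothetical):
#   s = loadfn("POSCAR")            # Structure
#   v = loadfn("vasprun.xml.gz")    # Vasprun
#   d = loadfn("settings.json")     # whatever monty.serialization.loadfn returns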
|
blondegeek/pymatgen
|
pymatgen/__init__.py
|
Python
|
mit
| 3,203
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import math
import re
from hub.formats import Format, Formatter
from hub.structures.file import File
from hub.structures.frame import OdhType
class InterlisModelFormat(Format):
name = 'INTERLIS1Model'
label = 'INTERLIS 1 Modell'
description = """
Modell für INTERLIS 1. Dies wird automatisch generiert aus den vorhandenen Daten und sollte von Hand korrigiert
werden
"""
extension = 'ili'
@classmethod
def is_format(cls, input_file, *args, **kwargs):
# ILI is a write-only format for the moment, so identifying it doesn't help us, really.
return False
class InterlisModelFormatter(Formatter):
targets = InterlisModelFormat,
@classmethod
def format(cls, dfs, name, format, *args, **kwargs):
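        # Build one Table per dataframe, wrap them in a single Topic and Model,
        # and return the generated .ili definition as a file group.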
tables = []
for df in dfs:
tables.append(Table(df.name, df))
model = Model(name, [Topic(name, tables)])
return [File.from_string(name + '.ili', model.get_model_definition()).file_group]
class Model(object):
def __init__(self, name, topics):
self.name = sanitize_name(name)
self.topics = topics
def get_model_definition(self):
result = 'TRANSFER {}; \n\n'.format(self.name)
result += '!! ACHTUNG: Dies ist ein automatisch generiertes Modell und sollte nicht ohne Anpassungen \n'
result += '!! verwendet werden.\n\n'
domain = {}
for topic in self.topics:
for table in topic.tables:
domain.update(table.domain)
if len(domain) > 0:
result += 'DOMAIN\n\n'
for k, v in domain.iteritems():
result += '\t{} = {};\n'.format(k, v)
result += '\nMODEL {}\n\n'.format(self.name)
for topic in self.topics:
result += topic.get_topic_definition()
result += '\nEND {}.\n\n'.format(self.name)
result += 'FORMAT FREE;\n\n'
result += '\nCODE\n\tBLANK = DEFAULT, UNDEFINED = DEFAULT, CONTINUE = DEFAULT;\n\t TID = ANY;\n\nEND.'
return result
class Topic(object):
def __init__(self, name, tables):
self.name = sanitize_name(name)
self.tables = tables
def get_topic_definition(self):
result = 'TOPIC {} = \n\n'.format(self.name)
for table in self.tables:
result += table.get_table_definition()
result += '\nEND {}.\n'.format(self.name)
return result
class Table(object):
def __init__(self, name, df):
self.name = sanitize_name(name)
self.df = df
self.fields, self.domain = self.get_fields()
def get_table_definition(self):
result = '\tTABLE {} = \n'.format(self.name)
for field in self.fields:
result += '\t\t{}: {};\n'.format(sanitize_name(field[0]), field[1])
result += '\tNO IDENT\n'
result += '\tEND {};\n'.format(self.name)
return result
def next_nines(self, x):
'''
        Return the largest number with the same number of digits as x, i.e. the
        next run of nines: next_nines(57) == 99, next_nines(4321) == 9999.
'''
return int(10 ** (math.floor(math.log10(x) + 1)) - 1)
def get_bounds(self, name):
bounds = self.df[name].geom_op('bounds')
min = bounds.min()
max = bounds.max()
return [min.minx, min.miny, max.maxx, max.maxy]
def get_fields(self):
domain = {}
fields = []
for name in self.df.columns:
type = self.df[name].odh_type
ili_type = '!! Unbekannter Typ'
if type == OdhType.TEXT:
max_length = self.df[name].str.len().max() if self.df[name].any() else 10
ili_type = 'TEXT*{}'.format(int(max_length))
elif type in (OdhType.INTEGER, OdhType.BIGINT, OdhType.SMALLINT):
min = self.df[name].min()
min = -self.next_nines(-min) if min and min < 0 else 0
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[{} .. {}]'.format(min, max)
elif type == OdhType.FLOAT:
max = self.df[name].max()
max = self.next_nines(max) if max and max > 0 else 0
ili_type = '[0.000 .. {}.999]'.format(max)
elif type == OdhType.BOOLEAN:
ili_type = 'BOOLEAN'
domain['BOOLEAN'] = '(True, False)'
elif type == OdhType.DATETIME:
ili_type = 'DATE' # actually, this can't include time in interlis. oh well.
else:
first_valid = self.df[name].first_valid_index()
if type == OdhType.GEOMETRY and first_valid is not None:
import shapely.geometry as shp
value = self.df[name][first_valid]
if isinstance(value, shp.Point):
ili_type = 'POINT'
domain['POINT'] = 'COORD2 {:.3f} {:.3f} {:.3f} {:.3f}'.format(*self.get_bounds(name))
elif isinstance(value, (shp.LineString, shp.LinearRing)):
ili_type = ('POLYLINE WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
elif isinstance(value, shp.Polygon):
ili_type = ('AREA WITH (STRAIGHTS) '
'VERTEX COORD2 {:.3f} {:.3f} {:.3f} {:.3f} '
'WITHOUT OVERLAPS > 0.001').format(*self.get_bounds(name))
else:
ili_type = '!! Geometrie-Feld'
optional = 'OPTIONAL ' if self.df[name].isnull().any() else ''
fields.append((name, optional + ili_type))
return fields, domain
def sanitize_name(name):
sanitized = re.sub(r'[^A-Za-z0-9_\s]', '', name)
return ''.join([s.capitalize() for s in re.split(r'\s', sanitized.strip())])
|
hsr-ba-fs15-dat/opendatahub
|
src/main/python/hub/formats/interlis_model.py
|
Python
|
mit
| 6,028
|
import json
import os
import glob
import sys
import logging
from watson_developer_cloud import WatsonException
if '__file__' in globals():
sys.path.insert(0, os.path.join(os.path.abspath(__file__), 'scripts'))
else:
sys.path.insert(0, os.path.join(os.path.abspath(os.getcwd()), 'scripts'))
from discovery_setup_utils import ( # noqa
discovery,
curdir,
get_constants,
write_progress
)
# set the DATA_TYPE the same to what was downloaded
DATA_TYPE = 'travel'
# set the TRAINING_PATH to the location of the training data relative
# to the 'data' directory
# by default, evaluates to <DATA_TYPE>/training
TRAINING_PATH = os.path.join(DATA_TYPE, 'training')
DATA_DIRECTORY = os.path.abspath(os.path.join(curdir, '..', 'data'))
TRAINING_DIRECTORY = os.path.join(DATA_DIRECTORY, TRAINING_PATH)
LOG_FILE_PATH = os.path.join(DATA_DIRECTORY, 'training_upload.log')
logging.basicConfig(filename=LOG_FILE_PATH,
filemode='w',
format='%(asctime)s %(levelname)s: %(message)s',
level=logging.INFO)
def upload_training_doc(training_json, environment_id, collection_id):
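    # Push one training query (natural language query plus examples) to the
    # Discovery collection; failures are logged rather than raised.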
try:
r = discovery.add_training_data_query(
environment_id=environment_id,
collection_id=collection_id,
natural_language_query=training_json['natural_language_query'],
examples=training_json['examples'])
logging.info("Response:\n%s", json.dumps(r, indent=4))
except WatsonException as exception:
logging.error(exception)
def upload_training_data(training_directory):
print("Training directory: %s" % training_directory)
files = glob.glob(os.path.join(training_directory, '*.json'))
total_files = len(files)
print("Number of files to process: %d" % total_files)
training_data_uploaded = 0
done_percent = 0
write_progress(training_data_uploaded, total_files)
for file in files:
with open(file, 'rb') as file_object:
logging.info("Processing file: %s", file_object.name)
upload_training_doc(
json.loads(file_object.read()),
discovery_constants['environment_id'],
discovery_constants['collection_id']['trained']
)
training_data_uploaded += 1
done_percent = write_progress(training_data_uploaded,
total_files,
done_percent)
logging.info("Finished uploading %d files", total_files)
print("\nFinished uploading %d files" % total_files)
print('Retrieving environment and collection constants...')
"""
retrieve the following:
{
environment_id: env_id,
collection_id: {
trained: trained_id
}
}
"""
discovery_constants = get_constants(
discovery,
trained_name=os.getenv(
'DISCOVERY_TRAINED_COLLECTION_NAME',
'knowledge_base_trained'
)
)
print('Constants retrieved!')
print(discovery_constants)
print("Log file located at: %s" % LOG_FILE_PATH)
upload_training_data(TRAINING_DIRECTORY)
|
watson-developer-cloud/discovery-starter-kit
|
notebooks/scripts/upload_training_data.py
|
Python
|
mit
| 3,238
|
row_key = "Rows"
column_key = "Cols"
special_keywords = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov',
'dec', 'january', 'february', 'march', 'april', 'may', 'june', 'july', 'august', 'september',
'october', 'november', 'december', 'q1', 'q2', 'q3', 'q4']
# cell classes
text_cell = "text"
number_cell = "number"
empty_cell = "empty"
date_cell = "date"
blank_cell = "blank"
row_orientation = "row"
column_orientation = "col"
def data_to_string(data):
return str(data)
|
usc-isi-i2/etk
|
etk/timeseries/annotation/utility.py
|
Python
|
mit
| 566
|
import sys
from PyQt4 import QtGui
from PyQt4 import QtCore
from PyQt4.QtGui import *
import os
class Window(QtGui.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.filename = None
self.initUI()
def initUI(self):
self.italic_flag = False
self.underline_flag = False
self.path = os.path.abspath(__file__)
self.icon_path = "/".join(self.path.split("/")[:-1]+["icons"])
self.exitclick = self.add_action("Exit", "Ctrl+Q",
"/".join([self.icon_path,"exit_icon.png"]),
qApp.quit)
self.newclick = self.add_action("New", "Ctrl+N",
"/".join([self.icon_path,"new_icon.png"]),
self.newfile)
self.openclick = self.add_action("Open", "Ctrl+O",
"/".join([self.icon_path,"open_icon.png"]),
self.openfile)
self.saveclick = self.add_action("Save", "Ctrl+S",
"/".join([self.icon_path,"save_icon.png"]),
self.savefile)
self.saveasclick = self.add_action("SaveAs", "Ctrl+Shift+S",
"/".join([self.icon_path,"save_as_icon.gif"]),
self.save_asfile)
self.copyclick = self.add_action("Copy", "Ctrl+C",
"/".join([self.icon_path,"copy_icon.png"]),
self.copy)
self.pasteclick = self.add_action("Paste", "Ctrl+V",
"/".join([self.icon_path,"paste_icon.jpg"]),
self.paste)
#self.printclick = self.add_action("Print", "Ctrl+P",
# "/".join([self.icon_path,"print_icon.jpg"]),
# self.printclick)
        self.close_tab_click = self.add_action("Close", "Ctrl+W",
                                           "",  # no icon file for this action
                                           self.close_tab)
self.italicclick = self.add_action("Italic", "Ctrl+I",
"/".join([self.icon_path,"italic_icon.png"]),
self.italic)
self.boldclick = self.add_action("Bold", "Ctrl+B",
"/".join([self.icon_path,"bold_icon.png"]),
self.bold)
self.underlineclick = self.add_action("Underline", "Ctrl+U",
"/".join([self.icon_path,"underline_icon.png"]),
self.underline)
tab = QTextEdit()
self.tab_widget = QTabWidget()
self.tab_widget.tabsClosable()
textEditf = QFont()
layout = QVBoxLayout(tab)
QtCore.QObject.connect(self.tab_widget,
QtCore.SIGNAL('tabCloseRequested(int)'),
self.close_tab)
self.setCentralWidget(self.tab_widget)
self.statusBar()
self.toolbar = self.addToolBar('New')
self.toolbar.addAction(self.newclick)
self.toolbar.addAction(self.saveclick)
self.toolbar.addAction(self.saveasclick)
self.toolbar.addAction(self.openclick)
self.toolbar.addAction(self.exitclick)
self.toolbar.addAction(self.copyclick)
self.toolbar.addAction(self.pasteclick)
self.toolbar.addAction(self.boldclick)
self.toolbar.addAction(self.italicclick)
self.toolbar.addAction(self.underlineclick)
menubar = self.menuBar()
fileMenu = menubar.addMenu('File')
fileMenu.addAction(self.newclick)
fileMenu.addAction(self.openclick)
fileMenu.addAction(self.saveclick)
fileMenu.addAction(self.saveasclick)
fileMenu.addAction(self.close_tab_click)
#fileMenu.addAction(printclick)
fileMenu.addAction(self.exitclick)
editMenu = menubar.addMenu('Edit')
editMenu.addAction(self.copyclick)
editMenu.addAction(self.pasteclick)
viewMenu = menubar.addMenu('View')
viewMenu.addAction(self.italicclick)
viewMenu.addAction(self.boldclick)
viewMenu.addAction(self.underlineclick)
self.showMaximized()
self.show()
def add_action(self, action_name, shortcut=None, icon_path=None, trigger_action=None ):
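        # Helper: build a QAction with the given icon, shortcut, status tip
        # and connected handler.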
action = QAction(QIcon(icon_path), action_name, self)
action.setShortcut(shortcut)
action.setStatusTip(action_name)
action.triggered.connect(trigger_action)
return action
def keyReleaseEvent(self, e):
tab_index = self.tab_widget.currentIndex()
tabText = self.tab_widget.tabText(tab_index)
self.tab_widget.tabBar().setTabTextColor(tab_index,
QColor(255,0,0))
if tab_index < 0:
return
if tabText != "untitled*" and tabText[-1] != "*":
tabText = tabText+"*"
self.tab_widget.setTabText(tab_index,tabText)
def close_tab(self):
print "closing tab"
tab_index = self.tab_widget.currentIndex()
if tab_index < 0:
qApp.quit()
return
tabText = self.tab_widget.tabText(tab_index)
if tabText[-1] == "*":
msgBox = QMessageBox()
msgBox.setText("The document has been modified.")
msgBox.setInformativeText("Do you want to save your changes?")
msgBox.setStandardButtons(QMessageBox.Save | QMessageBox.Discard | QMessageBox.Cancel)
msgBox.setDefaultButton(QMessageBox.Save)
ret = msgBox.exec_()
if ret == QMessageBox.Save:
self.savefile()
self.close_tab()
elif ret == QMessageBox.Discard:
pass
elif ret == QMessageBox.Cancel:
return
self.tab_widget.removeTab(tab_index)
"""
def printfile(self):
#print_cmd = 'lp -d NetPrinter filename'
text=self.textEdit.toPlainText()
os.popen(str(text))
#self.textEdit.print_(os.printer)
"""
def italic(self):
italic_button = self.toolbar.widgetForAction(self.italicclick)
italic_icon = QIcon("/".join([self.icon_path,"italic_icon.png"]))
print self.italic_flag
if not self.italic_flag:
new_pixmap = italic_icon.pixmap(QtCore.QSize(20,20),QIcon.Disabled,QIcon.On)
else:
new_pixmap = italic_icon.pixmap(QtCore.QSize(20,20),QIcon.Active, QIcon.On)
new_icon = QIcon(new_pixmap)
italic_button.setIcon(new_icon)
tab_index = self.tab_widget.currentIndex()
textEdit = self.tab_widget.widget(tab_index)
if not textEdit:
return
textEdit.setFontItalic(not self.italic_flag)
self.italic_flag = not self.italic_flag
def bold(self):
bold_button = self.toolbar.widgetForAction(self.boldclick)
bold_icon = QIcon("/".join([self.icon_path,"bold_icon.png"]))
tab_index = self.tab_widget.currentIndex()
textEdit = self.tab_widget.widget(tab_index)
if not textEdit:
return
font_weight = textEdit.fontWeight()
if font_weight == 50:
new_pixmap = bold_icon.pixmap(QtCore.QSize(20,20),QIcon.Disabled,QIcon.On)
font_weight = 75
textEdit.setFontWeight(font_weight)
else:
new_pixmap = bold_icon.pixmap(QtCore.QSize(20,20),QIcon.Active, QIcon.On)
font_weight = 50
textEdit.setFontWeight(font_weight)
new_icon = QIcon(new_pixmap)
bold_button.setIcon(new_icon)
def underline(self):
tab_index = self.tab_widget.currentIndex()
textEdit = self.tab_widget.widget(tab_index)
if not textEdit:
return
if not self.underline_flag:
status = QIcon.Disabled
else:
status = QIcon.Active
textEdit.setFontUnderline(not self.underline_flag)
button = self.toolbar.widgetForAction(self.underlineclick)
icon = QIcon("/".join([self.icon_path,"underline_icon.png"]))
new_pixmap = icon.pixmap(QtCore.QSize(20,20),status,QIcon.On)
new_icon = QIcon(new_pixmap)
button.setIcon(new_icon)
self.underline_flag = not self.underline_flag
def copy(self):
tab_index = self.tab_widget.currentIndex()
if tab_index < 0:
return
textEdit = self.tab_widget.widget(tab_index)
textEdit.copy()
def paste(self):
tab_index = self.tab_widget.currentIndex()
if tab_index < 0:
return
textEdit = self.tab_widget.widget(tab_index)
textEdit.paste()
def savefile(self):
tab_index = self.tab_widget.currentIndex()
if tab_index < 0:
return
textEdit = self.tab_widget.widget(tab_index)
filename = self.tab_widget.tabText(tab_index)
if filename == "untitled*":
self.save_asfile()
return
if filename[-1] == "*":
filename = filename[:-1]
f=open(filename, 'w')
f.write(textEdit.toPlainText())
f.close()
self.tab_widget.setTabText(tab_index,filename)
self.tab_widget.tabBar().setTabTextColor(tab_index, QColor(0,0,0))
def save_asfile(self):
tab_index = self.tab_widget.currentIndex()
if tab_index < 0:
return
textEdit = self.tab_widget.widget(tab_index)
filename = QFileDialog.getSaveFileName(self,"Save File",os.getcwd())
print filename
f=open(filename, 'w')
f.write(textEdit.toPlainText())
f.close()
self.tab_widget.tabBar().setTabTextColor(tab_index, QColor(0,0,0))
self.tab_widget.setTabText(tab_index,filename.split("/")[-1])
def openfile(self):
filename = QFileDialog.getOpenFileName(self,"Open File",os.getcwd())
print filename
f=open(filename, 'r')
text=f.read()
f.close()
textEdit = QTextEdit()
textEdit.setText(text)
self.tab_widget.addTab(textEdit,filename.split("/")[-1])
tab_count = self.tab_widget.count()
tabbar = self.tab_widget.tabBar()
close_tab_click = QAction(QIcon("/".join([self.icon_path,"dialog-close.svg"])),"",self)
close_tab_click.triggered.connect(self.close_tab)
but = QToolButton()
but.setDefaultAction(close_tab_click)
self.tab_widget.tabBar().setTabButton(tab_count-1,QTabBar.RightSide,but)
self.tab_widget.tabsClosable()
self.show()
def newfile(self):
tab = QTextEdit()
layout = QVBoxLayout(tab)
self.tab_widget.addTab(tab,"untitled*")
tab_count = self.tab_widget.count()
tabbar = self.tab_widget.tabBar()
close_tab_click = QAction(QIcon("/".join([self.icon_path,"dialog-close.svg"])),"",self)
close_tab_click.triggered.connect(self.close_tab)
but = QToolButton()
but.setDefaultAction(close_tab_click)
self.tab_widget.tabBar().setTabButton(tab_count-1,QTabBar.RightSide,but)
self.tab_widget.tabsClosable()
self.show()
def closeEvent(self, event):
reply = QMessageBox.question(self,
'Message',
"Are you sure you want to quit?",
QMessageBox.Yes | QMessageBox.No,
QMessageBox.No)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
def KudoEdit():
app = QApplication(sys.argv)
window = Window()
sys.exit(app.exec_())
#KudoEdit()
|
PyKudos/KudoEdit
|
KudoEdit/KudoEdit.py
|
Python
|
mit
| 12,273
|
from unittest import TestCase
from PyProjManCore.task import Task
class TestTaskOperationsExceptions(TestCase):
"""Test Exceptions for Task operations"""
def test_append_duplicate_prereq(self):
"""Test appending duplicate prerequisites to a task, it should be unique"""
root = Task("Root Task")
parent = Task("Parent Task")
root.append_prerequisite(parent)
root.append_prerequisite(parent)
self.assertNotEqual(2, len(root.prerequisites))
def test_cyclic_dependency(self):
"""
Test case of a cyclic dependency, i.e. a Task depends on itself,
or a task has both prerequisite and child the same
"""
self.fail("Not implemented ")
def test_append_duplicate_dep(self):
"""Test appending duplicate dependants to a task, it should be unique"""
root = Task("Root Task")
child = Task("Child Task")
root.append_dependant(child)
root.append_dependant(child)
self.assertNotEqual(2, len(root.dependants))
|
aawadall/PyProjMan
|
UnitTesting/test_task_exceptions.py
|
Python
|
mit
| 1,048
|
"""
https://leetcode.com/explore/interview/card/top-interview-questions-hard/116/array-and-strings/827/
"""
from unittest import TestCase
from kevin.leet.product_except_self import Solution, SolutionOptimized
class TestProductExceptSelf(TestCase):
def _base_test_product_except_self(self, nums, expected):
for sol_class in [Solution, SolutionOptimized]:
sol = sol_class()
actual = sol.product_except_self(nums)
assert expected == actual, (expected, actual)
def test_product_except_self_basic(self):
nums = [1, 2, 3, 4]
expected = [24, 12, 8, 6]
self._base_test_product_except_self(nums, expected)
|
kalyons11/kevin
|
kevin/tests/leet/test_product_except_self.py
|
Python
|
mit
| 678
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10a1 on 2016-06-21 09:08
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('discussions', '0004_post_author'),
]
operations = [
migrations.AlterField(
model_name='post',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
Udayraj123/dashboard_IITG
|
Binder/discussions/migrations/0005_auto_20160621_1438.py
|
Python
|
mit
| 572
|
##################################################################
# Copyright 2018 Open Source Geospatial Foundation and others #
# licensed under MIT, Please consult LICENSE.txt for details #
##################################################################
from pywps import Process
from pywps.inout import LiteralInput, LiteralOutput
from pywps.inout.literaltypes import ValuesReference
from six import text_type  # text_type is not defined in this module otherwise; six's py2/py3 text alias is assumed here
class SimpleProcess(Process):
identifier = "simpleprocess"
def __init__(self):
self.add_input(LiteralInput())
class UltimateQuestion(Process):
def __init__(self):
super(UltimateQuestion, self).__init__(
self._handler,
identifier='ultimate_question',
title='Ultimate Question',
outputs=[LiteralOutput('outvalue', 'Output Value', data_type='string')])
@staticmethod
def _handler(request, response):
response.outputs['outvalue'].data = '42'
return response
class Greeter(Process):
def __init__(self):
super(Greeter, self).__init__(
self.greeter,
identifier='greeter',
title='Greeter',
inputs=[LiteralInput('name', 'Input name', data_type='string')],
outputs=[LiteralOutput('message', 'Output message', data_type='string')]
)
@staticmethod
def greeter(request, response):
name = request.inputs['name'][0].data
assert type(name) is text_type
response.outputs['message'].data = "Hello {}!".format(name)
return response
class InOut(Process):
def __init__(self):
super(InOut, self).__init__(
self.inout,
identifier='inout',
title='In and Out',
inputs=[
LiteralInput('string', 'String', data_type='string'),
LiteralInput('time', 'Time', data_type='time',
default='12:00:00'),
LiteralInput('ref_value', 'Referenced Value', data_type='string',
allowed_values=ValuesReference(reference="https://en.wikipedia.org/w/api.php?action=opensearch&search=scotland&format=json"), # noqa
default='Scotland',),
],
outputs=[
LiteralOutput('string', 'Output', data_type='string')
]
)
@staticmethod
def inout(request, response):
a_string = request.inputs['string'][0].data
        response.outputs['string'].data = "{}".format(a_string)  # echo the input string back
return response
|
bird-house/PyWPS
|
tests/processes/__init__.py
|
Python
|
mit
| 2,519
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CMS.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
IEEEDTU/CMS
|
manage.py
|
Python
|
mit
| 246
|
from RGT.XML.SVG.basicSvgNode import BasicSvgNode
from RGT.XML.SVG.Attribs.conditionalProcessingAttributes import ConditionalProcessingAttributes
from RGT.XML.SVG.Attribs.xlinkAttributes import XlinkAttributes
from RGT.XML.SVG.Attribs.animationTimingAttributes import AnimationTimingAttributes
class BaseAnimationNode(BasicSvgNode, ConditionalProcessingAttributes, XlinkAttributes, AnimationTimingAttributes):
ATTRIBUTE_EXTERNAL_RESOURCES_REQUIRED = 'externalResourcesRequired'
def __init__(self, ownerDoc, tagName):
BasicSvgNode.__init__(self, ownerDoc, tagName)
ConditionalProcessingAttributes.__init__(self)
XlinkAttributes.__init__(self)
AnimationTimingAttributes.__init__(self)
self._allowedSvgChildNodes.update(self.SVG_GROUP_DESCRIPTIVE_ELEMENTS)
def setExternalResourcesRequired(self, data):
allowedValues = ['true', 'false']
if data is not None:
if data not in allowedValues:
values = ''
for value in allowedValues:
values += value + ', '
values = values[0: len(values) - 2]
raise ValueError('Value not allowed, only ' + values + 'are allowed')
else:
self._setNodeAttribute(self.ATTRIBUTE_EXTERNAL_RESOURCES_REQUIRED, data)
def getExternalResourcesRequired(self):
node = self._getNodeAttribute(self.ATTRIBUTE_EXTERNAL_RESOURCES_REQUIRED)
if node is not None:
return node.nodeValue
return None
|
danrg/RGT-tool
|
src/RGT/XML/SVG/Animation/baseAnimationNode.py
|
Python
|
mit
| 1,499
|
from __future__ import unicode_literals
from django.apps import AppConfig
from django.db.models import signals
from django.contrib.auth import get_user_model
def populate_users(sender, **kwargs):
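    # Seed ten demo staff accounts (user_1 .. user_10) after migrations,
    # skipping any username that already exists.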
User = get_user_model()
for i in range(10):
username = "user_{}".format(i+1)
email = "{}@example.com".format(username)
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = User.objects.create_user(username=username,
email=email,
is_staff=True,
password="demo1234")
class PresenceConfig(AppConfig):
name = 'apps.presence'
def ready(self):
signals.post_migrate.connect(populate_users, sender=self)
|
nav/presence
|
presence/apps/presence/apps.py
|
Python
|
mit
| 836
|
import json
import numpy as np
import pytest
import requests
from mipframework.testutils import get_test_params
from tests import vm_url
from tests.algorithm_tests.test_logistic_regression import expected_file
headers = {"Content-type": "application/json", "Accept": "text/plain"}
url = vm_url + "LOGISTIC_REGRESSION"
@pytest.mark.parametrize(
"test_input, expected", get_test_params(expected_file, slice(95, 100))
)
def test_logistic_regression_algorithm_exareme(test_input, expected):
result = requests.post(url, data=json.dumps(test_input), headers=headers)
result = json.loads(result.text)
result = result["result"][0]["data"]
assert are_collinear(result["Coefficients"], expected["coeff"])
def are_collinear(u, v):
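    # Two coefficient vectors count as collinear when the absolute value of
    # their cosine similarity is ~1 (within tolerance).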
cosine_similarity = np.dot(v, u) / (np.sqrt(np.dot(v, v)) * np.sqrt(np.dot(u, u)))
return np.isclose(abs(cosine_similarity), 1, rtol=1e-5)
|
madgik/exareme
|
Exareme-Docker/src/mip-algorithms/tests/integration_tests/test_exareme_integration_logistic_regression.py
|
Python
|
mit
| 894
|
# To install another language:
# - update default_bridges below
# - update languages.cson in seamless/compiler
from ...highlevel.Environment import ContextEnvironment, Environment
from . import r
_default_bridges = {
"r": {
"code": r.bridge_r,
"params": r.default_bridge_parameters,
"environment": {
"which": ["R", "Rscript"],
"conda": """
dependencies:
- rpy2
""",
}
}
}
def load_py_bridges(env: ContextEnvironment):
for lang in _default_bridges:
b = _default_bridges[lang]
if env._py_bridges is None:
env._py_bridges = {}
env.set_py_bridge(lang, b["code"])
params = b.get("params")
if params is not None:
env.set_py_bridge_parameters(lang, params)
if "environment" in b:
e = b["environment"]
bridge_env = Environment()
if "conda" in e:
bridge_env.set_conda(e["conda"], format="yaml")
if "which" in e:
bridge_env.set_which(e["which"], format="plain")
env.set_py_bridge_environment(lang, bridge_env)
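# Hedged sketch (illustrative only, not part of the original module): following
# the header comment, a hypothetical additional language would add an entry
# shaped like the "r" one above, e.g.
#
#   _default_bridges["julia"] = {
#       "code": julia.bridge_julia,                  # assumed bridge callable
#       "params": julia.default_bridge_parameters,   # assumed defaults
#       "environment": {
#           "which": ["julia"],
#           "conda": "dependencies:\n  - julia\n",
#       },
#   }
#
# plus the matching update to languages.cson in seamless/compiler;
# load_py_bridges() then registers it on the context environment exactly like
# the built-in "r" bridge.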
|
sjdv1982/seamless
|
seamless/metalevel/python_bridges/__init__.py
|
Python
|
mit
| 1,180
|
r"""
Utilities and helper classes/functions
======================================
This module contains two very important classes (Project and Workspace)
as well as a number of helper classes.
"""
import logging as logging
from .misc import *
from ._settings import *
from ._workspace import *
from ._project import *
# You can add info to the logger message by inserting the desired %(item)
# For a list of available items see:
# https://docs.python.org/3/library/logging.html#logrecord-attributes
# NOTE: If the calling locations appears as 'root' it's because the logger
# was not given a name in a file somewhere. A good option is __name__.
log_format = \
'-' * 60 + '\n\
%(levelname)-11s: %(message)s \n\
SOURCE : %(name)s.%(funcName)s \n\
TIME STAMP : %(asctime)s\
\n' + '-' * 60
logging.basicConfig(level=logging.WARNING, format=log_format)
del log_format
def _get_version():
from openpnm.__version__ import __version__
return __version__.strip(".dev0")
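# Hedged usage sketch (illustrative, not part of the package): modules that want
# the SOURCE field of the format above to report their real calling location
# should request a named logger instead of using the root logger, e.g.
#
#   logger = logging.getLogger(__name__)
#   logger.warning("boundary pores were clamped to zero")
#
# which fills %(name)s and %(funcName)s with the module and function name
# instead of 'root'.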
|
PMEAL/OpenPNM
|
openpnm/utils/__init__.py
|
Python
|
mit
| 983
|
import logging
from pajbot.apiwrappers.response_cache import ListSerializer
from pajbot.apiwrappers.twitch.base import BaseTwitchAPI
from pajbot.models.emote import Emote
log = logging.getLogger(__name__)
class TwitchKrakenV5API(BaseTwitchAPI):
authorization_header_prefix = "OAuth"
def __init__(self, client_credentials, redis):
super().__init__(base_url="https://api.twitch.tv/kraken/", redis=redis)
self.session.headers["Accept"] = "application/vnd.twitchtv.v5+json"
self.client_credentials = client_credentials
@property
def default_authorization(self):
return self.client_credentials
def get_stream_status(self, user_id):
data = self.get(["streams", user_id])
def rest_data_offline():
return {
"viewers": -1,
"game": None,
"title": None,
"created_at": None,
"followers": -1,
"views": -1,
"broadcast_id": None,
}
def rest_data_online():
stream = data["stream"]
return {
"viewers": stream["viewers"],
"game": stream["game"],
"title": stream["channel"]["status"],
"created_at": stream["created_at"],
"followers": stream["channel"]["followers"],
"views": stream["channel"]["views"],
"broadcast_id": stream["_id"],
}
online = "stream" in data and data["stream"] is not None
def rest_data():
nonlocal online
if online:
return rest_data_online()
else:
return rest_data_offline()
return {"online": online, **rest_data()}
def set_game(self, user_id, game, authorization):
self.put(["channels", user_id], json={"channel": {"game": game}}, authorization=authorization)
def set_title(self, user_id, title, authorization):
self.put(["channels", user_id], json={"channel": {"status": title}}, authorization=authorization)
def get_vod_videos(self, channel_name):
return self.get(["channels", channel_name, "videos"], {"broadcast_type": "archive"})
def fetch_global_emotes(self):
# circular import prevention
from pajbot.managers.emote import EmoteManager
resp = self.get("/chat/emoticon_images", params={"emotesets": "0"})
return [EmoteManager.twitch_emote(str(data["id"]), data["code"]) for data in resp["emoticon_sets"]["0"]]
def get_global_emotes(self, force_fetch=False):
return self.cache.cache_fetch_fn(
redis_key="api:twitch:kraken:v5:global-emotes",
fetch_fn=lambda: self.fetch_global_emotes(),
serializer=ListSerializer(Emote),
expiry=60 * 60,
force_fetch=force_fetch,
)
|
pajlada/tyggbot
|
pajbot/apiwrappers/twitch/kraken_v5.py
|
Python
|
mit
| 2,893
|
import os
from .input import VaspInput
__author__ = "Guillermo Avendano-Franco"
__copyright__ = "Copyright 2016"
__version__ = "0.1"
__maintainer__ = "Guillermo Avendano-Franco"
__email__ = "gtux.gaf@gmail.com"
__status__ = "Development"
__date__ = "May 13, 2016"
def read_incar(filename='INCAR'):
"""
Load the file INCAR in the directory 'path' or
read directly the file 'path' and return an object
'inputvars' for pychemia
:param filename: (str) Filename of a INCAR file format
:return:
"""
if os.path.isfile(filename):
filename = filename
elif os.path.isdir(filename) and os.path.isfile(filename + '/INCAR'):
filename += '/INCAR'
else:
raise ValueError('[ERROR] INCAR path not found: %s' % filename)
iv = VaspInput(filename=filename)
return iv
def write_incar(iv, filepath='INCAR'):
"""
Takes an object inputvars from pychemia and
save the file INCAR in the directory 'path' or
save the file 'path' as a VASP INCAR file
:param iv: (VaspInput) VASP Input variables
:param filepath: (str) File path to write the INCAR file
"""
if os.path.isdir(filepath):
filename = filepath + '/INCAR'
else:
filename = filepath
iv.write(filename)
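# Hedged usage sketch (illustrative only; assumes VaspInput supports dict-style
# item assignment):
#
#   iv = read_incar('calc_dir')      # accepts a directory or an explicit path
#   iv['ENCUT'] = 520
#   write_incar(iv, 'calc_dir')      # writes calc_dir/INCAR
#
# Both helpers resolve a directory argument to the INCAR file inside it.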
|
MaterialsDiscovery/PyChemia
|
pychemia/code/vasp/incar.py
|
Python
|
mit
| 1,272
|
import _plotly_utils.basevalidators
class OpacitysrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="opacitysrc", parent_name="scattercarpet.marker", **kwargs
):
super(OpacitysrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/scattercarpet/marker/_opacitysrc.py
|
Python
|
mit
| 474
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('umibukela', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='SurveyType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('description', models.TextField()),
],
),
migrations.AlterField(
model_name='partner',
name='context_statement',
field=models.TextField(),
),
migrations.AlterField(
model_name='partner',
name='intro_statement',
field=models.TextField(),
),
migrations.AddField(
model_name='cycleresultset',
name='survey_type',
field=models.ForeignKey(blank=True, to='umibukela.SurveyType', null=True),
),
]
|
Code4SA/umibukela
|
umibukela/migrations/0002_auto_20160120_1337.py
|
Python
|
mit
| 1,080
|
import os
from invoke import task
@task
def test():
os.system('coverage run --source tryagain -m py.test')
os.system('coverage report')
@task
def register(production=False):
target = 'pypi' if production else 'pypitest'
os.system('python3 setup.py register -r %s' % target)
@task
def upload(production=False):
target = 'pypi' if production else 'pypitest'
os.system('python3 setup.py bdist_wheel upload -r %s' % target)
|
tfeldmann/tryagain
|
tasks.py
|
Python
|
mit
| 450
|
# -*- coding:utf-8 -*-
import os
site_title = 'plum.J'
site_description = '\'s blog'
site_url = 'http://plumj.com'
static_url = 'static'
theme_name = 'sealscript'
google_analytics = ''
catsup_path = os.path.dirname(__file__)
posts_path = os.path.join(catsup_path, '_posts')
theme_path = os.path.join(catsup_path, 'themes', theme_name)
common_template_path = os.path.join(catsup_path, 'template')
deploy_path = os.path.join(catsup_path, 'deploy')
twitter = '_plumJ'
weibo = 'dobbyfree'
github = 'plumJ'
disqus_shortname = 'catsup'
feed = 'feed.xml'
post_per_page = 3
links = (
('Leonbb', 'http://leonbb.com', "Leonbb's Blog"),
)
if site_url.endswith('/'):
site_url = site_url[:-1]
if static_url.endswith('/'):
static_url = static_url[:-1]
settings = dict(static_path=os.path.join(theme_path, 'static'),
template_path=os.path.join(theme_path, 'template'),
gzip=True,
site_title=site_title,
site_description=site_description,
site_url=site_url,
twitter=twitter,
weibo=weibo,
github=github,
feed=feed,
post_per_page=post_per_page,
disqus_shortname=disqus_shortname,
links=links,
static_url=static_url,
google_analytics=google_analytics,
)
|
plumJ/catsup
|
config.py
|
Python
|
mit
| 1,211
|
from kaneda.backends import LoggerBackend, ElasticsearchBackend
from kaneda.queues import CeleryQueue
from django_kaneda import settings # NOQA
class TestDjango(object):
def test_django_kaneda_with_backend(self, mocker, django_settings_backend):
mocker.patch('django_kaneda.settings', django_settings_backend)
from django_kaneda import LazyMetrics
metrics = LazyMetrics()
assert isinstance(metrics.backend, ElasticsearchBackend)
result = metrics.gauge('test_gauge', 42)
assert result
def test_django_kaneda_with_debug(self, mocker, django_settings_debug):
mocker.patch('django_kaneda.settings', django_settings_debug)
from django_kaneda import LazyMetrics
metrics = LazyMetrics()
metrics.gauge('test_gauge', 42)
assert isinstance(metrics.backend, LoggerBackend)
def test_django_kaneda_with_queue(self, mocker, django_settings_queue):
mocker.patch('django_kaneda.settings', django_settings_queue)
from django_kaneda import LazyMetrics
metrics = LazyMetrics()
assert isinstance(metrics.queue, CeleryQueue)
result = metrics.gauge('test_gauge', 42)
assert result
|
APSL/kaneda
|
tests/integration/django/test_django.py
|
Python
|
mit
| 1,213
|
# coding: utf-8
"""
/properties/
/properties/:id/
/properties/groups/
/properties/groups/:id/
/properties/groups/:name/
"""
from django.test import TestCase
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from ..models import Property, PropertyGroup
from ..defaults import PROPERTY_TEXT_FIELD
class TestPropertiesCRUD(APITestCase):
fixtures = ['erp_test/tests/fixtures/properties_crud.json',]
def test_property_list(self):
url = reverse('api:property-list')
response = self.client.get(url, format='json')
data = [{'id': obj.id, 'name': obj.name, 'title': obj.title,
'required': obj.required, 'position': obj.position,
'type': obj.type, 'unit': obj.unit
} for obj in Property.objects.all()]
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, data)
def test_property_create(self):
url = reverse('api:property-list')
data = {'name': 'test', 'title': 'Test', 'required': False,
'position': 999, 'type': PROPERTY_TEXT_FIELD}
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_server_update(self):
url = reverse('api:property-detail', args=[1])
data = {'name': 'new server'}
response = self.client.patch(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_server_delete(self):
url = reverse('api:property-detail', args=[1])
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class TestPropertyGroupCRUD(APITestCase):
fixtures = ['erp_test/tests/fixtures/property_groups.json',]
def test_property_group_list(self):
url = reverse('api:property-group-list')
data = [{'id': obj.id, 'name': obj.name}
for obj in PropertyGroup.objects.all()]
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, data)
def test_property_group_detail_by_pk(self):
url = reverse('api:property-group-detail', args=[2])
data = {'id': 2, 'name': 'cpu',
'properties': [
{'id': 2, 'name': 'cpu.socket',
'title': 'CPU Socket', 'required': True,
'position': 2, 'type': 3, 'unit': ''},
]}
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, data)
def test_property_group_detail_by_name(self):
url = reverse('api:property-group-detail-by_name', args=['cpu'])
data = {'id': 2, 'name': 'cpu',
'properties': [
{'id': 2, 'name': 'cpu.socket',
'title': 'CPU Socket', 'required': True,
'position': 2, 'type': 3, 'unit': ''},
]}
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, data)
|
baffolobill/mb_test_1
|
src/mbtest1/erp_test/tests/test_properties.py
|
Python
|
mit
| 3,376
|
# -*- coding: utf-8 -*-
""" Forms for the votes application. """
# standard library
# django
from django import forms
# models
from .models import Vote
# views
from base.forms import BaseModelForm
class VoteForm(BaseModelForm):
"""
    Form for the Vote model.
"""
score = forms.IntegerField(required=True)
class Meta:
model = Vote
widgets = {
'user': forms.HiddenInput(),
'card': forms.HiddenInput(),
}
exclude = ()
|
Angoreher/xcero
|
votes/forms.py
|
Python
|
mit
| 488
|
'''
Usage:
manager puzzle_load (--file <filename>) [--url <url>]
manager puzzle_del (--name <name>) [--url <url>]
manager puzzles [--url <url>]
manager puzzleboards_clear [--url <url>]
manager puzzleboard_consume [--async-url <url>] (--name <name>) [--size <size>]
manager puzzleboard_pop (--name <name>) [--url <url>]
Options:
--async-url <url> The url to the async function endpoint [default: http://localhost:8080/async-function/huntwordsapi]
--url <url> The url to the function [default: http://localhost:8080/function/huntwordsapi]
--file <filename> The filename from which to read the words; one per line
--name <name> The puzzle name to give the dictionary of words
--size <size> The length of a side of the grid on which to place words [default: 15]
-h, --help Print this help text and exit
--version Print the version and exit
'''
from docopt import docopt
from .commands_puzzleboard import command_puzzleboards_clear, command_puzzleboard_consume, command_puzzleboard_pop
from .commands_puzzle import command_puzzle_load, command_puzzles
# Command pattern
verbs = {
'puzzle_load': command_puzzle_load,
'puzzles': command_puzzles,
'puzzleboards_clear': command_puzzleboards_clear,
'puzzleboard_consume': command_puzzleboard_consume,
'puzzleboard_pop': command_puzzleboard_pop
}
if __name__ == '__main__':
opts = docopt(__doc__, version='0.1')
command = [v for k, v in verbs.items() if opts[k]][0]
command(**opts)
|
klmcwhirter/huntwords
|
manager/__main__.py
|
Python
|
mit
| 1,563
|
__author__ = 'Jason Mehring'
#
# This module is only used with WingIDE debugger for testing code within
# The debugging environment
#
# Stage 1: bind /srv/...modules to cache/extmods. No sync will take place
# since files are bound
# BIND
# True : bind custom modules
# False : do not bind custom modules, but will attempt umounting then exit
# None : do not bind custom modules, and do not attempt umounting
BIND = None
import os
import sys
import shutil
import subprocess
import logging
if BIND is not None:
import salt.config
import salt.fileclient
import salt.fileserver
import salt.loader
import salt.modules.saltutil
import salt.pillar
try:
from subprocess import DEVNULL # py3k
except ImportError:
import os
DEVNULL = open(os.devnull, 'wb')
from salt.modules.saltutil import (
_get_top_file_envs, _listdir_recursively, _list_emptydirs
)
from salt.ext.six import string_types
# Enable logging
log = logging.getLogger(__name__)
BASE_DIR = os.getcwd()
# Set salt pillar, grains and opts settings so they can be applied to modules
__opts__ = salt.config.minion_config('/etc/salt/minion')
__opts__['grains'] = salt.loader.grains(__opts__)
pillar = salt.pillar.get_pillar(
__opts__,
__opts__['grains'],
__opts__['id'],
__opts__['environment'],
)
__opts__['pillar'] = pillar.compile_pillar()
__salt__ = salt.loader.minion_mods(__opts__)
__grains__ = __opts__['grains']
__pillar__ = __opts__['pillar']
__context__ = {}
salt.modules.saltutil.__opts__ = __opts__
salt.modules.saltutil.__grains__ = __grains__
salt.modules.saltutil.__pillar__ = __pillar__
salt.modules.saltutil.__salt__ = __salt__
salt.modules.saltutil.__context__ = __context__
from salt.scripts import salt_call
def _bind(form, saltenv=None, umount=False):
'''
Bind the files in salt extmods directory within the given environment
'''
if saltenv is None:
saltenv = _get_top_file_envs()
if isinstance(saltenv, string_types):
saltenv = saltenv.split(',')
ret = []
remote = set()
source = os.path.join('salt://_{0}'.format(form))
mod_dir = os.path.join(__opts__['extension_modules'], '{0}'.format(form))
if not os.path.isdir(mod_dir):
log.info('Creating module dir {0!r}'.format(mod_dir))
try:
os.makedirs(mod_dir)
except (IOError, OSError):
msg = 'Cannot create cache module directory {0}. Check permissions.'
log.error(msg.format(mod_dir))
for sub_env in saltenv:
log.info('Syncing {0} for environment {1!r}'.format(form, sub_env))
cache = []
log.info('Loading cache from {0}, for {1})'.format(source, sub_env))
# Grab only the desired files (.py, .pyx, .so)
cache.extend(
__salt__['cp.cache_dir'](
source, sub_env, include_pat=r'E@\.(pyx?|so)$'
)
)
local_cache_base_dir = os.path.join(
__opts__['cachedir'],
'files',
sub_env
)
log.debug('Local cache base dir: {0!r}'.format(local_cache_base_dir))
local_cache_dir = os.path.join(local_cache_base_dir, '_{0}'.format(form))
log.debug('Local cache dir: {0!r}'.format(local_cache_dir))
client = salt.fileclient.get_file_client(__opts__)
fileserver = salt.fileserver.Fileserver(__opts__)
for fn_ in cache:
relpath = os.path.relpath(fn_, local_cache_dir)
relname = os.path.splitext(relpath)[0].replace(os.sep, '.')
saltpath = os.path.relpath(fn_, local_cache_base_dir)
filenamed = fileserver.find_file(saltpath, sub_env)
remote.add(relpath)
dest = os.path.join(mod_dir, relpath)
if not os.path.isfile(dest):
dest_dir = os.path.dirname(dest)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
shutil.copyfile(fn_, dest)
ret.append('{0}.{1}'.format(form, relname))
# Test to see if already mounted (bound)
cmd = ['findmnt', dest]
proc = subprocess.Popen(cmd, stdout=DEVNULL, stderr=subprocess.STDOUT)
proc.wait()
if proc.returncode:
cmd = ['mount', '--bind', filenamed['path'], dest]
proc = subprocess.Popen(cmd, stdout=DEVNULL, stderr=subprocess.STDOUT)
proc.wait()
elif umount:
cmd = ['umount', dest]
proc = subprocess.Popen(cmd, stdout=DEVNULL, stderr=subprocess.STDOUT)
proc.wait()
touched = bool(ret)
if __opts__.get('clean_dynamic_modules', True):
current = set(_listdir_recursively(mod_dir))
for fn_ in current - remote:
full = os.path.join(mod_dir, fn_)
if os.path.ismount(full):
proc = subprocess.Popen(['umount', full])
proc.wait()
if os.path.isfile(full):
touched = True
try:
os.remove(full)
except OSError: pass
# Cleanup empty dirs
while True:
emptydirs = _list_emptydirs(mod_dir)
if not emptydirs:
break
for emptydir in emptydirs:
touched = True
shutil.rmtree(emptydir, ignore_errors=True)
# Dest mod_dir is touched? trigger reload if requested
if touched:
mod_file = os.path.join(__opts__['cachedir'], 'module_refresh')
with salt.utils.fopen(mod_file, 'a+') as ofile:
ofile.write('')
return ret
def bind_dirs(umount):
_bind('beacons', umount=umount)
_bind('modules', umount=umount)
_bind('states', umount=umount)
_bind('grains', umount=umount)
_bind('renderers', umount=umount)
_bind('returners', umount=umount)
_bind('outputters', umount=umount)
_bind('utils', umount=umount)
if __name__ == '__main__':
argv = sys.argv
def join_path(basepath, paths):
return [os.path.join(basepath, path) for path in paths]
if BIND or BIND is False:
umount = not BIND
path = BASE_DIR.split(os.sep)
srv_dir = '/srv'
if srv_dir.lstrip(os.sep) in path:
index = BASE_DIR.index(srv_dir)
basepath = os.sep.join(path[:6+1])
cur_dirs = join_path(basepath, os.listdir(basepath))
srv_dirs = join_path(srv_dir, os.listdir(srv_dir))
for path in cur_dirs:
if path not in srv_dirs:
basename = os.path.basename(path)
if basename in os.listdir(srv_dir):
dest = os.path.join(srv_dir, basename)
# Test to see if already mounted (bound)
cmd = ['findmnt', dest]
proc = subprocess.Popen(cmd, stdout=DEVNULL, stderr=subprocess.STDOUT)
proc.wait()
if proc.returncode:
print 'mounting:', path, dest
cmd = ['mount', '--bind', path, dest]
proc = subprocess.Popen(cmd, stdout=DEVNULL, stderr=subprocess.STDOUT)
proc.wait()
elif umount:
cmd = ['umount', dest]
proc = subprocess.Popen(cmd, stdout=DEVNULL, stderr=subprocess.STDOUT)
proc.wait()
# Bind custom modules
bind_dirs(umount)
if not BIND:
sys.exit()
salt_call()
|
DockerNAS/yamlscript-formula
|
src/salt-call.py
|
Python
|
mit
| 7,810
|
import json, codecs, re
from abc import ABCMeta, abstractmethod
from PIL import Image, ExifTags
from witica.util import throw, sstr, suni
#regular expressions regarding item ids
RE_METAFILE = r'^meta\/[^\n]+$'
RE_FIRST_ITEMID = r'(?!meta\/)[^\n?@.]+'
RE_ITEMFILE_EXTENSION = r'[^\n?@\/]+'
RE_ITEMID = r'^' + RE_FIRST_ITEMID + '$'
RE_ITEMFILE = r'^' + RE_FIRST_ITEMID + '\.' + RE_ITEMFILE_EXTENSION + '$'
RE_ITEM_SPLIT_ITEMID_EXTENSION = r'^(' + RE_FIRST_ITEMID + ')\.(' + RE_ITEMFILE_EXTENSION + ')$'
RE_ITEM_REFERENCE = r'^!(?:.\/)?' + RE_FIRST_ITEMID + '$'
#regular expressions to be used for md files parsing
RE_MD_SPLIT_JSON_MD = "^\s*({[\s\S]*?})?[\s]*([^}\s][\s\S]*)$" #splits md file into the json metadata and markdown sections as capture groups
RE_MD_SPLIT_TITLE_BODY = "^(?:#(?!#)[\t ]*([\S][^\n\r]*)(?:\n|\r\n?|$))?([\s\S]*)$" #splits markdown section into title and body sections as capture groups
RE_MD_NOBRACKET = r'[^\]\[]*'
RE_MD_BRK = ( r'\[('
+ (RE_MD_NOBRACKET + r'(\[')*6
+ (RE_MD_NOBRACKET+ r'\])*')*6
+ RE_MD_NOBRACKET + r')\]' )
RE_MD_IMAGE_LINK = r'\!' + RE_MD_BRK + r'\s*\((?!\!)(<.*?>|([^")]+"[^"]*"|[^\)]*))\)'
#  or 
#RE_MD_ITEM_LINK = r'\!' + RE_MD_BRK + r'\s*\(\!(<.*?>|([^")]+"[^"]*"|[^\)]*))\)'
#  or 
RE_MD_ITEM_LINK = r'!({[\s\S]*?})?\((![\s\S]+?)\)'
# !{renderparametersjson}(!itemid)
registered_extractors = [];
def register(extension, extractor):
"""Register new metadata extractor for file extension"""
for (ext,extr) in registered_extractors:
if extension == ext:
raise ValueError("A metadata extractor for extension '" + extension + "' is already registered.")
#TODO: check type of extractor
registered_extractors.append((extension,extractor))
#print("registered: " + extension + " " + sstr(extractor))
def register_default_extractors():
register("item", JSONExtractor)
register("json", JSONExtractor)
register("md", MDExtractor)
register("txt", MDExtractor)
register("jpg", ImageExtractor)
register("jpeg", ImageExtractor)
def is_supported(extension):
for (ext,extractor) in registered_extractors:
if extension == ext:
return True
return False
def extract_metadata(filename):
extension = filename.rpartition(".")[2]
for (ext,extractor) in registered_extractors:
if extension == ext:
return extractor().extract_metadata(filename)
raise ValueError("Could not extract metadata, because a metadata extractor for extension '" + extension + "' is not registered.")
class MetadataExtractor(object):
__metaclass__ = ABCMeta
"""Abstract class representing a metadata extractor"""
supported_extensions = [];
def __init__(self):
pass
@abstractmethod
def extract_metadata(self, filename):
"""Extract metadata from filename and return metadata as json"""
pass
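# Hedged sketch (illustrative only): supporting a new file type means
# subclassing MetadataExtractor and registering it for its extension, e.g.
#
#   class PNGExtractor(MetadataExtractor):
#       supported_extensions = ["png"]
#       def extract_metadata(self, filename):
#           return {"type": "image"}
#
#   register("png", PNGExtractor)
#
# after which extract_metadata() dispatches to it based on the file extension.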
class JSONExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from item or json file"""
supported_extensions = ["item", "json"];
def __init__(self):
pass
def extract_metadata(self, filename):
"""Extract metadata from filename and return metadata as json"""
f = codecs.open(filename, mode="r", encoding="utf-8")
return json.loads(f.read())
class MDExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from markdown file"""
supported_extensions = ["md", "txt"];
def __init__(self):
pass
def extract_metadata(self, filename):
try:
meta = {}
#split into json and markdown part
f = codecs.open(filename, mode="r", encoding="utf-8")
match = re.match(RE_MD_SPLIT_JSON_MD,f.read())
f.close()
if not match:
raise IOError("Extracting metadata from file '" + sstr(filename) + "' failed. Could not split JSON and markdown parts.")
jsonstr, mdstr = match.groups()
#get title string (first heading in markdown string) if available
title = re.match(RE_MD_SPLIT_TITLE_BODY,mdstr).group(1)
if not title == None:
meta["title"] = title
#update with explicit json
if not jsonstr == None:
meta.update(json.loads(jsonstr))
return meta
except Exception, e:
throw(IOError, "Extracting metadata from file '" + sstr(filename) + "' failed.", e)
class ImageExtractor(MetadataExtractor):
__metaclass__ = ABCMeta
"""Extracts metadata from markdown file"""
supported_extensions = ["jpg", "jpeg"];
def __init__(self):
pass
def extract_metadata(self, filename):
try:
meta = {"type": "image"}
img = Image.open(filename)
exif = {
ExifTags.TAGS[k]: v
for k, v in img._getexif().items()
if k in ExifTags.TAGS
}
if ("ImageDescription" in exif or "UserComment" in exif):
if "UserComment" in exif:
meta["title"] = exif["UserComment"]
if "ImageDescription" in exif:
meta["title"] = exif["ImageDescription"]
if ("Make" in exif or "Model" in exif):
meta["camera"] = (exif["Make"] if "Make" in exif else "") + " " + (exif["Model"] if "Model" in exif else "")
if ("Orientation" in exif):
meta["orientation"] = exif["Orientation"]
if ("Artist" in exif):
meta["author"] = exif["Artist"]
if ("DateTimeOriginal" in exif):
meta["created"] = exif["DateTimeOriginal"] #TODO: convert to unix time
if ("Flash" in exif):
meta["flash"] = exif["Flash"]
if ("GPSInfo" in exif):
lat, lon = self.get_lat_lon(exif["GPSInfo"])
if lat and lon:
meta["lat"] = lat
meta["lon"] = lon
return meta
except Exception, e:
throw(IOError, "Extracting metadata from file '" + sstr(filename) + "' failed.", e)
    # The remaining functions in the ImageExtractor class are originally by Eran Sandler (MIT license), see https://gist.github.com/erans/983821
def _get_if_exist(self, data, key):
if key in data:
return data[key]
return None
def _convert_to_degress(self, value):
"""Helper function to convert the GPS coordinates stored in the EXIF to degress in float format"""
d0 = value[0][0]
d1 = value[0][1]
d = float(d0) / float(d1)
m0 = value[1][0]
m1 = value[1][1]
m = float(m0) / float(m1)
s0 = value[2][0]
s1 = value[2][1]
s = float(s0) / float(s1)
return d + (m / 60.0) + (s / 3600.0)
def get_lat_lon(self, gps_info_exif):
"""Returns the latitude and longitude, if available, from the provided exif_data (obtained through get_exif_data above)"""
lat = None
lon = None
gps_info = {
ExifTags.GPSTAGS[k]: v
for k, v in gps_info_exif.items()
if k in ExifTags.GPSTAGS
}
gps_latitude = self._get_if_exist(gps_info, "GPSLatitude")
gps_latitude_ref = self._get_if_exist(gps_info, 'GPSLatitudeRef')
gps_longitude = self._get_if_exist(gps_info, 'GPSLongitude')
gps_longitude_ref = self._get_if_exist(gps_info, 'GPSLongitudeRef')
if gps_latitude and gps_latitude_ref and gps_longitude and gps_longitude_ref:
lat = self._convert_to_degress(gps_latitude)
if gps_latitude_ref != "N":
lat = 0 - lat
lon = self._convert_to_degress(gps_longitude)
if gps_longitude_ref != "E":
lon = 0 - lon
return lat, lon
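# Hedged worked example (added for clarity): EXIF stores each coordinate as
# degree/minute/second rationals, e.g. ((40, 1), (26, 1), (4614, 100)) meaning
# 40 deg 26' 46.14". _convert_to_degress returns 40 + 26/60 + 46.14/3600
# (about 40.44615), and get_lat_lon negates it when the reference is 'S' or 'W'.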
|
bitsteller/witica
|
witica/metadata/extractor.py
|
Python
|
mit
| 7,072
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
from tinyrpc.protocols.jsonrpc import JSONRPCProtocol
from tinyrpc import RPCErrorResponse
@pytest.fixture(params=['jsonrpc'])
def protocol(request):
    if request.param == 'jsonrpc':
return JSONRPCProtocol()
raise RuntimeError('Bad protocol name in test case')
def test_protocol_returns_bytes(protocol):
req = protocol.create_request('foo', ['bar'])
assert isinstance(req.serialize(), bytes)
def test_protocol_responds_bytes(protocol):
req = protocol.create_request('foo', ['bar'])
rep = req.respond(42)
err_rep = req.error_respond(Exception('foo'))
assert isinstance(rep.serialize(), bytes)
assert isinstance(err_rep.serialize(), bytes)
def test_one_way(protocol):
req = protocol.create_request('foo', None, {'a': 'b'}, True)
assert req.respond(None) == None
def test_raises_on_args_and_kwargs(protocol):
with pytest.raises(Exception):
protocol.create_request('foo', ['arg1', 'arg2'], {'kw_key': 'kw_value'})
def test_supports_no_args(protocol):
protocol.create_request('foo')
def test_creates_error_response(protocol):
req = protocol.create_request('foo', ['bar'])
err_rep = req.error_respond(Exception('foo'))
assert hasattr(err_rep, 'error')
def test_parses_error_response(protocol):
req = protocol.create_request('foo', ['bar'])
err_rep = req.error_respond(Exception('foo'))
parsed = protocol.parse_reply(err_rep.serialize())
assert hasattr(parsed, 'error')
def test_default_id_generator():
from tinyrpc.protocols import default_id_generator
g = default_id_generator(1)
assert next(g) == 1
assert next(g) == 2
assert next(g) == 3
|
mbr/tinyrpc
|
tests/test_protocols.py
|
Python
|
mit
| 1,721
|
'''
Nonlinear optimization by use of Affine DualAveraging
@author: Maximilian Balandat
@date: May 13, 2015
'''
import numpy as np
from .Domains import nBox
class NLoptProblem():
""" Basic class describing a Nonlinear Optimization problem. """
def __init__(self, domain, objective):
""" Constructor for the basic problem class. Here objective is a callable that
provides val and grad methods for computing value and gradient, respectively. """
if not isinstance(domain, nBox):
raise Exception('For now only nBoxes are supported!')
self.domain, self.objective = domain, objective
def run_minimization(self, etas, N, **kwargs):
""" Runs the minimization of the objective function based on interpreting
the value/gradient at the current iterate as an affine loss function
and applying dual averaging with the Exponential Potential. """
t, T, = 1, len(etas)
A = np.zeros((N, self.domain.n))
actions = [self.domain.sample_uniform(N)]
bounds = np.array(self.domain.bounds)
while t<T:
A += self.objective.grad(actions[-1])
actions.append(quicksample(bounds, A, etas[t]))
t += 1
return actions
def quicksample(bounds, A, eta):
""" Function returning actions sampled from the solution of the Dual Averaging
    update on an nBox with affine losses and the Exponential Potential. """
C1, C2 = np.exp(-eta*A*bounds[:,0]), np.exp(-eta*A*bounds[:,1])
Finv = lambda U: -np.log(C1 - (C1-C2)*U)/A/eta
return Finv(np.random.rand(*A.shape))
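# Hedged usage sketch (illustrative only; the nBox constructor signature and the
# objective interface are assumptions based on the docstrings above):
#
#   domain = nBox([(0.0, 1.0), (0.0, 1.0)])
#   problem = NLoptProblem(domain, objective)      # objective exposes val/grad
#   etas = 0.1 / np.sqrt(np.arange(1, 101))        # step sizes eta_t, one per round
#   iterates = problem.run_minimization(etas, N=50)
#
# Each element of 'iterates' is an (N, n) array of points sampled from the dual
# averaging distribution after the affine losses seen so far.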
|
Balandat/cont_no_regret
|
old_code/NLopt.py
|
Python
|
mit
| 1,667
|
from celery.task import Task
from ..notifications.contextmanagers import BatchNotifications
class AsyncBadgeAward(Task):
ignore_result = True
def run(self, badge, state, **kwargs):
# from celery.contrib import rdb; rdb.set_trace()
with BatchNotifications():
badge.actually_possibly_award(**state)
|
fgmacedo/django-awards
|
awards/tasks.py
|
Python
|
mit
| 337
|
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
from collections import Counter
nums2 = Counter(nums2)
nums1 = Counter(nums1)
ret = []
for i in nums1:
cnt = min(nums1[i], nums2.get(i, 0))
for j in range(cnt):
ret.append(i)
return ret
print Solution().intersect([1,2,3,2], [2,2])
|
xingjian-f/Leetcode-solution
|
357. Count Numbers with Unique Digits.py
|
Python
|
mit
| 482
|
# This code is licensed under the MIT License (see LICENSE file for details)
from PyQt5 import Qt
class ViewportRectItem(Qt.QGraphicsObject):
size_changed = Qt.pyqtSignal(Qt.QSizeF)
def __init__(self):
super().__init__()
self.setFlags(
Qt.QGraphicsItem.ItemIgnoresTransformations |
Qt.QGraphicsItem.ItemSendsGeometryChanges |
Qt.QGraphicsItem.ItemSendsScenePositionChanges |
Qt.QGraphicsItem.ItemHasNoContents
)
self._size = Qt.QSizeF()
# Children are generally overlay items that should appear over anything else rather than z-fighting
self.setZValue(10)
@property
def size(self):
return self._size
@size.setter
def size(self, v):
if not isinstance(v, Qt.QSizeF):
v = Qt.QSizeF(v)
if self._size != v:
self.prepareGeometryChange()
self._size = v
self.size_changed.emit(v)
def boundingRect(self):
return Qt.QRectF(Qt.QPointF(), self._size)
|
zpincus/RisWidget
|
ris_widget/qgraphicsitems/viewport_rect_item.py
|
Python
|
mit
| 1,046
|
'''
usage: scrapy runspider recursive_link_results.py (or from root folder: scrapy crawl scrapy_spider_recursive)
'''
#from scrapy.spider import Spider
from scrapy.selector import Selector
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from scrapy.http import Request
from scrapy_spider.items import PageItem,LinkItem,SearchItem
class Search(CrawlSpider):
# Parameters set used for spider crawling
name = 'scrapy_spider_recursive'
def __init__(self,url_list,search_id):#specified by -a
        # REMARK: if allowed_domains is not set then ALL domains are allowed
self.start_urls = url_list.split(',')
self.search_id = int(search_id)
#allow any link but the ones with different font size(repetitions)
self.rules = (
Rule(LinkExtractor(allow=(),deny=('fontSize=*','infoid=*','SortBy=*', ),unique=True), callback='parse_item', follow=True),
)
super(Search, self).__init__(url_list)
def parse_item(self, response):
sel = Selector(response)
## Get meta info from website
title = sel.xpath('//title/text()').extract()
if len(title)>0:
title = title[0].encode('utf-8')
contents = sel.xpath('/html/head/meta[@name="description"]/@content').extract()
content = ' '.join([c.encode('utf-8') for c in contents]).strip()
fromurl = response.request.headers['Referer']
tourl = response.url
depth = response.request.meta['depth']
#get search item
search_item = SearchItem.django_model.objects.get(id=self.search_id)
#newpage
if not PageItem.django_model.objects.filter(url=tourl).exists():
newpage = PageItem()
newpage['searchterm'] = search_item
newpage['title'] = title
newpage['content'] = content
newpage['url'] = tourl
newpage['depth'] = depth
            newpage.save()  # can't use a pipeline because execution can finish here
print fromurl,'--title:',title,'-',response.url,' depth:',depth
#print contents
#if( int(depth)> 1):
# print fromurl,'--title:',title,'-',response.url,' depth:',depth
#get from_id,to_id
from_page = PageItem.django_model.objects.get(url=fromurl)
from_id = from_page.id
to_page = PageItem.django_model.objects.get(url=tourl)
to_id = to_page.id
#newlink
if not LinkItem.django_model.objects.filter(from_id=from_id).filter(to_id=to_id).exists():
newlink = LinkItem()
newlink['searchterm'] = search_item
newlink['from_id'] = from_id
newlink['to_id'] = to_id
newlink.save()
|
xianjunzhengbackup/code
|
data science/machine_learning_for_the_web/chapter_8/movie_reviews_analizer_app/scrapy_spider/spiders/recursive_link_results.py
|
Python
|
mit
| 2,856
|
# This example is designed to be paired with example file 31-bridge-out.py
# Run the two with DIFFERENT DEVICE TOKENS.
# (They can be either in same "project" or separate projects as set at phone. Just use different tokens.)
# This "in" bridge receives data directly from other RPi.
# Our display shows incoming messages.
# Our LED on gpio 21 is controlled by button at other end.
import gpiozero as GPIO
from PiBlynk import Blynk
from mytoken import *
blynk = Blynk(token2) # <<<<<<<<<<<<<<<<<<<< USE DIFFERENT TOKEN FROM OTHER END !!!
#-----------------------------------------------
# gpio (incoming) write
def gpioOut_h(val, pin, gpioObj):
gpioObj.value = val # control the LED
print("Incoming GPIO OUT command:", pin, val)
# set up the RPi LED or other outputs and connect to generic gpioOut function above
ledR = GPIO.LED(21) # gpiozero led objects
blynk.add_digital_hw_pin(21, None, gpioOut_h, ledR)
#-----------------------------------------
# Listen for anything coming in V61. Just print it
def virt_in_h(val, pin, st):
print("Incoming on VP:", pin, val)
blynk.add_virtual_pin(61, write=virt_in_h) # we place a LISTEN for incoming writes on V61
def cnct_cb():
print ("Connected: ")
print("Waiting for incoming messages ...")
blynk.on_connect(cnct_cb)
######################################################################################
blynk.run()
######################################################################################
#At APP:
# Nothing
|
BLavery/PiBlynk
|
PiBlynk-py/32-bridge-in.py
|
Python
|
mit
| 1,523
|
#class SVNRepo:
# @classmethod
# def isBadVersion(cls, id)
# # Run unit tests to check whether verison `id` is a bad version
# # return true if unit tests passed else false.
# You can use SVNRepo.isBadVersion(10) to check whether version 10 is a
# bad version.
class Solution:
"""
@param n: An integers.
@return: An integer which is the first bad version.
"""
def findFirstBadVersion(self, n):
# write your code here
start, end = 1, n
if (n == 1):
return 1
while (start <= end):
i = (start + end) / 2
if (not SVNRepo.isBadVersion(i)):
start = i + 1
else:
end = i - 1
return start
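# Hedged worked example (added for clarity): with n = 5 and versions
# [good, good, bad, bad, bad], the loop narrows (start, end) as
#   (1, 5) -> mid 3 is bad  -> (1, 2)
#   (1, 2) -> mid 1 is good -> (2, 2)
#   (2, 2) -> mid 2 is good -> (3, 2), the loop exits and start = 3 is
# returned, i.e. the first bad version, using O(log n) isBadVersion calls.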
|
Rhadow/leetcode
|
lintcode/Medium/074_First_Bad_Version.py
|
Python
|
mit
| 742
|
import socket
import time
import random
import threading
import re
import json
import sys
import os
import platform
import notify2
from urllib import request
g_rid= b'265352'
g_username= b'visitor42'
g_ip= b'danmu.douyutv.com'
g_port= 8601
g_gid= b'0'
g_exit= False
sysinfo = platform.system()
def notify(title, message):
if sysinfo == 'Linux':
os.system('notify-send {}'.format(': '.join([title, message])))
# notify2.init('douyu')
# notify2.Notification(title, message).show()
else:
t = '-title {!r}'.format(title)
m = '-message {!r}'.format(message)
os.system('terminal-notifier {}'.format(' '.join([m, t])))
def is_exit():
global g_exit
return g_exit
def cast_wetght(g):
g= int(g)
if g>1e6:
return str(round(g/1e6,2))+'t'
elif g>1e3:
return str(round(g/1e3,2))+'kg'
else:
return str(g)+'g'
def sendmsg(s,msg,code=689):
data_length= len(msg)+8
s.send(int.to_bytes(data_length,4,'little'))
s.send(int.to_bytes(data_length,4,'little'))
s.send(int.to_bytes(code,4,'little'))
sent=0
while sent<len(msg):
tn= s.send(msg[sent:])
sent= sent + tn
def recvmsg(s):
bdata_length= s.recv(12)
data_length= int.from_bytes(bdata_length[:4],'little')-8
if data_length<=0:
print('badlength',bdata_length)
return None
total_data=[]
while True:
msg= s.recv(data_length)
if not msg: break
data_length= data_length - len(msg)
total_data.append(msg)
ret= b''.join(total_data)
return ret
def unpackage(data):
ret={}
lines= data.split(b'/')
lines.pop() # pop b''
for line in lines:
kv= line.split(b'@=')
if len(kv)==2:
ret[kv[0]]= kv[1].replace(b'@S',b'/').replace(b'@A',b'@')
else:
ret[len(ret)]= kv[0].replace(b'@S',b'/').replace(b'@A',b'@')
return ret
def unpackage_list(l):
ret=[]
lines= l.split(b'@S')
for line in lines:
line= line.replace(b'@AA',b'')
mp= line.split(b'@AS')
tb={}
for kv in mp:
try:
k,v= kv.split(b'=')
tb[k]=v
except:
pass
ret.append(tb)
return ret
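# Hedged protocol note (added for clarity): each packet carries two identical
# little-endian 4-byte length fields and a 4-byte type code, followed by a
# payload of '/'-separated 'key@=value' pairs in which '/' and '@' inside
# values are escaped as '@S' and '@A'. For example
#   unpackage(b'type@=keeplive/tick@=42/') == {b'type': b'keeplive', b'tick': b'42'}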
def get_longinres(s_ip=b'117.79.132.20', s_port=8001, rid=b'265352'):
s= socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((s_ip, int(s_port)))
sendmsg(s,b'type@=loginreq/username@=/password@=/roomid@='+rid+b'/\x00')
print('==========longinres')
longinres= unpackage(recvmsg(s))
#print('==========msgrepeaterlist')
msgrepeaterlist= unpackage(recvmsg(s))
lst= unpackage(msgrepeaterlist[b'list'])
tb= unpackage(random.choice(tuple(lst.values())))
#print('==========setmsggroup')
setmsggroup= unpackage(recvmsg(s))
ret= {'rid':rid,
'username': longinres[b'username'],
'ip': tb[b'ip'],
'port': tb[b'port'],
'gid': setmsggroup[b'gid']
}
def keepalive_send():
while not is_exit():
sendmsg(s,b'type@=keeplive/tick@='+str(random.randint(1,99)).encode('ascii')+b'/\x00')
time.sleep(45)
s.close()
threading.Thread(target=keepalive_send).start()
def keepalive_recv():
while not is_exit():
bmsg= recvmsg(s)
print('*** usr alive:',unpackage(bmsg),'***')
s.close()
threading.Thread(target=keepalive_recv).start()
return ret
def get_danmu(rid=b'5275', ip=b'danmu.douyutv.com', port=8001, username=b'visitor42', gid=b'0'):
"args needs bytes not str"
print('==========danmu')
s= socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip,int(port)))
sendmsg(s,b'type@=loginreq/username@='+username+b'/password@=1234567890123456/roomid@='+rid+b'/\x00')
loginres= unpackage(recvmsg(s))
sendmsg(s,b'type@=joingroup/rid@='+rid+b'/gid@='+gid+b'/\x00')
def keepalive():
while not is_exit():
sendmsg(s,b'type@=keeplive/tick@='+str(random.randint(1,99)).encode('ascii')+b'/\x00')
time.sleep(45)
s.close()
threading.Thread(target=keepalive).start()
while True:
bmsg= recvmsg(s)
if not bmsg:
print('*** connection break ***')
global g_exit
g_exit= True
break
msg= unpackage(bmsg)
msgtype= msg.get(b'type',b'undefined')
if msgtype==b'chatmessage':
nick= msg[b'snick'].decode('utf8')
content= msg.get(b'content',b'undefined').decode('utf8')
print(nick, ': ', content)
notify(nick, content)
elif msgtype==b'donateres':
sui= unpackage(msg.get(b'sui',b'nick@=undifined//00'))
nick= sui[b'nick'].decode('utf8')
print('***', nick, '送给主播', int(msg[b'ms']),\
'个鱼丸 (', cast_wetght(msg[b'dst_weight']), ') ***')
notify(nick, '送给主播' + str(int(msg[b'ms'])) + '个鱼丸')
elif msgtype==b'keeplive':
print('*** dm alive:',msg,'***')
        elif msgtype == b'userenter':
pass
else:
print(msg)
###########from common.py
def match1(text, *patterns):
"""Scans through a string for substrings matched some patterns (first-subgroups only).
Args:
text: A string to be scanned.
patterns: Arbitrary number of regex patterns.
Returns:
When only one pattern is given, returns a string (None if no match found).
When more than one pattern are given, returns a list of strings ([] if no match found).
"""
if len(patterns) == 1:
pattern = patterns[0]
match = re.search(pattern, text)
if match:
return match.group(1)
else:
return None
else:
ret = []
for pattern in patterns:
match = re.search(pattern, text)
if match:
ret.append(match.group(1))
return ret
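# Hedged examples (added for clarity, illustrative only):
#   match1('charset=utf-8', r'charset=([\w-]+)')   -> 'utf-8'
#   match1('a=1 b=2', r'a=(\d)', r'b=(\d)')        -> ['1', '2']
#   match1('no charset here', r'charset=([\w-]+)') -> None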
# ungzip/undeflate are referenced by get_content below but were not copied over
# from common.py; minimal standard-library implementations are provided here.
def ungzip(data):
    """Decompress a gzip-encoded response body."""
    import gzip
    from io import BytesIO
    return gzip.GzipFile(fileobj=BytesIO(data)).read()
def undeflate(data):
    """Decompress a deflate (raw zlib stream) encoded response body."""
    import zlib
    decompressobj = zlib.decompressobj(-zlib.MAX_WBITS)
    return decompressobj.decompress(data) + decompressobj.flush()
def get_content(url, headers={}, decoded=True, cookies_txt=''):
"""Gets the content of a URL via sending a HTTP GET request.
Args:
url: A URL.
headers: Request headers used by the client.
decoded: Whether decode the response body using UTF-8 or the charset specified in Content-Type.
Returns:
The content as a string.
"""
req = request.Request(url, headers=headers)
if cookies_txt:
cookies_txt.add_cookie_header(req)
req.headers.update(req.unredirected_hdrs)
response = request.urlopen(req)
data = response.read()
# Handle HTTP compression for gzip and deflate (zlib)
content_encoding = response.getheader('Content-Encoding')
if content_encoding == 'gzip':
data = ungzip(data)
elif content_encoding == 'deflate':
data = undeflate(data)
# Decode the response body
if decoded:
charset = match1(response.getheader('Content-Type'), r'charset=([\w-]+)')
if charset is not None:
data = data.decode(charset)
else:
data = data.decode('utf-8')
return data
###########from util/strings.py
try:
# py 3.4
from html import unescape as unescape_html
except ImportError:
import re
from html.entities import entitydefs
def unescape_html(string):
'''HTML entity decode'''
string = re.sub(r'&#[^;]+;', _sharp2uni, string)
string = re.sub(r'&[^;]+;', lambda m: entitydefs[m.group(0)[1:-1]], string)
return string
def _sharp2uni(m):
'''&#...; ==> unicode'''
s = m.group(0)[2:].rstrip(';;')
if s.startswith('x'):
return chr(int('0'+s, 16))
else:
return chr(int(s))
##########
def get_room_info(url):
print('==========room')
html = get_content(url)
room_id_patt = r'"room_id":(\d{1,99}),'
title_patt = r'<div class="headline clearfix">\s*<h1>([^<]{1,9999})</h1>'
title_patt_backup = r'<title>([^<]{1,9999})</title>'
roomid = match1(html,room_id_patt)
title = match1(html,title_patt) or match1(html,title_patt_backup)
title = unescape_html(title)
conf = get_content("http://www.douyutv.com/api/client/room/"+roomid)
metadata = json.loads(conf)
servers= metadata['data']['servers']
dest_server= servers[0]
return {'s_ip': dest_server['ip'],
's_port': dest_server['port'],
'rid': metadata['data']['room_id'].encode()
}
print(metadata)
def main(url='http://www.douyutv.com/xtd'):
login_user_info= get_room_info(url)
print('login_user_info:', login_user_info)
login_room_info= get_longinres(**login_user_info)
print('login_room_info', login_room_info)
get_danmu(**login_room_info)
if __name__=='__main__':
url= sys.argv[1] if len(sys.argv)>1 else 'http://www.douyutv.com/zeek'
main(url)
|
zephyrzoom/douyu
|
test/comment-douyu.py
|
Python
|
mit
| 8,896
|
# -*- coding: utf-8 -*-
import aaargh
from app import Negi
app = aaargh.App(description="Jinja2+JSON powered static HTML build tool")
@app.cmd(help='Parse JSON and build HTML')
@app.cmd_arg('-d','--data_dir',default='./data',help='JSON data directory (default: ./data)')
@app.cmd_arg('-t','--tmpl_dir',default='./templates',help='Jinja2 template directory (default: ./templates)')
@app.cmd_arg('-o','--out_dir',default='./dist',help='Output directory (default: ./dist)')
@app.cmd_arg('-v','--verbose',nargs='?',const=True,default=False)
def build(data_dir,tmpl_dir,out_dir,verbose):
builder = Negi(
data_dir= data_dir,
tmpl_dir = tmpl_dir,
out_dir = out_dir,
verbose = verbose
)
builder.build()
def main():
app.run()
if __name__ == '__main__':
main()
|
zk33/negi
|
negi/main.py
|
Python
|
mit
| 798
|
from django.conf import settings
from . import defaults
__title__ = 'fobi.contrib.plugins.form_elements.fields.' \
'select_multiple_with_max.conf'
__author__ = 'Artur Barseghyan <artur.barseghyan@gmail.com>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('get_setting',)
def get_setting(setting, override=None):
"""Get setting.
Get a setting from
`fobi.contrib.plugins.form_elements.fields.select_multiple_with_max`
conf module, falling back to the default.
If override is not None, it will be used instead of the setting.
:param setting: String with setting name
    :param override: Value to use instead of the setting. Defaults
           to None.
:return: Setting value.
"""
if override is not None:
return override
if hasattr(
settings,
'FOBI_FORM_ELEMENT_SELECT_MULTIPLE_WITH_MAX_{0}'.format(setting)
):
return getattr(
settings,
'FOBI_FORM_ELEMENT_SELECT_MULTIPLE_WITH_MAX_{0}'.format(setting)
)
else:
return getattr(defaults, setting)
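# Hedged usage sketch (the setting name below is made up for illustration):
#
#   MAX_CHOICES = get_setting('MAX_CHOICES')
#
# returns settings.FOBI_FORM_ELEMENT_SELECT_MULTIPLE_WITH_MAX_MAX_CHOICES when
# it is defined, the value from this package's defaults module otherwise, and
# the override argument (when not None) takes precedence over both.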
|
mansonul/events
|
events/contrib/plugins/form_elements/fields/select_multiple_with_max/conf.py
|
Python
|
mit
| 1,127
|
import logging
from queue import Queue
from gi.repository import GObject
from lib.commands import ControlServerCommands
from lib.tcpmulticonnection import TCPMultiConnection
from lib.response import NotifyResponse
class ControlServer(TCPMultiConnection):
def __init__(self, pipeline):
'''Initialize server and start listening.'''
self.log = logging.getLogger('ControlServer')
super().__init__(port=9999)
self.command_queue = Queue()
self.commands = ControlServerCommands(pipeline)
def on_accepted(self, conn, addr):
'''Asynchronous connection listener.
Starts a handler for each connection.'''
self.log.debug('setting gobject io-watch on connection')
GObject.io_add_watch(conn, GObject.IO_IN, self.on_data, [''])
def on_data(self, conn, _, leftovers, *args):
'''Asynchronous connection handler.
Pushes data from socket into command queue linewise'''
close_after = False
try:
while True:
try:
leftovers.append(conn.recv(4096).decode(errors='replace'))
if len(leftovers[-1]) == 0:
self.log.info("Socket was closed")
leftovers.pop()
close_after = True
break
except UnicodeDecodeError as e:
continue
except BlockingIOError:
pass
data = "".join(leftovers)
del leftovers[:]
lines = data.split('\n')
for line in lines[:-1]:
self.log.debug("got line: %r", line)
line = line.strip()
# 'quit' = remote wants us to close the connection
if line == 'quit' or line == 'exit':
self.log.info("Client asked us to close the Connection")
self.close_connection(conn)
return False
self.log.debug('re-starting on_loop scheduling')
GObject.idle_add(self.on_loop)
self.command_queue.put((line, conn))
if close_after:
self.close_connection(conn)
return False
if lines[-1] != '':
self.log.debug("remaining %r", lines[-1])
leftovers.append(lines[-1])
return True
def on_loop(self):
'''Command handler. Processes commands in the command queue whenever
nothing else is happening (registered as GObject idle callback)'''
self.log.debug('on_loop called')
if self.command_queue.empty():
self.log.debug('command_queue is empty again, '
'stopping on_loop scheduling')
return False
line, requestor = self.command_queue.get()
words = line.split()
if len(words) < 1:
            self.log.debug('ignoring empty command line')
return True
command = words[0]
args = words[1:]
self.log.info("processing command %r with args %s", command, args)
response = None
try:
# deny calling private methods
if command[0] == '_':
self.log.info('private methods are not callable')
raise KeyError()
command_function = self.commands.__class__.__dict__[command]
except KeyError as e:
self.log.info("received unknown command %s", command)
response = "error unknown command %s\n" % command
else:
try:
responseObject = command_function(self.commands, *args)
except Exception as e:
message = str(e) or "<no message>"
response = "error %s\n" % message
else:
if isinstance(responseObject, NotifyResponse):
responseObject = [responseObject]
if isinstance(responseObject, list):
for obj in responseObject:
signal = "%s\n" % str(obj)
for conn in self.currentConnections:
self._schedule_write(conn, signal)
else:
response = "%s\n" % str(responseObject)
finally:
if response is not None and requestor in self.currentConnections:
self._schedule_write(requestor, response)
return False
def _schedule_write(self, conn, message):
queue = self.currentConnections[conn]
self.log.debug('re-starting on_write[%u] scheduling', conn.fileno())
GObject.io_add_watch(conn, GObject.IO_OUT, self.on_write)
queue.put(message)
def on_write(self, conn, *args):
self.log.debug('on_write[%u] called', conn.fileno())
try:
queue = self.currentConnections[conn]
except KeyError:
return False
if queue.empty():
self.log.debug('write_queue[%u] is empty again, '
'stopping on_write scheduling',
conn.fileno())
return False
message = queue.get()
try:
conn.send(message.encode())
except Exception as e:
self.log.warning('failed to send message', exc_info=True)
return True
|
h01ger/voctomix
|
voctocore/lib/controlserver.py
|
Python
|
mit
| 5,365
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('point', models.IntegerField(default=1)),
('lastcall', models.DateTimeField(auto_now_add=True)),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
),
]
|
genonfire/portality
|
accounts/migrations/0001_initial.py
|
Python
|
mit
| 730
|
from gameinfo import *
from porkglobals import *
def genGameMap():
"""This is an "abstract function" to hold this docstring and information.
A GameMap function defines Places and connects all the Places it defines in
a graph, but simpler graph than CommandGraph. It simply uses Place.nextnodes.
A GameMap function returns the starting location."""
def testGameMap():
""" ***TEST CASES*** """
# testing item adj/name collision
testsword = Weapon("elvish sword", "A blade of Elvish make.", 2, weight=2)
testsword2 = Weapon("rusty elvish sword", "A discarded old blade of Elvish steel.", 2)
testsword3 = Weapon("sword elvish rusty", "A mix of adjectives to fuck with you.", 2)
startlocnext = {}
startloc = Place("Sword testing location.",
items=[testsword,testsword2,testsword3],
next=startlocnext)
return startloc
def goldenfieldMap():
    # Python objects are references; accidentally putting the same object in
    # two places would cause some weird behavior
shittystartersword = Weapon("old, rusty sword", "A simple sword, obviously aged and covered in rust.", 2, weight=2)
startlocnext = {'e':"There is a wall there."}
startloc = Place("You are in a field. Swaying, golden grass surrounds you in all directions.",
items=[shittystartersword],
next=startlocnext)
field1next = {'s':startloc}
field1 = Place("You are in a field. Golden, swaying grass surrounds you in all directions.",
next=field1next)
startlocnext['n'] = field1
field2next = {'n':startloc}
field2 = Place("You are in a field with golden, swaying grass in all directions.",
next=field2next)
startlocnext['s'] = field2
# wait why the hell am i not just doing Place.next = {}
aSecretRoomNext = {'u':startloc}
aSecretRoom = Place(("You find yourself in a secret room. The walls glare down at you, but otherwise the room is quiet. There "
"is a painting on the wall in front of you, flanked by two statues of what appear to be kneeling warriors."),
next=aSecretRoomNext)
warriorStatue = Feature("warrior statue", ("A statue of a kneeling warrior. He faces down, with one hand on the hilt of his sheathed sword and "
"the other in a fist."))
painting = Feature("painting", "A painting of a bowl of fruit. A note attached to it says, do not to this.")
aSecretRoom.features = [warriorStatue, painting]
startlocnext['d'] = aSecretRoom
return startloc
if DEBUG:
genGameMap = testGameMap
# ghetto map choosing
genGameMap = goldenfieldMap
|
phorust/pork
|
game/game.py
|
Python
|
mit
| 2,596
|
"""tidy_up_latent
Tidy up a latent migration not previously picked up by alembic or ignored.
Revision ID: 1280
Revises: 1270
Create Date: 2019-06-10 15:51:48.661665
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1280'
down_revision = '1270'
def upgrade():
    # this field now has a "unique index" instead of a "unique constraint"
op.drop_constraint('uq_direct_award_projects_external_id', 'direct_award_projects', type_='unique')
def downgrade():
op.create_unique_constraint('uq_direct_award_projects_external_id', 'direct_award_projects', ['external_id'])
|
alphagov/digitalmarketplace-api
|
migrations/versions/1280_tidy_up_latent.py
|
Python
|
mit
| 633
|
import base64
from dataclasses import dataclass
from typing import Any, List, Tuple, Union
import flask
import sqlalchemy as sa
from marshmallow import ValidationError, fields
from flask_resty.sorting import FieldOrderings, FieldSortingBase
from flask_resty.view import ModelView
from . import meta
from .exceptions import ApiError
from .utils import if_none
# -----------------------------------------------------------------------------
class PaginationBase:
"""The base class for pagination components.
    Pagination components control how the list view fetches individual pages of
data, as opposed to the full collection. They handle limiting the number
of returned records, and fetching additional records after the initial
page.
Subclasses must implement :py:meth:`get_page` to provide the pagination
logic.
"""
def adjust_sort_ordering(
self, view, field_orderings: FieldOrderings
) -> FieldOrderings:
return field_orderings
def get_page(self, query, view):
"""Restrict the specified query to a single page.
:param query: The query to paginate.
:type query: :py:class:`sqlalchemy.orm.query.Query`
:param view: The view with the model we wish to paginate.
:type view: :py:class:`ModelView`
:return: The paginated query
:rtype: :py:class:`sqlalchemy.orm.query.Query`
:raises: A :py:class:`NotImplementedError` if no implementation is
provided.
"""
raise NotImplementedError()
def get_item_meta(self, item, view):
"""Build pagination metadata for a single item.
:param item: An instance of the :py:attr:`ModelView.model`.
:type item: obj
:param view: The view with the :py:attr:`ModelView.model`.
:type view: :py:class:`ModelView`
"""
return None
# -----------------------------------------------------------------------------
class LimitPaginationBase(PaginationBase):
"""The base scheme for pagination components that limit fetched item count.
The pagination metadata will indicate the presence of additional items with
the ``has_next_page`` property.
Subclasses must implement :py:meth:`get_limit` to provide maximum number
of returned items.
"""
def get_page(self, query, view) -> List:
limit = self.get_limit()
if limit is not None:
query = query.limit(limit + 1)
items = query.all()
if limit is not None and len(items) > limit:
has_next_page = True
items = items[:limit]
else:
has_next_page = False
meta.update_response_meta({"has_next_page": has_next_page})
return items
def get_limit(self):
"""Override this method to return the maximum number of returned items.
:rtype: int
"""
raise NotImplementedError()
class MaxLimitPagination(LimitPaginationBase):
"""Return up to a fixed maximum number of items.
This is not especially useful and is included only for completeness.
:param int max_limit: The maximum number of items to retrieve.
"""
def __init__(self, max_limit):
self._max_limit = max_limit
def get_limit(self):
return self._max_limit
class LimitPagination(LimitPaginationBase):
"""A pagination scheme that takes a user-specified limit.
This pagination scheme uses the :py:attr:`limit_arg` query parameter to
limit the number of items returned by the query.
If no such limit is explicitly specified, this uses `default_limit`. If
`max_limit` is specified, then the user-specified limit may not exceed
`max_limit`.
:param int default_limit: The default maximum number of items to retrieve,
if the user does not specify an explicit value.
:param int max_limit: The maximum number of items the user is allowed to
request.
"""
#: The name of the query parameter to inspect for the LIMIT value.
limit_arg = "limit"
def __init__(self, default_limit=None, max_limit=None):
self._default_limit = if_none(default_limit, max_limit)
self._max_limit = max_limit
if self._max_limit is not None:
assert (
self._default_limit <= self._max_limit
), "default limit exceeds max limit"
def get_limit(self):
limit = flask.request.args.get(self.limit_arg)
try:
return self.parse_limit(limit)
except ApiError as e:
raise e.update({"source": {"parameter": self.limit_arg}})
def parse_limit(self, limit):
if limit is None:
return self._default_limit
try:
limit = int(limit)
except ValueError as e:
raise ApiError(400, {"code": "invalid_limit"}) from e
if limit < 0:
raise ApiError(400, {"code": "invalid_limit"})
if self._max_limit is not None:
limit = min(limit, self._max_limit)
return limit
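# Rough usage sketch (Widget and WidgetSchema are made-up names, not part of
# this module):
#
#     class WidgetListView(GenericModelView):
#         model = Widget
#         schema = WidgetSchema()
#         pagination = LimitPagination(default_limit=10, max_limit=100)
#
# A request such as GET /widgets/?limit=25 would then return at most 25 items.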
class LimitOffsetPagination(LimitPagination):
"""A pagination scheme that takes a user-specified limit and offset.
This pagination scheme takes a user-specified limit and offset. It will
retrieve up to the specified number of items, beginning at the specified
offset.
"""
#: The name of the query parameter to inspect for the OFFSET value.
offset_arg = "offset"
def get_page(self, query, view):
offset = self.get_offset()
query = query.offset(offset)
return super().get_page(query, view)
def get_offset(self):
offset = flask.request.args.get(self.offset_arg)
try:
return self.parse_offset(offset)
except ApiError as e:
raise e.update({"source": {"parameter": self.offset_arg}})
def parse_offset(self, offset):
if offset is None:
return 0
try:
offset = int(offset)
except ValueError as e:
raise ApiError(400, {"code": "invalid_offset"}) from e
if offset < 0:
raise ApiError(400, {"code": "invalid_offset"})
return offset
class PagePagination(LimitOffsetPagination):
"""A pagination scheme that fetches a particular fixed-size page.
    This works similarly to `LimitOffsetPagination`. The limit used will always
be the fixed page size. The offset will be page * page_size.
:param int page_size: The fixed number of items per page.
"""
#: The name of the query parameter to inspect for the page value.
page_arg = "page"
def __init__(self, page_size):
super().__init__()
self._page_size = page_size
def get_offset(self):
return self.get_request_page() * self._page_size
def get_request_page(self):
page = flask.request.args.get(self.page_arg)
try:
return self.parse_page(page)
except ApiError as e:
raise e.update({"source": {"parameter": self.page_arg}})
def parse_page(self, page):
if page is None:
return 0
try:
page = int(page)
except ValueError as e:
raise ApiError(400, {"code": "invalid_page"}) from e
if page < 0:
raise ApiError(400, {"code": "invalid_page"})
return page
def get_limit(self):
return self._page_size
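# Worked example (assuming PagePagination(page_size=20)): a request with
# ?page=2 gives offset = 2 * 20 = 40 and limit = 20, i.e. items 41-60 of the
# sorted collection, since pages are zero-indexed here.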
# -----------------------------------------------------------------------------
Cursor = Tuple[Any, ...]
@dataclass
class CursorInfo:
reversed: bool
cursor: Union[str, None]
cursor_arg: Union[str, None]
limit: Union[str, None]
limit_arg: Union[str, None]
class CursorPaginationBase(LimitPagination):
"""The base class for pagination schemes that use cursors.
Unlike with offsets that identify items by relative position, cursors
identify position by content. This allows continuous pagination without
concerns about page shear on dynamic collections. This makes cursor-based
pagination especially effective for lists with infinite scroll dynamics,
as offset-based pagination can miss or duplicate items due to inserts or
deletes.
It's also more efficient against the database, as the cursor condition can
be cheaply evaluated as a filter against an index.
    :param bool validate_values: If false, bypass validation of cursor values.
        This is useful if the deserializer field imposes validation that would
        fail on cursor values for items that are actually present.
"""
#: The name of the query parameter to inspect for the cursor value.
cursor_arg = "cursor"
limit_arg = "limit"
#: the name of the query parameter to inspect for explicit forward pagination
after_arg = "after"
first_arg = "first"
#: the name of the query parameter to inspect for explicit backward pagination
before_arg = "before"
last_arg = "last"
def __init__(self, *args, validate_values=True, **kwargs):
super().__init__(*args, **kwargs)
self._validate_values = validate_values
def try_get_arg(self, *args):
for arg in args:
value = flask.request.args.get(arg)
if value is not None:
return (value, arg)
return (None, None)
    # There are a number of different cases that this covers in order to be
    # backwards compatible with the legacy cursor/limit query parameters.
def get_cursor_info(self):
cursor = None
cursor_arg = None
limit = None
limit_arg = None
        # Unambiguous cases where a cursor is provided.
        # Legacy "cursor_arg" cases always map to after/first.
if (
self.after_arg in flask.request.args
or self.cursor_arg in flask.request.args
):
reversed = False
cursor, cursor_arg = self.try_get_arg(
self.after_arg, self.cursor_arg
)
limit, limit_arg = self.try_get_arg(self.first_arg, self.limit_arg)
elif self.before_arg in flask.request.args:
reversed = True
cursor, cursor_arg = self.try_get_arg(self.before_arg)
limit, limit_arg = self.try_get_arg(self.last_arg, self.limit_arg)
        # Ambiguous cases where limits are provided but not cursors.
        # There may be two explicit limit args provided; default to "first"
        # in keeping with the cursor logic.
elif self.first_arg in flask.request.args:
reversed = False
limit, limit_arg = self.try_get_arg(self.first_arg)
elif self.last_arg in flask.request.args:
reversed = True
limit, limit_arg = self.try_get_arg(self.last_arg)
# otherwise fall back on limit
else:
reversed = False
limit, limit_arg = self.try_get_arg(self.limit_arg)
return CursorInfo(reversed, cursor, cursor_arg, limit, limit_arg)
def get_limit(self):
cursor_info = self.get_cursor_info()
try:
return self.parse_limit(cursor_info.limit)
except ApiError as e:
raise e.update({"source": {"parameter": cursor_info.limit_arg}})
@property
def reversed(self):
return self.get_cursor_info().reversed
def adjust_sort_ordering(
self, view: ModelView, field_orderings
) -> FieldOrderings:
"""Ensure the query is sorted correctly and get the field orderings.
The implementation of cursor-based pagination in Flask-RESTy requires
that the query be sorted in a fully deterministic manner. The timestamp
columns usually used in sorting do not quite qualify, as two different
rows can have the same timestamp. This method adds the ID fields to the
sorting criterion, then returns the field orderings for use in the
other methods, as in `get_field_orderings` below.
:param view: The view with the model we wish to paginate.
:type view: :py:class:`ModelView`
:return: The field orderings necessary to do cursor pagination deterministically
:rtype: FieldOrderings
"""
# ignore the passed in sort so that it's consistent
# with further calls in get_page
return self.get_field_orderings(view)
def get_field_orderings(self, view: ModelView):
sorting: FieldSortingBase = view.sorting
assert (
sorting is not None
), "sorting must be defined when using cursor pagination"
sorting_field_orderings = sorting.get_request_field_orderings(view)
sorting_ordering_fields = frozenset(
field_name for field_name, _ in sorting_field_orderings
)
# For convenience, use the ascending setting on the last explicit
# ordering when possible, such that reversing the sort will reverse
# the IDs as well.
if sorting_field_orderings:
last_field_asc = sorting_field_orderings[-1][1]
else:
last_field_asc = True
missing_field_orderings = tuple(
(id_field, last_field_asc)
for id_field in view.id_fields
if id_field not in sorting_ordering_fields
)
field_ordering = sorting_field_orderings + missing_field_orderings
if self.reversed:
field_ordering = tuple(
(field, not order) for field, order in field_ordering
)
return field_ordering
def get_request_cursor(self, view, field_orderings):
"""Get the cursor value specified in the request.
Given the view and the field_orderings as above, this method will read
the encoded cursor from the query, then return the cursor as a tuple of
the field values in the cursor.
This parsed cursor can then be used in `get_filter`.
:param view: The view with the model we wish to paginate.
:type view: :py:class:`ModelView`
:param field_orderings: A sequence of field_ordering tuples
:type field_orderings: seq
:return: A cursor value
:rtype: str
:raises: :py:class:`ApiError` if an invalid cursor is provided in
`cursor_arg`.
"""
cursor_info = self.get_cursor_info()
if cursor_info.cursor is None:
return None
try:
return self.parse_cursor(view, cursor_info.cursor, field_orderings)
except ApiError as e:
raise e.update({"source": {"parameter": cursor_info.cursor_arg}})
def parse_cursor(
self,
view: ModelView,
cursor: str,
field_orderings: FieldOrderings,
) -> Cursor:
cursor = self.decode_cursor(cursor)
if len(cursor) != len(field_orderings):
raise ApiError(400, {"code": "invalid_cursor.length"})
deserializer = view.deserializer
column_fields = (
deserializer.fields[field_name]
for field_name, _ in field_orderings
)
try:
cursor = tuple(
self.deserialize_value(field, value)
for field, value in zip(column_fields, cursor)
)
except ValidationError as e:
raise ApiError.from_validation_error(
400, e, self.format_validation_error
) from e
return cursor
def decode_cursor(self, cursor: str) -> Tuple[str, ...]:
try:
cursor = cursor.split(".")
cursor = tuple(self.decode_value(value) for value in cursor)
except (TypeError, ValueError) as e:
raise ApiError(400, {"code": "invalid_cursor.encoding"}) from e
return cursor
def decode_value(self, value: str):
value = value.encode("ascii")
value += (3 - ((len(value) + 3) % 4)) * b"=" # Add back padding.
value = base64.urlsafe_b64decode(value)
return value.decode()
def deserialize_value(self, field, value):
return (
field.deserialize(value)
if self._validate_values
# Cursors don't need to be fully valid values; they just need to be
# the correct type for sorting, so it can make sense to bypass
# validation.
else field._deserialize(value, None, None)
)
def format_validation_error(self, message, path):
return {"code": "invalid_cursor", "detail": message}
def get_filter(
self, view, field_orderings: FieldOrderings, cursor: Cursor
):
"""Build the filter clause corresponding to a cursor.
Given the field orderings and the cursor as above, this will construct
a filter clause that can be used to filter a query to return only items
after the specified cursor, per the specified field orderings. Use this
to apply the equivalent of the offset specified by the cursor.
:param view: The view with the model we wish to paginate.
:type view: :py:class:`ModelView`
:param field_orderings: A sequence of field_ordering tuples derived from the view's Sorting with explicit id ordering
:type field_orderings: seq
:param cursor: A set of values corresponding to the fields in
`field_orderings`
:type cursor: seq
:return: A filter clause
"""
sorting: FieldSortingBase = view.sorting
column_cursors = tuple(
(sorting.get_column(view, field_name), asc, value)
for (field_name, asc), value in zip(field_orderings, cursor)
)
return sa.or_(
self.get_filter_clause(column_cursors[: i + 1])
for i in range(len(column_cursors))
)
def get_filter_clause(self, column_cursors):
previous_clauses = sa.and_(
column == value for column, _, value in column_cursors[:-1]
)
column, asc, value = column_cursors[-1]
        # SQLAlchemy won't let you compare a boolean with > or <, so we convert
        # it to an integer; the DBs seem to handle this just fine.
if isinstance(value, bool):
column = sa.cast(column, sa.Integer)
value = int(value)
if asc:
current_clause = column > value
else:
current_clause = column < value
return sa.and_(previous_clauses, current_clause)
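    # Illustrative expansion (not generated literally): for field orderings
    # ((created_at, asc), (id, asc)) and cursor values (t0, i0), get_filter
    # produces roughly
    #     OR(created_at > t0,
    #        AND(created_at == t0, id > i0))
    # i.e. standard keyset pagination over the composite ordering.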
def make_cursors(self, items, view, field_orderings):
"""Build a cursor for each of many items.
This method creates a cursor for each item in `items`. It produces the
same cursors as :py:meth:`make_cursor`, but is slightly more efficient
in cases where cursors for multiple items are required.
:param seq items: A sequence of instances of :py:attr:`ApiView.model`
:param view: The view we wish to paginate.
:type view: :py:class:`ModelView`
        :param seq field_orderings: A sequence of (field, asc?) tuples.
        :return: A cursor for each item in `items`.
        :rtype: seq
"""
column_fields = self.get_column_fields(view, field_orderings)
return tuple(self.render_cursor(item, column_fields) for item in items)
def make_cursor(self, item, view, field_orderings):
"""Build a cursor for a given item.
Given an item and the field orderings as above, this builds a cursor
for the item. This cursor encodes the value for each field on the item
per the specified field orderings.
This cursor should be returned in page or item metadata to allow
pagination continuing after the cursor for the item.
:param obj item: An instance :py:attr:`ApiView.model`
:param view: The view we wish to paginate.
:type view: :py:class:`ModelView`
        :param seq field_orderings: A sequence of (field, asc?) tuples.
        :return: The cursor for the item.
        :rtype: str
"""
column_fields = self.get_column_fields(view, field_orderings)
return self.render_cursor(item, column_fields)
def get_column_fields(self, view, field_orderings):
serializer = view.serializer
return tuple(
serializer.fields[field_name] for field_name, _ in field_orderings
)
def render_cursor(self, item, column_fields):
cursor = tuple(
field._serialize(getattr(item, field.name), field.name, item)
for field in column_fields
)
return self.encode_cursor(cursor)
def encode_cursor(self, cursor):
return ".".join(self.encode_value(value) for value in cursor)
def encode_value(self, value):
value = str(value)
value = value.encode()
value = base64.urlsafe_b64encode(value)
value = value.rstrip(b"=") # Strip padding.
return value.decode("ascii")
class RelayCursorPagination(CursorPaginationBase):
"""A pagination scheme that works with the Relay specification.
This pagination scheme assigns a cursor to each retrieved item. The page
metadata will contain an array of cursors, one per item. The item metadata
will include the cursor for the fetched item.
For Relay Cursor Connections Specification, see
https://facebook.github.io/relay/graphql/connections.htm.
"""
def __init__(
self,
*args,
page_info_arg=None,
default_include_page_info=False,
**kwargs,
):
super().__init__(*args, **kwargs)
self._default_include_page_info = default_include_page_info
self.page_info_arg = page_info_arg
def get_page_info(self, query, view, field_orderings, cursor):
include_page_info = (
self.deserialize_value(
fields.Boolean(),
flask.request.args.get(
self.page_info_arg, self._default_include_page_info
),
)
if self.page_info_arg
else self._default_include_page_info
)
if not include_page_info:
return {}
total = query.count()
index = 0
if cursor:
filter_clause = self.get_filter(
view,
tuple((field, not order) for field, order in field_orderings),
cursor,
)
index = query.filter(filter_clause).count()
        # In the reversed case, both the ORDER BY and the cursor filter are
        # inverted, so in practice this gives us a reverse index, i.e. the
        # distance from the end of the list. We normalize it back by
        # subtracting from the total.
if self.reversed:
index = max(total - index - 1, 0)
return {"index": index, "total": total}
def get_page(self, query, view):
field_orderings = self.get_field_orderings(view)
cursor_in = self.get_request_cursor(view, field_orderings)
page_query = query
if cursor_in is not None:
page_query = page_query.filter(
self.get_filter(view, field_orderings, cursor_in)
)
items = super().get_page(page_query, view)
if self.reversed:
items.reverse()
# Relay expects a cursor for each item.
cursors_out = self.make_cursors(items, view, field_orderings)
page_info = self.get_page_info(query, view, field_orderings, cursor_in)
meta.update_response_meta({"cursors": cursors_out, **page_info})
return items
def get_item_meta(self, item, view):
cursor = self.make_cursor(item, view, self.get_field_orderings(view))
return {"cursor": cursor}
|
taion/flask-jsonapiview
|
flask_resty/pagination.py
|
Python
|
mit
| 23,769
|
# sequences.py
# strings
>>> # 4 ways to make a string
>>> str1 = 'This is a string. We built it with single quotes.'
>>> str2 = "This is also a string, but built with double quotes."
>>> str3 = '''This is built using triple quotes,
... so it can span multiple lines.'''
>>> str4 = """This too
... is a multiline one
... built with triple double-quotes."""
>>> str4 #A
'This too\nis a multiline one\nbuilt with triple double-quotes.'
>>> print(str4) #B
This too
is a multiline one
built with triple double-quotes.
>>>
# encode / decode
>>> s = "This is üŋíc0de" # unicode string: code points
>>> type(s)
<class 'str'>
>>> encoded_s = s.encode('utf-8') # utf-8 encoded version of s
>>> encoded_s
b'This is \xc3\xbc\xc5\x8b\xc3\xadc0de' # result: bytes object
>>> type(encoded_s) # another way to verify it
<class 'bytes'>
>>> encoded_s.decode('utf-8') # let's revert to the original
'This is üŋíc0de'
>>> bytes_obj = b"A bytes object" # a bytes object
>>> type(bytes_obj)
<class 'bytes'>
# length
>>> len(str1)
49
# indexing and slicing
>>> s = "The trouble is you think you have time."
>>> s[0] # indexing at position 0, which is the first char
'T'
>>> s[5] # indexing at position 5, which is the sixth char
'r'
>>> s[:4] # slicing, we specify only the stop position
'The '
>>> s[4:] # slicing, we specify only the start position
'trouble is you think you have time.'
>>> s[2:14] # slicing, both start and stop positions
'e trouble is'
>>> s[2:14:3] # slicing, start, stop and step (every 3 chars)
'erb '
>>> s[:] # quick way of making a copy
'The trouble is you think you have time.'
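>>> # a few extra examples (added for illustration, not in the original text)
>>> s[-1] # negative index: last character
'.'
>>> s[-5:] # slice from the fifth-to-last character to the end
'time.'
>>> s[::-1] # a reversed copy of the string
'.emit evah uoy kniht uoy si elbuort ehT'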
|
mkhuthir/learnPython
|
Book_learning-python-r1.1/ch2/sequences.py
|
Python
|
mit
| 1,614
|
__all__ = ["user_controller", "plant_controller"]
|
CHrycyna/LandscapeTracker
|
app/controllers/__init__.py
|
Python
|
mit
| 49
|
# Count how much code was committed
import os
import subprocess
import pandas as pd
os.chdir('../')
# %%
start_dt = '2019-05-01'
end_dt = '2019-06-20'
commits = subprocess.check_output(
"git log --after={start_dt} --before={end_dt} --format='%s%cr'".
format(start_dt=start_dt, end_dt=end_dt),
shell=True)
commits = commits.decode('utf-8')
print(commits)
print('Committed ' + str(len(commits.split('\n'))) + ' times')
# %%
detail = subprocess.check_output(
"git log --after={start_dt} --before={end_dt} --pretty=tformat: --numstat".
format(start_dt=start_dt, end_dt=end_dt),
shell=True)
detail_str = detail.decode('utf-8')
detail_formated = [line.split('\t') for line in detail_str.split('\n') if len(line) > 0]
detail_pd = pd.DataFrame(detail_formated)
print(detail_pd)
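# Possible follow-up (not in the original script): total insertions/deletions,
# treating the '-' entries reported for binary files as 0.
if not detail_pd.empty:
    detail_pd.columns = ['added', 'deleted', 'file']
    added = pd.to_numeric(detail_pd['added'], errors='coerce').fillna(0).sum()
    deleted = pd.to_numeric(detail_pd['deleted'], errors='coerce').fillna(0).sum()
    print('added: %d, deleted: %d' % (added, deleted))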
|
guofei9987/guofei9987.github.io
|
reading/tools/commit_stats.py
|
Python
|
mit
| 825
|
import collections
class Solution:
def arrangeWords(self, text: str) -> str:
words = text.split()
table = collections.defaultdict(list)
for word in words:
table[len(word)].append(word)
result = []
for key in sorted(table):
result.extend(table[key])
return ' '.join(result).capitalize()
# Sort is stable
class Solution2:
def arrangeWords(self, text: str) -> str:
return ' '.join(sorted(text.split(), key=len)).capitalize()
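# Representative example: arrangeWords("Leetcode is cool") groups the words by
# length (2: "is", 4: "cool", 8: "Leetcode"), joins them in ascending length
# order and capitalizes only the first letter, giving "Is cool leetcode".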
|
jiadaizhao/LeetCode
|
1401-1500/1451-Rearrange Words in a Sentence/1451-Rearrange Words in a Sentence.py
|
Python
|
mit
| 513
|
# =============================================================================
# COPYRIGHT 2013 Brain Corporation.
# License under MIT license (see LICENSE file)
# =============================================================================
import doctest
import logging
import os
import pytest
import robustus
from robustus.detail import check_module_available
from robustus.detail.utility import run_shell, check_run_shell
import shutil
import subprocess
import tempfile
def test_doc_tests():
doctest.testmod(robustus, raise_on_error=True)
doctest.testmod(robustus.detail.utility, raise_on_error=True)
def test_run_shell():
def check(command, expected_ret_code, expected_output, verbose):
tf = tempfile.TemporaryFile('w+')
assert run_shell(command, shell=True, stdout=tf, verbose=verbose) == expected_ret_code
tf.seek(0)
assert tf.read() == expected_output
try:
exception_occured = False
check_run_shell(command, shell=True, verbose=verbose)
except subprocess.CalledProcessError:
exception_occured = True
        assert exception_occured == (expected_ret_code != 0)
check('echo robustus', 0, 'robustus\n', verbose=True)
check('echo robustus', 0, 'robustus\n', verbose=False)
check('echo robustus && exit 1', 1, 'robustus\n', verbose=True)
check('echo robustus && exit 1', 1, 'robustus\n', verbose=False)
def test_robustus(tmpdir):
tmpdir.chdir()
test_env = 'test_env'
# create env and install some packages
logging.info('creating ' + test_env)
robustus.execute(['env', test_env])
assert os.path.isdir(test_env)
assert os.path.isfile(os.path.join(test_env, '.robustus'))
robustus_executable = os.path.join(test_env, 'bin/robustus')
assert os.path.isfile(robustus_executable)
# install some packages
logging.info('installing requirements into ' + test_env)
run_shell([robustus_executable, 'install', 'pyserial'])
test_requirements1 = 'test_requirements1.txt'
with open(test_requirements1, 'w') as file:
file.write('pep8==1.3.3\n')
file.write('pytest==2.3.5\n')
run_shell([robustus_executable, 'install', '-r', test_requirements1])
# check packages are installed
packages_to_check = ['pyserial', 'pep8==1.3.3', 'pytest==2.3.5']
with open('freezed_requirements.txt', 'w') as req_file:
run_shell([robustus_executable, 'freeze'], stdout=req_file)
with open('freezed_requirements.txt') as req_file:
installed_packages = [line.strip() for line in req_file]
for package in packages_to_check:
assert package in installed_packages
assert check_module_available(test_env, 'serial')
assert check_module_available(test_env, 'pep8')
assert check_module_available(test_env, 'pytest')
shutil.rmtree(test_env)
def create_editable_environment(tmpdir):
"""Create an environment with an editable (shared between some tests) and
chdir into it."""
base_dir = str(tmpdir.mkdir('test_perrepo_env'))
test_env = os.path.join(base_dir, 'env')
working_dir = os.path.join(base_dir, 'working_dir')
# create env and install some packages
logging.info('creating ' + test_env)
os.mkdir(working_dir)
os.chdir(working_dir)
os.system('git init .')
robustus.execute(['env', test_env])
os.chdir(working_dir)
robustus_executable = os.path.join(test_env, 'bin/robustus')
test_requirements = os.path.join(working_dir, 'requirements.txt')
with open(test_requirements, 'w') as file:
file.write('-e git+https://github.com/braincorp/robustus-test-repo.git@master#egg=ardrone\n')
run_shell([robustus_executable, 'install', '-r', test_requirements])
return working_dir, test_env, robustus_executable
def test_pereditable(tmpdir):
"""Create a package with some editable requirements and check
that perrepo runs as expected."""
working_dir, test_env, robustus_executable = create_editable_environment(tmpdir)
# Now check that robustus behaves as expected
run_shell([robustus_executable, 'perrepo', 'touch', 'foo'])
assert os.path.exists(os.path.join(working_dir, 'foo'))
assert os.path.exists(os.path.join(test_env, 'src', 'ardrone', 'foo'))
def test_reset(tmpdir):
"""Try reset the environment"""
working_dir, test_env, robustus_executable = create_editable_environment(tmpdir)
# Change a file in the repo and check it is reset
changed_filepath = os.path.join(test_env, 'src', 'ardrone', 'README')
original_content = open(changed_filepath, 'r').read()
f = open(changed_filepath, 'w')
f.write('junk')
f.close()
run_shell([robustus_executable, 'reset', '-f'])
assert original_content == open(changed_filepath, 'r').read()
def test_install_with_tag(tmpdir):
"""Create a package with some editable requirements and install using a tag."""
base_dir = str(tmpdir.mkdir('test_perrepo_env'))
test_env = os.path.join(base_dir, 'env')
working_dir = os.path.join(base_dir, 'working_dir')
# create env and install some packages
logging.info('creating ' + test_env)
os.mkdir(working_dir)
os.chdir(working_dir)
os.system('git init .')
robustus.execute(['env', test_env])
os.chdir(working_dir)
robustus_executable = os.path.join(test_env, 'bin/robustus')
test_requirements = os.path.join(working_dir, 'requirements.txt')
with open(test_requirements, 'w') as file:
file.write('-e git+https://github.com/braincorp/robustus-test-repo.git@master#egg=robustus-test-repo\n')
run_shell([robustus_executable, 'install', '--tag', 'test-tag', '-r', test_requirements])
# Now check that robustus behaves as expected
assert os.path.exists(os.path.join(test_env, 'src', 'robustus-test-repo', 'test-tag'))
def test_install_with_branch_testing(tmpdir):
"""Create a package with some editable requirements and install using a branch
and check that one repo with the branch gets checked out using the branch
and the other ends up on master (this is how testing is often done)."""
base_dir = str(tmpdir.mkdir('test_perrepo_env'))
test_env = os.path.join(base_dir, 'env')
working_dir = os.path.join(base_dir, 'working_dir')
# create env and install some packages
logging.info('creating ' + test_env)
os.mkdir(working_dir)
os.chdir(working_dir)
    # create a new local repo
os.system('git init .')
setup_file_content =\
'''
from setuptools import setup, find_packages
setup(
name='test_perrepo_env',
author='Brain Corporation',
author_email='sinyavskiy@braincorporation.com',
url='https://github.com/braincorp/test_perrepo_env',
long_description='',
version='dev',
packages=find_packages(),
include_package_data=True,
install_requires=[])
'''
setup_file = os.path.join(working_dir, 'setup.py')
with open(setup_file, 'w') as file:
file.write(setup_file_content)
test_requirements = os.path.join(working_dir, 'requirements.txt')
with open(test_requirements, 'w') as file:
file.write('-e git+https://github.com/braincorp/robustus-test-repo.git@master#egg=robustus-test-repo\nmock==0.8.0\n-e git+https://github.com/braincorp/filecacher.git@master#egg=filecacher\n')
os.system('git add setup.py')
os.system('git add requirements.txt')
os.system('git commit -am "setup and reqs"')
# create test branch
os.system('git checkout -b test-branch')
test_file_on_test_branch = os.path.join(working_dir, 'root_test_branch.file')
with open(test_file_on_test_branch, 'w') as file:
file.write('root test')
os.system('git add root_test_branch.file')
os.system('git commit -am "root_test_branch.file"')
os.system('git checkout master')
robustus.execute(['env', test_env])
os.chdir(working_dir)
robustus_executable = os.path.join(test_env, 'bin/robustus')
run_shell([robustus_executable, 'install', '-e', '.', '--tag', 'test-branch', '--ignore-missing-refs'], verbose = True)
# Now check that robustus behaves as expected
assert os.path.exists(os.path.join(test_env, 'src', 'robustus-test-repo', 'test_branch.file'))
assert os.path.exists(os.path.join(test_env, 'lib', 'python2.7', 'site-packages',
'python-ardrone.egg-link'))
assert os.path.exists(os.path.join(test_env, 'src', 'filecacher', 'requirements.txt'))
assert os.path.exists(os.path.join(test_env, 'lib', 'python2.7', 'site-packages',
'filecacher.egg-link'))
# Now check that the repo itself is on the test branch
assert os.path.exists(test_file_on_test_branch)
if __name__ == '__main__':
test_doc_tests()
pytest.main('-s %s -n0' % __file__)
|
braincorp/robustus
|
robustus/tests/test_robustus.py
|
Python
|
mit
| 8,887
|
#!/usr/bin/python
from __future__ import print_function
import RPi.GPIO as GPIO
import time
import Queue # https://pymotw.com/2/Queue/
#GPIO pins
Taster1 = 24
Taster2 = 27
# use the GPIO (BCM) numbering as the pin reference
GPIO.setmode(GPIO.BCM)
# declare the SoC GPIOs as inputs and activate the internal pull resistor
#PULL = GPIO.PUD_DOWN #GPIO -> GND
PULL = GPIO.PUD_UP #GPIO -> 3V3
GPIO.setup(Taster1, GPIO.IN, pull_up_down=PULL)
GPIO.setup(Taster2, GPIO.IN, pull_up_down=PULL)
# define a dictionary. http://www.tutorialspoint.com/python/python_dictionary.htm
dictionary = {}
dictionary['pause'] = False
queue = Queue.Queue()
# pause/block/keep the script busy
def Pause():
while dictionary['pause'] == True:
time.sleep(1)
# ISR
def interrupt_event(pin):
if pin == Taster1:
queue.put(pin)
if pin == Taster2:
print("Führe Script weiter aus")
dictionary['pause'] = False
try:
    # add the interrupt events: react to the rising edge, register the ISR and debounce the pin
GPIO.add_event_detect(Taster1, GPIO.RISING, callback=interrupt_event, bouncetime=200)
GPIO.add_event_detect(Taster2, GPIO.RISING, callback=interrupt_event, bouncetime=200)
# keep script running
while True:
time.sleep(0.5)
if not queue.empty():
job = queue.get()
if job == Taster1:
print("Pausiere Script")
dictionary['pause'] = True
Pause()
print("...puh... Im super heavy busy...")
except (KeyboardInterrupt, SystemExit):
GPIO.cleanup()
print("\nQuit\n")
|
meigrafd/Sample-Code
|
interrupt_pause_script.py
|
Python
|
mit
| 1,638
|
#!/usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt
import re
rootPath = "/Users/jeff/work/debug/20181216_hard_fe2k_15fps/"
finalLogFile = "rosout.log.2"
def appendTimestamps(arr, start, stop, flag):
#flag = True
d = stop - start
if flag or (d > -10 and d < 2000):
arr.append(d)
return True
return False
## camera -> OA(ObjectAanalytics) -> Fusion -> Flink -> V2X
stamps = [[],[],[],[],[]]
log = open(rootPath + finalLogFile)
lines = log.readlines()
log.close()
for i in range(0, len(lines)):
line = lines[i].rstrip('\n').strip()
ret = re.findall(r'\"camera_output_ts\":(\d+),.*\"flink_output_ts\":(\d+),.*\"fusion_output_ts\":(\d+),.*\"oa_output_ts\":\[([\d,]+)\],.*\"v2xnode_input_ts\":(\d+)', line)
if len(ret) > 0 and len(ret[0]) == 5:
if i < 2:
#print("line", line)
print("ret:", ret)
        stamps[0].append(int(ret[0][0])) # camera
        stamps[2].append(int(ret[0][2])) # fusion
        stamps[3].append(int(ret[0][1])) # flink
        stamps[4].append(int(ret[0][4])) # v2x
# oa
oastamps = ret[0][3].split(',')
        t1 = int(oastamps[0])
        t2 = int(oastamps[1])
        t3 = int(oastamps[2])
        mi = min(t1, t2, t3)
        ma = max(t1, t2, t3)
#stamps[1].append((t1 + t2 + t3) / 3)
#stamps[1].append(mi)
stamps[1].append(ma)
        stamps[1].append(int(oastamps[0]))
        stamps[1].append(int(oastamps[1]))
        stamps[1].append(int(oastamps[2]))
## [ 0 1 2 3 4 ]
## [ Camera OA(3) Fusion Flink V2X ]
## [ 0 1 2 3 4 5 ]
## [ Total(V2X - Camera), OA(OA-Camera), Fusion(Fusion-OA), Flink(Flink - Fusion), V2X(V2X - Flink) Fusion-CAM ]
delays = [[], [], [], [], [], [], [], []]
for i in range(len(stamps[0])):
if appendTimestamps(delays[0], stamps[0][i], stamps[4][i], False): # total
appendTimestamps(delays[1], stamps[0][i], stamps[1][i * 4], True) # OA
appendTimestamps(delays[2], stamps[1][i * 4], stamps[2][i], True) # Fusion
appendTimestamps(delays[3], stamps[2][i], stamps[3][i], True) # Flink
appendTimestamps(delays[4], stamps[3][i], stamps[4][i], True) # V2x
appendTimestamps(delays[5], stamps[0][i], stamps[2][i], True) # Fusion - Cam
print("===length: ", len(delays[0]),len(delays[1]),len(delays[2]),len(delays[3]),len(delays[4]))
delayavg = [0,0,0,0,0,0]
if len(delays[0]) == 0:
print("empty delay array")
quit()
for i in range(len(delays[0])):
delayavg[0] = delayavg[0] + delays[0][i]
delayavg[1] = delayavg[1] + delays[1][i]
delayavg[2] = delayavg[2] + delays[2][i]
delayavg[3] = delayavg[3] + delays[3][i]
delayavg[4] = delayavg[4] + delays[4][i]
delayavg[5] = delayavg[5] + delays[5][i]
for i in range(6):
delayavg[i] = delayavg[i] / len(delays[0])
print("===AVG(Total, OA, Fusion, Flink, V2X): ", delayavg)
frameIntervals = []
for i in range(len(stamps[0]) - 1):
tmp = stamps[0][i + 1] - stamps[0][i]
if tmp < 1000:
frameIntervals.append(stamps[0][i + 1] - stamps[0][i])
## plot
plt.figure()
#plt.plot(delays[0])
#plt.plot(delays[1])
#plt.plot(delays[2])
#plt.plot(delays[3])
plt.plot(delays[4])
#plt.plot(delays[5])
plt.legend(["Total", "OA", "Fusion", "Flink", "V2X", "OA+Fusion"])
plt.show()
'''
## interval
plt.plot(frameIntervals)
plt.show()
'''
print("done!")
|
yejingfu/samples
|
tensorflow/pyplot03.py
|
Python
|
mit
| 3,650
|