| source_code (string, length 45–7.24k) | function_name (string, length 1–111) | messages (list) |
|---|---|---|
def x(self, status=201):
self.response = self.get_response_handler()
self.response.process(self.obj)
return self._response(self.response.get_response_data(), status=status)
|
create_response
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, status=201):\n self.response = self.get_response_handler()\n self.response.process(self.obj)\n return self._response(self.response.get_response_data(), status=status)\n",
"role": "user"
},
{
"content": "create_response",
"role": "assistant"
}
] |
def x(self):
self.request = self.get_request_handler()
self.obj = self.request.process().data
self.save_object(self.obj)
return self.create_response()
|
handle_post_request
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.request = self.get_request_handler()\n self.obj = self.request.process().data\n self.save_object(self.obj)\n return self.create_response()\n",
"role": "user"
},
{
"content": "handle_post_request",
"role": "assistant"
}
] |
def x(self):
if not getattr(self, '_obj', None):
self._obj = self.get_object()
if self._obj is None and not self.allow_none:
self.return_error(404)
return self._obj
|
obj
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if not getattr(self, '_obj', None):\n self._obj = self.get_object()\n if self._obj is None and not self.allow_none:\n self.return_error(404)\n return self._obj\n",
"role": "user"
},
{
"content": "obj",
"role": "assistant"
}
] |
def x(alphabet):
return fsm(alphabet=alphabet, states={0}, initial=0, finals=set(), map=
{(0): dict([(symbol, 0) for symbol in alphabet])})
|
null
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(alphabet):\n return fsm(alphabet=alphabet, states={0}, initial=0, finals=set(), map=\n {(0): dict([(symbol, 0) for symbol in alphabet])})\n",
"role": "user"
},
{
"content": "null",
"role": "assistant"
}
] |
def x(fsms, test):
alphabet = set().union(*[fsm.alphabet for fsm in fsms])
initial = dict([(i, fsm.initial) for i, fsm in enumerate(fsms)])
def x(current, symbol):
next = {}
for i in range(len(fsms)):
if symbol not in fsms[i].alphabet and anything_else in fsms[i
].alphabet:
actual_symbol = anything_else
else:
actual_symbol = symbol
if i in current and current[i] in fsms[i
].map and actual_symbol in fsms[i].map[current[i]]:
next[i] = fsms[i].map[current[i]][actual_symbol]
if len(next.keys()) == 0:
raise OblivionError
return next
def x(state):
accepts = [(i in state and state[i] in fsm.finals) for i, fsm in
enumerate(fsms)]
return test(accepts)
return crawl(alphabet, initial, final, follow).reduce()
|
parallel
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(fsms, test):\n alphabet = set().union(*[fsm.alphabet for fsm in fsms])\n initial = dict([(i, fsm.initial) for i, fsm in enumerate(fsms)])\n\n def x(current, symbol):\n next = {}\n for i in range(len(fsms)):\n if symbol not in fsms[i].alphabet and anything_else in fsms[i\n ].alphabet:\n actual_symbol = anything_else\n else:\n actual_symbol = symbol\n if i in current and current[i] in fsms[i\n ].map and actual_symbol in fsms[i].map[current[i]]:\n next[i] = fsms[i].map[current[i]][actual_symbol]\n if len(next.keys()) == 0:\n raise OblivionError\n return next\n\n def x(state):\n accepts = [(i in state and state[i] in fsm.finals) for i, fsm in\n enumerate(fsms)]\n return test(accepts)\n return crawl(alphabet, initial, final, follow).reduce()\n",
"role": "user"
},
{
"content": "parallel",
"role": "assistant"
}
] |
def x(alphabet, initial, final, follow):
states = [initial]
finals = set()
map = {}
i = 0
while i < len(states):
state = states[i]
if final(state):
finals.add(i)
map[i] = {}
for symbol in sorted(alphabet, key=key):
try:
next = follow(state, symbol)
try:
j = states.index(next)
except ValueError:
j = len(states)
states.append(next)
except OblivionError:
continue
map[i][symbol] = j
i += 1
return fsm(alphabet=alphabet, states=range(len(states)), initial=0,
finals=finals, map=map)
|
crawl
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(alphabet, initial, final, follow):\n states = [initial]\n finals = set()\n map = {}\n i = 0\n while i < len(states):\n state = states[i]\n if final(state):\n finals.add(i)\n map[i] = {}\n for symbol in sorted(alphabet, key=key):\n try:\n next = follow(state, symbol)\n try:\n j = states.index(next)\n except ValueError:\n j = len(states)\n states.append(next)\n except OblivionError:\n continue\n map[i][symbol] = j\n i += 1\n return fsm(alphabet=alphabet, states=range(len(states)), initial=0,\n finals=finals, map=map)\n",
"role": "user"
},
{
"content": "crawl",
"role": "assistant"
}
] |
def x(self, input):
state = self.initial
for symbol in input:
if anything_else in self.alphabet and not symbol in self.alphabet:
symbol = anything_else
if not (state in self.map and symbol in self.map[state]):
return False
state = self.map[state][symbol]
return state in self.finals
|
accepts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, input):\n state = self.initial\n for symbol in input:\n if anything_else in self.alphabet and not symbol in self.alphabet:\n symbol = anything_else\n if not (state in self.map and symbol in self.map[state]):\n return False\n state = self.map[state][symbol]\n return state in self.finals\n",
"role": "user"
},
{
"content": "accepts",
"role": "assistant"
}
] |
def x(*fsms):
alphabet = set().union(*[fsm.alphabet for fsm in fsms])
def x(i, substate):
result = {(i, substate)}
while i < len(fsms) - 1 and substate in fsms[i].finals:
i += 1
substate = fsms[i].initial
result.add((i, substate))
return result
initial = set()
if len(fsms) > 0:
initial.update(connect_all(0, fsms[0].initial))
initial = frozenset(initial)
def x(state):
for i, substate in state:
if i == len(fsms) - 1 and substate in fsms[i].finals:
return True
return False
def x(current, symbol):
next = set()
for i, substate in current:
fsm = fsms[i]
if substate in fsm.map and symbol in fsm.map[substate]:
next.update(connect_all(i, fsm.map[substate][symbol]))
if len(next) == 0:
raise OblivionError
return frozenset(next)
return crawl(alphabet, initial, final, follow).reduce()
|
concatenate
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(*fsms):\n alphabet = set().union(*[fsm.alphabet for fsm in fsms])\n\n def x(i, substate):\n result = {(i, substate)}\n while i < len(fsms) - 1 and substate in fsms[i].finals:\n i += 1\n substate = fsms[i].initial\n result.add((i, substate))\n return result\n initial = set()\n if len(fsms) > 0:\n initial.update(connect_all(0, fsms[0].initial))\n initial = frozenset(initial)\n\n def x(state):\n for i, substate in state:\n if i == len(fsms) - 1 and substate in fsms[i].finals:\n return True\n return False\n\n def x(current, symbol):\n next = set()\n for i, substate in current:\n fsm = fsms[i]\n if substate in fsm.map and symbol in fsm.map[substate]:\n next.update(connect_all(i, fsm.map[substate][symbol]))\n if len(next) == 0:\n raise OblivionError\n return frozenset(next)\n return crawl(alphabet, initial, final, follow).reduce()\n",
"role": "user"
},
{
"content": "concatenate",
"role": "assistant"
}
] |
def x(self):
alphabet = self.alphabet
initial = {self.initial}
def x(state, symbol):
next = set()
for substate in state:
if substate in self.map and symbol in self.map[substate]:
next.add(self.map[substate][symbol])
if (substate in self.finals and self.initial in self.map and
symbol in self.map[self.initial]):
next.add(self.map[self.initial][symbol])
if len(next) == 0:
raise OblivionError
return frozenset(next)
def x(state):
return any(substate in self.finals for substate in state)
return crawl(alphabet, initial, final, follow) | epsilon(alphabet)
|
star
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n alphabet = self.alphabet\n initial = {self.initial}\n\n def x(state, symbol):\n next = set()\n for substate in state:\n if substate in self.map and symbol in self.map[substate]:\n next.add(self.map[substate][symbol])\n if (substate in self.finals and self.initial in self.map and \n symbol in self.map[self.initial]):\n next.add(self.map[self.initial][symbol])\n if len(next) == 0:\n raise OblivionError\n return frozenset(next)\n\n def x(state):\n return any(substate in self.finals for substate in state)\n return crawl(alphabet, initial, final, follow) | epsilon(alphabet)\n",
"role": "user"
},
{
"content": "star",
"role": "assistant"
}
] |
def x(self, multiplier):
if multiplier < 0:
raise Exception("Can't multiply an FSM by " + repr(multiplier))
alphabet = self.alphabet
initial = {(self.initial, 0)}
def x(state):
for substate, iteration in state:
if substate == self.initial and (self.initial in self.finals or
iteration == multiplier):
return True
return False
def x(current, symbol):
next = []
for substate, iteration in current:
if (iteration < multiplier and substate in self.map and symbol in
self.map[substate]):
next.append((self.map[substate][symbol], iteration))
if self.map[substate][symbol] in self.finals:
next.append((self.initial, iteration + 1))
if len(next) == 0:
raise OblivionError
return frozenset(next)
return crawl(alphabet, initial, final, follow).reduce()
|
times
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, multiplier):\n if multiplier < 0:\n raise Exception(\"Can't multiply an FSM by \" + repr(multiplier))\n alphabet = self.alphabet\n initial = {(self.initial, 0)}\n\n def x(state):\n for substate, iteration in state:\n if substate == self.initial and (self.initial in self.finals or\n iteration == multiplier):\n return True\n return False\n\n def x(current, symbol):\n next = []\n for substate, iteration in current:\n if (iteration < multiplier and substate in self.map and symbol in\n self.map[substate]):\n next.append((self.map[substate][symbol], iteration))\n if self.map[substate][symbol] in self.finals:\n next.append((self.initial, iteration + 1))\n if len(next) == 0:\n raise OblivionError\n return frozenset(next)\n return crawl(alphabet, initial, final, follow).reduce()\n",
"role": "user"
},
{
"content": "times",
"role": "assistant"
}
] |
def x(self):
alphabet = self.alphabet
initial = {(0): self.initial}
def x(current, symbol):
next = {}
if 0 in current and current[0] in self.map and symbol in self.map[
current[0]]:
next[0] = self.map[current[0]][symbol]
return next
def x(state):
return not (0 in state and state[0] in self.finals)
return crawl(alphabet, initial, final, follow).reduce()
|
everythingbut
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n alphabet = self.alphabet\n initial = {(0): self.initial}\n\n def x(current, symbol):\n next = {}\n if 0 in current and current[0] in self.map and symbol in self.map[\n current[0]]:\n next[0] = self.map[current[0]][symbol]\n return next\n\n def x(state):\n return not (0 in state and state[0] in self.finals)\n return crawl(alphabet, initial, final, follow).reduce()\n",
"role": "user"
},
{
"content": "everythingbut",
"role": "assistant"
}
] |
def x(self):
alphabet = self.alphabet
initial = frozenset(self.finals)
def x(current, symbol):
next = frozenset([prev for prev in self.map for state in current if
symbol in self.map[prev] and self.map[prev][symbol] == state])
if len(next) == 0:
raise OblivionError
return next
def x(state):
return self.initial in state
return crawl(alphabet, initial, final, follow)
|
reversed
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n alphabet = self.alphabet\n initial = frozenset(self.finals)\n\n def x(current, symbol):\n next = frozenset([prev for prev in self.map for state in current if\n symbol in self.map[prev] and self.map[prev][symbol] == state])\n if len(next) == 0:\n raise OblivionError\n return next\n\n def x(state):\n return self.initial in state\n return crawl(alphabet, initial, final, follow)\n",
"role": "user"
},
{
"content": "reversed",
"role": "assistant"
}
] |
def x(self, state):
reachable = [state]
i = 0
while i < len(reachable):
current = reachable[i]
if current in self.finals:
return True
if current in self.map:
for symbol in self.map[current]:
next = self.map[current][symbol]
if next not in reachable:
reachable.append(next)
i += 1
return False
|
islive
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, state):\n reachable = [state]\n i = 0\n while i < len(reachable):\n current = reachable[i]\n if current in self.finals:\n return True\n if current in self.map:\n for symbol in self.map[current]:\n next = self.map[current][symbol]\n if next not in reachable:\n reachable.append(next)\n i += 1\n return False\n",
"role": "user"
},
{
"content": "islive",
"role": "assistant"
}
] |
def x(self):
livestates = set(state for state in self.states if self.islive(state))
strings = []
cstate = self.initial
cstring = []
if cstate in livestates:
if cstate in self.finals:
yield cstring
strings.append((cstring, cstate))
i = 0
while i < len(strings):
cstring, cstate = strings[i]
if cstate in self.map:
for symbol in sorted(self.map[cstate], key=key):
nstate = self.map[cstate][symbol]
nstring = cstring + [symbol]
if nstate in livestates:
if nstate in self.finals:
yield nstring
strings.append((nstring, nstate))
i += 1
|
strings
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n livestates = set(state for state in self.states if self.islive(state))\n strings = []\n cstate = self.initial\n cstring = []\n if cstate in livestates:\n if cstate in self.finals:\n yield cstring\n strings.append((cstring, cstate))\n i = 0\n while i < len(strings):\n cstring, cstate = strings[i]\n if cstate in self.map:\n for symbol in sorted(self.map[cstate], key=key):\n nstate = self.map[cstate][symbol]\n nstring = cstring + [symbol]\n if nstate in livestates:\n if nstate in self.finals:\n yield nstring\n strings.append((nstring, nstate))\n i += 1\n",
"role": "user"
},
{
"content": "strings",
"role": "assistant"
}
] |
def x(self):
num_strings = {}
def x(state):
if self.islive(state):
if state in num_strings:
if num_strings[state] is None:
raise OverflowError(state)
return num_strings[state]
num_strings[state] = None
n = 0
if state in self.finals:
n += 1
if state in self.map:
for symbol in self.map[state]:
n += get_num_strings(self.map[state][symbol])
num_strings[state] = n
else:
num_strings[state] = 0
return num_strings[state]
return get_num_strings(self.initial)
|
cardinality
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n num_strings = {}\n\n def x(state):\n if self.islive(state):\n if state in num_strings:\n if num_strings[state] is None:\n raise OverflowError(state)\n return num_strings[state]\n num_strings[state] = None\n n = 0\n if state in self.finals:\n n += 1\n if state in self.map:\n for symbol in self.map[state]:\n n += get_num_strings(self.map[state][symbol])\n num_strings[state] = n\n else:\n num_strings[state] = 0\n return num_strings[state]\n return get_num_strings(self.initial)\n",
"role": "user"
},
{
"content": "cardinality",
"role": "assistant"
}
] |
def x(self):
return fsm(alphabet=self.alphabet, states=self.states, initial=self.
initial, finals=self.finals, map=self.map)
|
copy
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n return fsm(alphabet=self.alphabet, states=self.states, initial=self.\n initial, finals=self.finals, map=self.map)\n",
"role": "user"
},
{
"content": "copy",
"role": "assistant"
}
] |
def x(self, input):
try:
state = self.initial
for symbol in input:
if not symbol in self.alphabet:
if not anything_else in self.alphabet:
raise KeyError(symbol)
symbol = anything_else
if not (state in self.map and symbol in self.map[state]):
raise OblivionError
state = self.map[state][symbol]
return fsm(alphabet=self.alphabet, states=self.states, initial=
state, finals=self.finals, map=self.map)
except OblivionError:
return null(self.alphabet)
|
derive
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, input):\n try:\n state = self.initial\n for symbol in input:\n if not symbol in self.alphabet:\n if not anything_else in self.alphabet:\n raise KeyError(symbol)\n symbol = anything_else\n if not (state in self.map and symbol in self.map[state]):\n raise OblivionError\n state = self.map[state][symbol]\n return fsm(alphabet=self.alphabet, states=self.states, initial=\n state, finals=self.finals, map=self.map)\n except OblivionError:\n return null(self.alphabet)\n",
"role": "user"
},
{
"content": "derive",
"role": "assistant"
}
] |
def x(self, endpoint):
if self.url_prefix:
url = '{prefix}{url}'.format(prefix=self.url_prefix, url=endpoint.url)
else:
url = endpoint.url
self.add_url_rule(url, view_func=endpoint.as_view(endpoint.get_name()))
|
add_endpoint
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, endpoint):\n if self.url_prefix:\n url = '{prefix}{url}'.format(prefix=self.url_prefix, url=endpoint.url)\n else:\n url = endpoint.url\n self.add_url_rule(url, view_func=endpoint.as_view(endpoint.get_name()))\n",
"role": "user"
},
{
"content": "add_endpoint",
"role": "assistant"
}
] |
def x(method):
def x(self, *args, **kwargs):
result = method(self, *args, **kwargs)
if result == self:
return result
return result.reduce()
return new_method
|
reduce_after
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(method):\n\n def x(self, *args, **kwargs):\n result = method(self, *args, **kwargs)\n if result == self:\n return result\n return result.reduce()\n return new_method\n",
"role": "user"
},
{
"content": "reduce_after",
"role": "assistant"
}
] |
def x(method):
fsm_method = getattr(fsm.fsm, method.__name__)
def x(*legos):
alphabet = set().union(*[lego.alphabet() for lego in legos])
return from_fsm(fsm_method(*[lego.to_fsm(alphabet) for lego in legos]))
return new_method
|
call_fsm
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(method):\n fsm_method = getattr(fsm.fsm, method.__name__)\n\n def x(*legos):\n alphabet = set().union(*[lego.alphabet() for lego in legos])\n return from_fsm(fsm_method(*[lego.to_fsm(alphabet) for lego in legos]))\n return new_method\n",
"role": "user"
},
{
"content": "call_fsm",
"role": "assistant"
}
] |
def x(f):
for symbol in f.alphabet:
if symbol == fsm.anything_else:
continue
if isinstance(symbol, str) and len(symbol) == 1:
continue
raise Exception('Symbol ' + repr(symbol) +
' cannot be used in a regular expression')
outside = object()
states = [f.initial]
i = 0
while i < len(states):
current = states[i]
if current in f.map:
for symbol in sorted(f.map[current], key=fsm.key):
next = f.map[current][symbol]
if next not in states:
states.append(next)
i += 1
brz = {}
for a in f.states:
brz[a] = {}
for b in (f.states | {outside}):
brz[a][b] = nothing
for a in f.map:
for symbol in f.map[a]:
b = f.map[a][symbol]
if symbol == fsm.anything_else:
brz[a][b] |= ~charclass(f.alphabet - {fsm.anything_else})
else:
brz[a][b] |= charclass({symbol})
if a in f.finals:
brz[a][outside] |= emptystring
for i in reversed(range(len(states))):
a = states[i]
loop = brz[a][a] * star
del brz[a][a]
for right in brz[a]:
brz[a][right] = loop + brz[a][right]
for j in range(i):
b = states[j]
univ = brz[b][a]
del brz[b][a]
for right in brz[a]:
brz[b][right] |= univ + brz[a][right]
return brz[f.initial][outside].reduce()
|
from_fsm
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(f):\n for symbol in f.alphabet:\n if symbol == fsm.anything_else:\n continue\n if isinstance(symbol, str) and len(symbol) == 1:\n continue\n raise Exception('Symbol ' + repr(symbol) +\n ' cannot be used in a regular expression')\n outside = object()\n states = [f.initial]\n i = 0\n while i < len(states):\n current = states[i]\n if current in f.map:\n for symbol in sorted(f.map[current], key=fsm.key):\n next = f.map[current][symbol]\n if next not in states:\n states.append(next)\n i += 1\n brz = {}\n for a in f.states:\n brz[a] = {}\n for b in (f.states | {outside}):\n brz[a][b] = nothing\n for a in f.map:\n for symbol in f.map[a]:\n b = f.map[a][symbol]\n if symbol == fsm.anything_else:\n brz[a][b] |= ~charclass(f.alphabet - {fsm.anything_else})\n else:\n brz[a][b] |= charclass({symbol})\n if a in f.finals:\n brz[a][outside] |= emptystring\n for i in reversed(range(len(states))):\n a = states[i]\n loop = brz[a][a] * star\n del brz[a][a]\n for right in brz[a]:\n brz[a][right] = loop + brz[a][right]\n for j in range(i):\n b = states[j]\n univ = brz[b][a]\n del brz[b][a]\n for right in brz[a]:\n brz[b][right] |= univ + brz[a][right]\n return brz[f.initial][outside].reduce()\n",
"role": "user"
},
{
"content": "from_fsm",
"role": "assistant"
}
] |
def x(cls, string):
obj, i = cls.match(string, 0)
if i != len(string):
raise Exception("Could not parse '" + string + "' beyond index " +
str(i))
return obj
|
parse
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(cls, string):\n obj, i = cls.match(string, 0)\n if i != len(string):\n raise Exception(\"Could not parse '\" + string + \"' beyond index \" +\n str(i))\n return obj\n",
"role": "user"
},
{
"content": "parse",
"role": "assistant"
}
] |
def x(self, otherchar=None):
for string in self.to_fsm().strings():
if fsm.anything_else in string:
if otherchar == None:
raise Exception("Please choose an 'otherchar'")
string = [(otherchar if char == fsm.anything_else else char) for
char in string]
yield ''.join(string)
|
strings
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, otherchar=None):\n for string in self.to_fsm().strings():\n if fsm.anything_else in string:\n if otherchar == None:\n raise Exception(\"Please choose an 'otherchar'\")\n string = [(otherchar if char == fsm.anything_else else char) for\n char in string]\n yield ''.join(string)\n",
"role": "user"
},
{
"content": "strings",
"role": "assistant"
}
] |
def x(self, other):
return other.optional == bound(0
) or self.optional * other.mandatory + bound(1) >= self.mandatory
|
canmultiplyby
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other):\n return other.optional == bound(0\n ) or self.optional * other.mandatory + bound(1) >= self.mandatory\n",
"role": "user"
},
{
"content": "canmultiplyby",
"role": "assistant"
}
] |
def x(self, other):
return not (self.max < other.min or other.max < self.min)
|
canintersect
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other):\n return not (self.max < other.min or other.max < self.min)\n",
"role": "user"
},
{
"content": "canintersect",
"role": "assistant"
}
] |
def x(self, other):
return not (self.max + bound(1) < other.min or other.max + bound(1) <
self.min)
|
canunion
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other):\n return not (self.max + bound(1) < other.min or other.max + bound(1) <\n self.min)\n",
"role": "user"
},
{
"content": "canunion",
"role": "assistant"
}
] |
def x(self, other):
mandatory = min(self.mandatory, other.mandatory)
optional = min(self.optional, other.optional)
return multiplier(mandatory, mandatory + optional)
|
common
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other):\n mandatory = min(self.mandatory, other.mandatory)\n optional = min(self.optional, other.optional)\n return multiplier(mandatory, mandatory + optional)\n",
"role": "user"
},
{
"content": "common",
"role": "assistant"
}
] |
def x(self, other):
if other.multiplicand != self.multiplicand:
raise Exception("Can't subtract " + repr(other) + ' from ' + repr(self)
)
return mult(self.multiplicand, self.multiplier - other.multiplier)
|
dock
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other):\n if other.multiplicand != self.multiplicand:\n raise Exception(\"Can't subtract \" + repr(other) + ' from ' + repr(self)\n )\n return mult(self.multiplicand, self.multiplier - other.multiplier)\n",
"role": "user"
},
{
"content": "dock",
"role": "assistant"
}
] |
def x(self, other):
if self.multiplicand == other.multiplicand:
return mult(self.multiplicand, self.multiplier.common(other.multiplier)
)
return mult(nothing, zero)
|
common
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other):\n if self.multiplicand == other.multiplicand:\n return mult(self.multiplicand, self.multiplier.common(other.multiplier)\n )\n return mult(nothing, zero)\n",
"role": "user"
},
{
"content": "common",
"role": "assistant"
}
] |
def x(self, other, suffix=False):
mults = []
indices = range(min(len(self.mults), len(other.mults)))
if suffix:
indices = [(-i - 1) for i in indices]
for i in indices:
common = self.mults[i].common(other.mults[i])
if common.multiplier == zero:
break
mults.append(common)
if common != self.mults[i] or common != other.mults[i]:
break
if suffix:
mults = reversed(mults)
return conc(*mults)
|
common
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other, suffix=False):\n mults = []\n indices = range(min(len(self.mults), len(other.mults)))\n if suffix:\n indices = [(-i - 1) for i in indices]\n for i in indices:\n common = self.mults[i].common(other.mults[i])\n if common.multiplier == zero:\n break\n mults.append(common)\n if common != self.mults[i] or common != other.mults[i]:\n break\n if suffix:\n mults = reversed(mults)\n return conc(*mults)\n",
"role": "user"
},
{
"content": "common",
"role": "assistant"
}
] |
def x(self, other):
new = list(self.mults)
for i in reversed(range(len(other.mults))):
j = len(self.mults) - len(other.mults) + i
new[j] = new[j].dock(other.mults[i])
if new[j].multiplier == zero:
del new[j]
elif i != 0:
raise Exception("Can't subtract " + repr(other) + ' from ' +
repr(self))
return conc(*new)
|
dock
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other):\n new = list(self.mults)\n for i in reversed(range(len(other.mults))):\n j = len(self.mults) - len(other.mults) + i\n new[j] = new[j].dock(other.mults[i])\n if new[j].multiplier == zero:\n del new[j]\n elif i != 0:\n raise Exception(\"Can't subtract \" + repr(other) + ' from ' +\n repr(self))\n return conc(*new)\n",
"role": "user"
},
{
"content": "dock",
"role": "assistant"
}
] |
def x(self, other):
return pattern(*[c.dock(other) for c in self.concs])
|
dock
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other):\n return pattern(*[c.dock(other) for c in self.concs])\n",
"role": "user"
},
{
"content": "dock",
"role": "assistant"
}
] |
def x(self, other):
return pattern(*[c.behead(other) for c in self.concs])
|
behead
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, other):\n return pattern(*[c.behead(other) for c in self.concs])\n",
"role": "user"
},
{
"content": "behead",
"role": "assistant"
}
] |
def x(self, suffix=False):
if len(self.concs) == 0:
raise Exception("Can't call _commonconc on " + repr(self))
from functools import reduce
return reduce(lambda x, y: x.common(y, suffix=suffix), self.concs)
|
commonconc
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, suffix=False):\n if len(self.concs) == 0:\n raise Exception(\"Can't call _commonconc on \" + repr(self))\n from functools import reduce\n return reduce(lambda x, y: x.common(y, suffix=suffix), self.concs)\n",
"role": "user"
},
{
"content": "commonconc",
"role": "assistant"
}
] |
def x(self, app):
for other in app.plugins:
if not isinstance(other, SQLAlchemyPlugin):
continue
if other.keyword == self.keyword:
raise bottle.PluginError(
'Found another SQLAlchemy plugin with conflicting settings (non-unique keyword).'
)
elif other.name == self.name:
self.name += '_%s' % self.keyword
if self.create and not self.metadata:
raise bottle.PluginError('Define metadata value to create database.')
|
setup
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, app):\n for other in app.plugins:\n if not isinstance(other, SQLAlchemyPlugin):\n continue\n if other.keyword == self.keyword:\n raise bottle.PluginError(\n 'Found another SQLAlchemy plugin with conflicting settings (non-unique keyword).'\n )\n elif other.name == self.name:\n self.name += '_%s' % self.keyword\n if self.create and not self.metadata:\n raise bottle.PluginError('Define metadata value to create database.')\n",
"role": "user"
},
{
"content": "setup",
"role": "assistant"
}
] |
def x(self, *args, **kwargs):
self.__in_send_multipart = True
try:
msg = super(GreenSocket, self).send_multipart(*args, **kwargs)
finally:
self.__in_send_multipart = False
self.__state_changed()
return msg
|
send_multipart
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, *args, **kwargs):\n self.__in_send_multipart = True\n try:\n msg = super(GreenSocket, self).send_multipart(*args, **kwargs)\n finally:\n self.__in_send_multipart = False\n self.__state_changed()\n return msg\n",
"role": "user"
},
{
"content": "send_multipart",
"role": "assistant"
}
] |
def x(self, *args, **kwargs):
self.__in_recv_multipart = True
try:
msg = super(GreenSocket, self).recv_multipart(*args, **kwargs)
finally:
self.__in_recv_multipart = False
self.__state_changed()
return msg
|
recv_multipart
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, *args, **kwargs):\n self.__in_recv_multipart = True\n try:\n msg = super(GreenSocket, self).recv_multipart(*args, **kwargs)\n finally:\n self.__in_recv_multipart = False\n self.__state_changed()\n return msg\n",
"role": "user"
},
{
"content": "recv_multipart",
"role": "assistant"
}
] |
def x(path, include_core=True, subfolders=None, path_in_arc=None):
def x(varStr):
return re.sub('\\W|^(?=\\d)', '_', str(varStr))
path = Path(path)
if zipfile.is_zipfile(str(path)):
with zipfile.ZipFile(file=str(path), mode='r') as zz:
zipcontent = zz.namelist()
if path_in_arc:
path_in_arc = str(path_in_arc)
if path_in_arc not in zipcontent:
path_in_arc = os.path.join(path_in_arc, DEFAULT_FILE_NAMES[
'filepara'])
if path_in_arc not in zipcontent:
raise ReadError(
'File parameter file {} not found in {}. Tip: specify fileparameter filename through "path_in_arc" if different from default.'
.format(DEFAULT_FILE_NAMES['filepara'], path))
else:
with zipfile.ZipFile(file=str(path), mode='r') as zz:
fpfiles = [f for f in zz.namelist() if os.path.basename(f) ==
DEFAULT_FILE_NAMES['filepara'] and json.loads(zz.read(f
).decode('utf-8'))['systemtype'] == 'IOSystem']
if len(fpfiles) == 0:
raise ReadError(
'File parameter file {} not found in {}. Tip: specify fileparameter filename through "path_in_arc" if different from default.'
.format(DEFAULT_FILE_NAMES['filepara'], path))
elif len(fpfiles) > 1:
raise ReadError(
'Mulitple mrio archives found in {}. Specify one by the parameter "path_in_arc"'
.format(path))
else:
path_in_arc = os.path.dirname(fpfiles[0])
logging.debug('Expect file parameter-file at {} in {}'.format(
path_in_arc, path))
io = load(path, include_core=include_core, path_in_arc=path_in_arc)
if zipfile.is_zipfile(str(path)):
root_in_zip = os.path.dirname(path_in_arc)
if subfolders is None:
subfolders = {os.path.relpath(os.path.dirname(p), root_in_zip) for
p in zipcontent if p.startswith(root_in_zip) and os.path.
dirname(p) != root_in_zip}
for subfolder_name in subfolders:
if subfolder_name not in zipcontent + list({os.path.dirname(p) for
p in zipcontent}):
subfolder_full = os.path.join(root_in_zip, subfolder_name)
else:
subfolder_full = subfolder_name
subfolder_name = os.path.basename(os.path.normpath(subfolder_name))
if subfolder_name not in zipcontent:
subfolder_full_meta = os.path.join(subfolder_full,
DEFAULT_FILE_NAMES['filepara'])
else:
subfolder_full_meta = subfolder_full
if subfolder_full_meta in zipcontent:
ext = load(path, include_core=include_core, path_in_arc=
subfolder_full_meta)
setattr(io, clean(subfolder_name), ext)
io.meta._add_fileio('Added satellite account from {}'.
format(subfolder_full))
else:
continue
else:
if subfolders is None:
subfolders = [d for d in path.iterdir() if d.is_dir()]
for subfolder_name in subfolders:
if not os.path.exists(str(subfolder_name)):
subfolder_full = path / subfolder_name
else:
subfolder_full = subfolder_name
subfolder_name = os.path.basename(os.path.normpath(subfolder_name))
if not os.path.isfile(str(subfolder_full)):
subfolder_full_meta = subfolder_full / DEFAULT_FILE_NAMES[
'filepara']
else:
subfolder_full_meta = subfolder_full
if subfolder_full_meta.exists():
ext = load(subfolder_full, include_core=include_core)
setattr(io, clean(subfolder_name), ext)
io.meta._add_fileio('Added satellite account from {}'.
format(subfolder_full))
else:
continue
return io
|
load_all
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(path, include_core=True, subfolders=None, path_in_arc=None):\n\n def x(varStr):\n return re.sub('\\\\W|^(?=\\\\d)', '_', str(varStr))\n path = Path(path)\n if zipfile.is_zipfile(str(path)):\n with zipfile.ZipFile(file=str(path), mode='r') as zz:\n zipcontent = zz.namelist()\n if path_in_arc:\n path_in_arc = str(path_in_arc)\n if path_in_arc not in zipcontent:\n path_in_arc = os.path.join(path_in_arc, DEFAULT_FILE_NAMES[\n 'filepara'])\n if path_in_arc not in zipcontent:\n raise ReadError(\n 'File parameter file {} not found in {}. Tip: specify fileparameter filename through \"path_in_arc\" if different from default.'\n .format(DEFAULT_FILE_NAMES['filepara'], path))\n else:\n with zipfile.ZipFile(file=str(path), mode='r') as zz:\n fpfiles = [f for f in zz.namelist() if os.path.basename(f) ==\n DEFAULT_FILE_NAMES['filepara'] and json.loads(zz.read(f\n ).decode('utf-8'))['systemtype'] == 'IOSystem']\n if len(fpfiles) == 0:\n raise ReadError(\n 'File parameter file {} not found in {}. Tip: specify fileparameter filename through \"path_in_arc\" if different from default.'\n .format(DEFAULT_FILE_NAMES['filepara'], path))\n elif len(fpfiles) > 1:\n raise ReadError(\n 'Mulitple mrio archives found in {}. Specify one by the parameter \"path_in_arc\"'\n .format(path))\n else:\n path_in_arc = os.path.dirname(fpfiles[0])\n logging.debug('Expect file parameter-file at {} in {}'.format(\n path_in_arc, path))\n io = load(path, include_core=include_core, path_in_arc=path_in_arc)\n if zipfile.is_zipfile(str(path)):\n root_in_zip = os.path.dirname(path_in_arc)\n if subfolders is None:\n subfolders = {os.path.relpath(os.path.dirname(p), root_in_zip) for\n p in zipcontent if p.startswith(root_in_zip) and os.path.\n dirname(p) != root_in_zip}\n for subfolder_name in subfolders:\n if subfolder_name not in zipcontent + list({os.path.dirname(p) for\n p in zipcontent}):\n subfolder_full = os.path.join(root_in_zip, subfolder_name)\n else:\n subfolder_full = subfolder_name\n subfolder_name = os.path.basename(os.path.normpath(subfolder_name))\n if subfolder_name not in zipcontent:\n subfolder_full_meta = os.path.join(subfolder_full,\n DEFAULT_FILE_NAMES['filepara'])\n else:\n subfolder_full_meta = subfolder_full\n if subfolder_full_meta in zipcontent:\n ext = load(path, include_core=include_core, path_in_arc=\n subfolder_full_meta)\n setattr(io, clean(subfolder_name), ext)\n io.meta._add_fileio('Added satellite account from {}'.\n format(subfolder_full))\n else:\n continue\n else:\n if subfolders is None:\n subfolders = [d for d in path.iterdir() if d.is_dir()]\n for subfolder_name in subfolders:\n if not os.path.exists(str(subfolder_name)):\n subfolder_full = path / subfolder_name\n else:\n subfolder_full = subfolder_name\n subfolder_name = os.path.basename(os.path.normpath(subfolder_name))\n if not os.path.isfile(str(subfolder_full)):\n subfolder_full_meta = subfolder_full / DEFAULT_FILE_NAMES[\n 'filepara']\n else:\n subfolder_full_meta = subfolder_full\n if subfolder_full_meta.exists():\n ext = load(subfolder_full, include_core=include_core)\n setattr(io, clean(subfolder_name), ext)\n io.meta._add_fileio('Added satellite account from {}'.\n format(subfolder_full))\n else:\n continue\n return io\n",
"role": "user"
},
{
"content": "load_all",
"role": "assistant"
}
] |
def x(path, include_core=True, path_in_arc=''):
path = Path(path)
if not path.exists():
raise ReadError('Given path does not exist')
file_para = get_file_para(path=path, path_in_arc=path_in_arc)
if file_para.content['systemtype'] == GENERIC_NAMES['iosys']:
if zipfile.is_zipfile(str(path)):
ret_system = IOSystem(meta=MRIOMetaData(location=path,
path_in_arc=os.path.join(file_para.folder,
DEFAULT_FILE_NAMES['metadata'])))
ret_system.meta._add_fileio('Loaded IO system from {} - {}'.
format(path, path_in_arc))
else:
ret_system = IOSystem(meta=MRIOMetaData(location=path /
DEFAULT_FILE_NAMES['metadata']))
ret_system.meta._add_fileio('Loaded IO system from {}'.format(path)
)
elif file_para.content['systemtype'] == GENERIC_NAMES['ext']:
ret_system = Extension(file_para.content['name'])
else:
raise ReadError('Type of system no defined in the file parameters')
return None
for key in file_para.content['files']:
if not include_core and key not in ['A', 'L', 'Z']:
continue
file_name = file_para.content['files'][key]['name']
nr_index_col = file_para.content['files'][key]['nr_index_col']
nr_header = file_para.content['files'][key]['nr_header']
_index_col = list(range(int(nr_index_col)))
_header = list(range(int(nr_header)))
_index_col = 0 if _index_col == [0] else _index_col
_header = 0 if _header == [0] else _header
if zipfile.is_zipfile(str(path)):
full_file_name = os.path.join(file_para.folder, file_name)
logging.info('Load data from {}'.format(full_file_name))
with zipfile.ZipFile(file=str(path)) as zf:
if os.path.splitext(str(full_file_name))[1
] == '.pkl' or os.path.splitext(str(full_file_name))[1
] == '.pickle':
setattr(ret_system, key, pd.read_pickle(zf.open(
full_file_name)))
else:
setattr(ret_system, key, pd.read_table(zf.open(
full_file_name), index_col=_index_col, header=_header))
else:
full_file_name = path / file_name
logging.info('Load data from {}'.format(full_file_name))
if os.path.splitext(str(full_file_name))[1
] == '.pkl' or os.path.splitext(str(full_file_name))[1
] == '.pickle':
setattr(ret_system, key, pd.read_pickle(full_file_name))
else:
setattr(ret_system, key, pd.read_table(full_file_name,
index_col=_index_col, header=_header))
return ret_system
|
load
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(path, include_core=True, path_in_arc=''):\n path = Path(path)\n if not path.exists():\n raise ReadError('Given path does not exist')\n file_para = get_file_para(path=path, path_in_arc=path_in_arc)\n if file_para.content['systemtype'] == GENERIC_NAMES['iosys']:\n if zipfile.is_zipfile(str(path)):\n ret_system = IOSystem(meta=MRIOMetaData(location=path,\n path_in_arc=os.path.join(file_para.folder,\n DEFAULT_FILE_NAMES['metadata'])))\n ret_system.meta._add_fileio('Loaded IO system from {} - {}'.\n format(path, path_in_arc))\n else:\n ret_system = IOSystem(meta=MRIOMetaData(location=path /\n DEFAULT_FILE_NAMES['metadata']))\n ret_system.meta._add_fileio('Loaded IO system from {}'.format(path)\n )\n elif file_para.content['systemtype'] == GENERIC_NAMES['ext']:\n ret_system = Extension(file_para.content['name'])\n else:\n raise ReadError('Type of system no defined in the file parameters')\n return None\n for key in file_para.content['files']:\n if not include_core and key not in ['A', 'L', 'Z']:\n continue\n file_name = file_para.content['files'][key]['name']\n nr_index_col = file_para.content['files'][key]['nr_index_col']\n nr_header = file_para.content['files'][key]['nr_header']\n _index_col = list(range(int(nr_index_col)))\n _header = list(range(int(nr_header)))\n _index_col = 0 if _index_col == [0] else _index_col\n _header = 0 if _header == [0] else _header\n if zipfile.is_zipfile(str(path)):\n full_file_name = os.path.join(file_para.folder, file_name)\n logging.info('Load data from {}'.format(full_file_name))\n with zipfile.ZipFile(file=str(path)) as zf:\n if os.path.splitext(str(full_file_name))[1\n ] == '.pkl' or os.path.splitext(str(full_file_name))[1\n ] == '.pickle':\n setattr(ret_system, key, pd.read_pickle(zf.open(\n full_file_name)))\n else:\n setattr(ret_system, key, pd.read_table(zf.open(\n full_file_name), index_col=_index_col, header=_header))\n else:\n full_file_name = path / file_name\n logging.info('Load data from {}'.format(full_file_name))\n if os.path.splitext(str(full_file_name))[1\n ] == '.pkl' or os.path.splitext(str(full_file_name))[1\n ] == '.pickle':\n setattr(ret_system, key, pd.read_pickle(full_file_name))\n else:\n setattr(ret_system, key, pd.read_table(full_file_name,\n index_col=_index_col, header=_header))\n return ret_system\n",
"role": "user"
},
{
"content": "load",
"role": "assistant"
}
] |
def x(source, archive, path_in_arc=None, remove_source=False, compression=
zipfile.ZIP_DEFLATED, compresslevel=-1):
archive = Path(archive)
if type(source) is not list:
source_root = str(source)
source_files = [f for f in Path(source).glob('**/*') if f.is_file()]
else:
source_root = os.path.commonpath([str(f) for f in source])
source_files = [Path(f) for f in source]
path_in_arc = '' if not path_in_arc else path_in_arc
arc_file_names = {str(f): os.path.join(path_in_arc, str(f.relative_to(
source_root))) for f in source_files}
if archive.exists():
with zipfile.ZipFile(file=str(archive), mode='r') as zf:
already_present = zf.namelist()
duplicates = {ff: zf for ff, zf in arc_file_names.items() if zf in
already_present}
if duplicates:
raise FileExistsError(
"""These files already exists in {arc} for path_in_arc "{pa}":
{filelist}"""
.format(pa=path_in_arc, arc=archive, filelist='\n '.join(
duplicates.values())))
if sys.version_info.major == 3 and sys.version_info.minor >= 7:
zip_open_para = dict(file=str(archive), mode='a', compression=
compression, compresslevel=compresslevel)
else:
zip_open_para = dict(file=str(archive), mode='a', compression=
compression)
with zipfile.ZipFile(**zip_open_para) as zz:
for fullpath, zippath in arc_file_names.items():
zz.write(str(fullpath), str(zippath))
if remove_source:
for f in source_files:
os.remove(str(f))
for root, dirs, files in os.walk(source_root, topdown=False):
for name in dirs:
dir_path = os.path.join(root, name)
if not os.listdir(dir_path):
os.rmdir(os.path.join(root, name))
try:
os.rmdir(source_root)
except OSError:
pass
|
archive
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(source, archive, path_in_arc=None, remove_source=False, compression=\n zipfile.ZIP_DEFLATED, compresslevel=-1):\n archive = Path(archive)\n if type(source) is not list:\n source_root = str(source)\n source_files = [f for f in Path(source).glob('**/*') if f.is_file()]\n else:\n source_root = os.path.commonpath([str(f) for f in source])\n source_files = [Path(f) for f in source]\n path_in_arc = '' if not path_in_arc else path_in_arc\n arc_file_names = {str(f): os.path.join(path_in_arc, str(f.relative_to(\n source_root))) for f in source_files}\n if archive.exists():\n with zipfile.ZipFile(file=str(archive), mode='r') as zf:\n already_present = zf.namelist()\n duplicates = {ff: zf for ff, zf in arc_file_names.items() if zf in\n already_present}\n if duplicates:\n raise FileExistsError(\n \"\"\"These files already exists in {arc} for path_in_arc \"{pa}\":\n {filelist}\"\"\"\n .format(pa=path_in_arc, arc=archive, filelist='\\n '.join(\n duplicates.values())))\n if sys.version_info.major == 3 and sys.version_info.minor >= 7:\n zip_open_para = dict(file=str(archive), mode='a', compression=\n compression, compresslevel=compresslevel)\n else:\n zip_open_para = dict(file=str(archive), mode='a', compression=\n compression)\n with zipfile.ZipFile(**zip_open_para) as zz:\n for fullpath, zippath in arc_file_names.items():\n zz.write(str(fullpath), str(zippath))\n if remove_source:\n for f in source_files:\n os.remove(str(f))\n for root, dirs, files in os.walk(source_root, topdown=False):\n for name in dirs:\n dir_path = os.path.join(root, name)\n if not os.listdir(dir_path):\n os.rmdir(os.path.join(root, name))\n try:\n os.rmdir(source_root)\n except OSError:\n pass\n",
"role": "user"
},
{
"content": "archive",
"role": "assistant"
}
] |
def x(ext_file, index_col, name, drop_compartment=True, version=None, year=
None, iosystem=None, sep=','):
ext_file = os.path.abspath(str(ext_file))
F = pd.read_table(ext_file, header=[0, 1], index_col=list(range(
index_col)), sep=sep)
F.columns.names = ['region', 'sector']
if index_col == 1:
F.index.names = ['stressor']
elif index_col == 2:
F.index.names = ['stressor', 'unit']
elif index_col == 3:
F.index.names = ['stressor', 'compartment', 'unit']
else:
F.reset_index(level=list(range(3, index_col)), drop=True, inplace=True)
F.index.names = ['stressor', 'compartment', 'unit']
unit = None
if index_col > 1:
unit = pd.DataFrame(F.iloc[:, 0].reset_index(level='unit').unit)
F.reset_index(level='unit', drop=True, inplace=True)
if drop_compartment:
F.reset_index(level='compartment', drop=True, inplace=True)
unit.reset_index(level='compartment', drop=True, inplace=True)
return Extension(name=name, F=F, unit=unit, iosystem=iosystem, version=
version, year=year)
|
parse_exio12_ext
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(ext_file, index_col, name, drop_compartment=True, version=None, year=\n None, iosystem=None, sep=','):\n ext_file = os.path.abspath(str(ext_file))\n F = pd.read_table(ext_file, header=[0, 1], index_col=list(range(\n index_col)), sep=sep)\n F.columns.names = ['region', 'sector']\n if index_col == 1:\n F.index.names = ['stressor']\n elif index_col == 2:\n F.index.names = ['stressor', 'unit']\n elif index_col == 3:\n F.index.names = ['stressor', 'compartment', 'unit']\n else:\n F.reset_index(level=list(range(3, index_col)), drop=True, inplace=True)\n F.index.names = ['stressor', 'compartment', 'unit']\n unit = None\n if index_col > 1:\n unit = pd.DataFrame(F.iloc[:, 0].reset_index(level='unit').unit)\n F.reset_index(level='unit', drop=True, inplace=True)\n if drop_compartment:\n F.reset_index(level='compartment', drop=True, inplace=True)\n unit.reset_index(level='compartment', drop=True, inplace=True)\n return Extension(name=name, F=F, unit=unit, iosystem=iosystem, version=\n version, year=year)\n",
"role": "user"
},
{
"content": "parse_exio12_ext",
"role": "assistant"
}
] |
def x(filename):
try:
ver_match = re.search('(\\d+\\w*(\\.|\\-|\\_))*\\d+\\w*', filename)
version = ver_match.string[ver_match.start():ver_match.end()]
if re.search('\\_\\d\\d\\d\\d', version[-5:]):
version = version[:-5]
except AttributeError:
version = None
return version
|
get_exiobase12_version
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(filename):\n try:\n ver_match = re.search('(\\\\d+\\\\w*(\\\\.|\\\\-|\\\\_))*\\\\d+\\\\w*', filename)\n version = ver_match.string[ver_match.start():ver_match.end()]\n if re.search('\\\\_\\\\d\\\\d\\\\d\\\\d', version[-5:]):\n version = version[:-5]\n except AttributeError:\n version = None\n return version\n",
"role": "user"
},
{
"content": "get_exiobase12_version",
"role": "assistant"
}
] |
def x(path, coefficients=True):
path = os.path.normpath(str(path))
if coefficients:
exio_core_regex = dict(A=re.compile('(?<!\\_)mrIot.*txt'), Y=re.
compile('(?<!\\_)mrFinalDemand.*txt'), S_factor_inputs=re.
compile('(?<!\\_)mrFactorInputs.*txt'), S_emissions=re.compile(
'(?<!\\_)mrEmissions.*txt'), S_materials=re.compile(
'(?<!\\_)mrMaterials.*txt'), S_resources=re.compile(
'(?<!\\_)mrResources.*txt'), FY_resources=re.compile(
'(?<!\\_)mrFDResources.*txt'), FY_emissions=re.compile(
'(?<!\\_)mrFDEmissions.*txt'), FY_materials=re.compile(
'(?<!\\_)mrFDMaterials.*txt'))
else:
exio_core_regex = dict(Z=re.compile('(?<!\\_)mrIot.*txt'), Y=re.
compile('(?<!\\_)mrFinalDemand.*txt'), F_fac=re.compile(
'(?<!\\_)mrFactorInputs.*txt'), F_emissions=re.compile(
'(?<!\\_)mrEmissions.*txt'), F_materials=re.compile(
'(?<!\\_)mrMaterials.*txt'), F_resources=re.compile(
'(?<!\\_)mrResources.*txt'), FY_emissions=re.compile(
'(?<!\\_)mrFDEmissions.*txt'), FY_materials=re.compile(
'(?<!\\_)mrFDMaterials.*txt'))
repo_content = get_repo_content(path)
exio_files = dict()
for kk, vv in exio_core_regex.items():
found_file = [vv.search(ff).string for ff in repo_content.filelist if
vv.search(ff)]
if len(found_file) > 1:
logging.warning(
'Multiple files found for {}: {} - USING THE FIRST ONE'.
format(kk, found_file))
found_file = found_file[0:1]
elif len(found_file) == 0:
continue
else:
if repo_content.iszip:
format_para = sniff_csv_format(found_file[0], zip_file=path)
else:
format_para = sniff_csv_format(os.path.join(path,
found_file[0]))
exio_files[kk] = dict(root_repo=path, file_path=found_file[0],
version=get_exiobase12_version(os.path.basename(found_file[
0])), index_rows=format_para['nr_header_row'], index_col=
format_para['nr_index_col'], unit_col=format_para[
'nr_index_col'] - 1, sep=format_para['sep'])
return exio_files
|
get_exiobase_files
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(path, coefficients=True):\n path = os.path.normpath(str(path))\n if coefficients:\n exio_core_regex = dict(A=re.compile('(?<!\\\\_)mrIot.*txt'), Y=re.\n compile('(?<!\\\\_)mrFinalDemand.*txt'), S_factor_inputs=re.\n compile('(?<!\\\\_)mrFactorInputs.*txt'), S_emissions=re.compile(\n '(?<!\\\\_)mrEmissions.*txt'), S_materials=re.compile(\n '(?<!\\\\_)mrMaterials.*txt'), S_resources=re.compile(\n '(?<!\\\\_)mrResources.*txt'), FY_resources=re.compile(\n '(?<!\\\\_)mrFDResources.*txt'), FY_emissions=re.compile(\n '(?<!\\\\_)mrFDEmissions.*txt'), FY_materials=re.compile(\n '(?<!\\\\_)mrFDMaterials.*txt'))\n else:\n exio_core_regex = dict(Z=re.compile('(?<!\\\\_)mrIot.*txt'), Y=re.\n compile('(?<!\\\\_)mrFinalDemand.*txt'), F_fac=re.compile(\n '(?<!\\\\_)mrFactorInputs.*txt'), F_emissions=re.compile(\n '(?<!\\\\_)mrEmissions.*txt'), F_materials=re.compile(\n '(?<!\\\\_)mrMaterials.*txt'), F_resources=re.compile(\n '(?<!\\\\_)mrResources.*txt'), FY_emissions=re.compile(\n '(?<!\\\\_)mrFDEmissions.*txt'), FY_materials=re.compile(\n '(?<!\\\\_)mrFDMaterials.*txt'))\n repo_content = get_repo_content(path)\n exio_files = dict()\n for kk, vv in exio_core_regex.items():\n found_file = [vv.search(ff).string for ff in repo_content.filelist if\n vv.search(ff)]\n if len(found_file) > 1:\n logging.warning(\n 'Multiple files found for {}: {} - USING THE FIRST ONE'.\n format(kk, found_file))\n found_file = found_file[0:1]\n elif len(found_file) == 0:\n continue\n else:\n if repo_content.iszip:\n format_para = sniff_csv_format(found_file[0], zip_file=path)\n else:\n format_para = sniff_csv_format(os.path.join(path,\n found_file[0]))\n exio_files[kk] = dict(root_repo=path, file_path=found_file[0],\n version=get_exiobase12_version(os.path.basename(found_file[\n 0])), index_rows=format_para['nr_header_row'], index_col=\n format_para['nr_index_col'], unit_col=format_para[\n 'nr_index_col'] - 1, sep=format_para['sep'])\n return exio_files\n",
"role": "user"
},
{
"content": "get_exiobase_files",
"role": "assistant"
}
] |
def x(exio_files, system=None):
version = ' & '.join({dd.get('version', '') for dd in exio_files.values
() if dd.get('version', '')})
meta_rec = MRIOMetaData(system=system, name='EXIOBASE', version=version)
if len(version) == 0:
meta_rec.note('No version information found, assuming exiobase 1')
meta_rec.change_meta('version', 1)
version = '1'
core_components = ['A', 'Y', 'Z']
core_data = dict()
ext_data = dict()
for tt, tpara in exio_files.items():
full_file_path = os.path.join(tpara['root_repo'], tpara['file_path'])
logging.debug('Parse {}'.format(full_file_path))
if tpara['root_repo'][-3:] == 'zip':
with zipfile.ZipFile(tpara['root_repo'], 'r') as zz:
raw_data = pd.read_table(zz.open(tpara['file_path']),
index_col=list(range(tpara['index_col'])), header=list(
range(tpara['index_rows'])))
else:
raw_data = pd.read_table(full_file_path, index_col=list(range(
tpara['index_col'])), header=list(range(tpara['index_rows'])))
meta_rec._add_fileio('EXIOBASE data {} parsed from {}'.format(tt,
full_file_path))
if tt in core_components:
core_data[tt] = raw_data
else:
ext_data[tt] = raw_data
for table in core_data:
core_data[table].index.names = ['region', 'sector', 'unit']
if table == 'A' or table == 'Z':
core_data[table].columns.names = ['region', 'sector']
_unit = pd.DataFrame(core_data[table].iloc[:, 0]).reset_index(level
='unit').unit
_unit = pd.DataFrame(_unit)
_unit.columns = ['unit']
if table == 'Y':
core_data[table].columns.names = ['region', 'category']
core_data[table].reset_index(level='unit', drop=True, inplace=True)
core_data['unit'] = _unit
mon_unit = core_data['unit'].iloc[0, 0]
if '/' in mon_unit:
mon_unit = mon_unit.split('/')[0]
core_data['unit'].unit = mon_unit
extensions = dict()
for tt, tpara in exio_files.items():
if tt in core_components:
continue
ext_name = '_'.join(tt.split('_')[1:])
table_type = tt.split('_')[0]
if tpara['index_col'] == 3:
ext_data[tt].index.names = ['stressor', 'compartment', 'unit']
elif tpara['index_col'] == 2:
ext_data[tt].index.names = ['stressor', 'unit']
else:
raise ParserError('Unknown EXIOBASE file structure')
if table_type == 'FY':
ext_data[tt].columns.names = ['region', 'category']
else:
ext_data[tt].columns.names = ['region', 'sector']
try:
_unit = pd.DataFrame(ext_data[tt].iloc[:, 0]).reset_index(level
='unit').unit
except IndexError:
_unit = pd.DataFrame(ext_data[tt].iloc[:, 0])
_unit.columns = ['unit']
_unit['unit'] = 'undef'
_unit.reset_index(level='unit', drop=True, inplace=True)
_unit = pd.DataFrame(_unit)
_unit.columns = ['unit']
_unit = pd.DataFrame(_unit)
_unit.columns = ['unit']
_new_unit = _unit.unit.str.replace('/' + mon_unit, '')
_new_unit[_new_unit == ''] = _unit.unit[_new_unit == ''].str.replace(
'/', '')
_unit.unit = _new_unit
ext_data[tt].reset_index(level='unit', drop=True, inplace=True)
ext_dict = extensions.get(ext_name, dict())
ext_dict.update({table_type: ext_data[tt], 'unit': _unit, 'name':
ext_name})
extensions.update({ext_name: ext_dict})
if version[0] == '1':
year = 2000
elif version[0] == '2':
year = 2000
elif version[0] == '3':
raise ParserError('This function can not be used to parse EXIOBASE 3')
else:
logging.warning('Unknown EXIOBASE version')
year = None
return IOSystem(version=version, price='current', year=year, meta=
meta_rec, **dict(core_data, **extensions))
|
generic_exiobase12_parser
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(exio_files, system=None):\n version = ' & '.join({dd.get('version', '') for dd in exio_files.values\n () if dd.get('version', '')})\n meta_rec = MRIOMetaData(system=system, name='EXIOBASE', version=version)\n if len(version) == 0:\n meta_rec.note('No version information found, assuming exiobase 1')\n meta_rec.change_meta('version', 1)\n version = '1'\n core_components = ['A', 'Y', 'Z']\n core_data = dict()\n ext_data = dict()\n for tt, tpara in exio_files.items():\n full_file_path = os.path.join(tpara['root_repo'], tpara['file_path'])\n logging.debug('Parse {}'.format(full_file_path))\n if tpara['root_repo'][-3:] == 'zip':\n with zipfile.ZipFile(tpara['root_repo'], 'r') as zz:\n raw_data = pd.read_table(zz.open(tpara['file_path']),\n index_col=list(range(tpara['index_col'])), header=list(\n range(tpara['index_rows'])))\n else:\n raw_data = pd.read_table(full_file_path, index_col=list(range(\n tpara['index_col'])), header=list(range(tpara['index_rows'])))\n meta_rec._add_fileio('EXIOBASE data {} parsed from {}'.format(tt,\n full_file_path))\n if tt in core_components:\n core_data[tt] = raw_data\n else:\n ext_data[tt] = raw_data\n for table in core_data:\n core_data[table].index.names = ['region', 'sector', 'unit']\n if table == 'A' or table == 'Z':\n core_data[table].columns.names = ['region', 'sector']\n _unit = pd.DataFrame(core_data[table].iloc[:, 0]).reset_index(level\n ='unit').unit\n _unit = pd.DataFrame(_unit)\n _unit.columns = ['unit']\n if table == 'Y':\n core_data[table].columns.names = ['region', 'category']\n core_data[table].reset_index(level='unit', drop=True, inplace=True)\n core_data['unit'] = _unit\n mon_unit = core_data['unit'].iloc[0, 0]\n if '/' in mon_unit:\n mon_unit = mon_unit.split('/')[0]\n core_data['unit'].unit = mon_unit\n extensions = dict()\n for tt, tpara in exio_files.items():\n if tt in core_components:\n continue\n ext_name = '_'.join(tt.split('_')[1:])\n table_type = tt.split('_')[0]\n if tpara['index_col'] == 3:\n ext_data[tt].index.names = ['stressor', 'compartment', 'unit']\n elif tpara['index_col'] == 2:\n ext_data[tt].index.names = ['stressor', 'unit']\n else:\n raise ParserError('Unknown EXIOBASE file structure')\n if table_type == 'FY':\n ext_data[tt].columns.names = ['region', 'category']\n else:\n ext_data[tt].columns.names = ['region', 'sector']\n try:\n _unit = pd.DataFrame(ext_data[tt].iloc[:, 0]).reset_index(level\n ='unit').unit\n except IndexError:\n _unit = pd.DataFrame(ext_data[tt].iloc[:, 0])\n _unit.columns = ['unit']\n _unit['unit'] = 'undef'\n _unit.reset_index(level='unit', drop=True, inplace=True)\n _unit = pd.DataFrame(_unit)\n _unit.columns = ['unit']\n _unit = pd.DataFrame(_unit)\n _unit.columns = ['unit']\n _new_unit = _unit.unit.str.replace('/' + mon_unit, '')\n _new_unit[_new_unit == ''] = _unit.unit[_new_unit == ''].str.replace(\n '/', '')\n _unit.unit = _new_unit\n ext_data[tt].reset_index(level='unit', drop=True, inplace=True)\n ext_dict = extensions.get(ext_name, dict())\n ext_dict.update({table_type: ext_data[tt], 'unit': _unit, 'name':\n ext_name})\n extensions.update({ext_name: ext_dict})\n if version[0] == '1':\n year = 2000\n elif version[0] == '2':\n year = 2000\n elif version[0] == '3':\n raise ParserError('This function can not be used to parse EXIOBASE 3')\n else:\n logging.warning('Unknown EXIOBASE version')\n year = None\n return IOSystem(version=version, price='current', year=year, meta=\n meta_rec, **dict(core_data, **extensions))\n",
"role": "user"
},
{
"content": "generic_exiobase12_parser",
"role": "assistant"
}
] |
def x(path):
ispxp = True if re.search('pxp', path, flags=re.IGNORECASE) else False
isixi = True if re.search('ixi', path, flags=re.IGNORECASE) else False
if ispxp == isixi:
system = None
else:
system = 'pxp' if ispxp else 'ixi'
return system
|
get_MRIO_system
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(path):\n ispxp = True if re.search('pxp', path, flags=re.IGNORECASE) else False\n isixi = True if re.search('ixi', path, flags=re.IGNORECASE) else False\n if ispxp == isixi:\n system = None\n else:\n system = 'pxp' if ispxp else 'ixi'\n return system\n",
"role": "user"
},
{
"content": "get_MRIO_system",
"role": "assistant"
}
] |
def x(path):
path = os.path.abspath(os.path.normpath(str(path)))
exio_files = get_exiobase_files(path)
if len(exio_files) == 0:
raise ParserError('No EXIOBASE files found at {}'.format(path))
system = _get_MRIO_system(path)
if not system:
logging.warning(
'Could not determine system (pxp or ixi) set system parameter manually'
)
io = generic_exiobase12_parser(exio_files, system=system)
return io
|
parse_exiobase1
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(path):\n path = os.path.abspath(os.path.normpath(str(path)))\n exio_files = get_exiobase_files(path)\n if len(exio_files) == 0:\n raise ParserError('No EXIOBASE files found at {}'.format(path))\n system = _get_MRIO_system(path)\n if not system:\n logging.warning(\n 'Could not determine system (pxp or ixi) set system parameter manually'\n )\n io = generic_exiobase12_parser(exio_files, system=system)\n return io\n",
"role": "user"
},
{
"content": "parse_exiobase1",
"role": "assistant"
}
] |
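
Editor's note (illustrative, not part of the dataset): parse_exiobase1 chains the two helpers above, get_exiobase_files and _get_MRIO_system, into a one-call parser for the EXIOBASE 1 csv release. A hedged usage sketch; the path is hypothetical:

    io = parse_exiobase1('/tmp/exiobase_1_3_pxp')   # folder or zip containing the mrIot/mrFinalDemand/... files
    print(io.meta)                                  # metadata assembled during parsing
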
def x(path):
io = load_all(path)
try:
io.satellite.FY = io.satellite.F_hh.copy()
del io.satellite.F_hh
except AttributeError:
pass
io.rename_regions({'AUS': 'AU', 'AUT': 'AT', 'BEL': 'BE', 'BGR': 'BG',
'BRA': 'BR', 'CAN': 'CA', 'CHE': 'CH', 'CHN': 'CN', 'CYP': 'CY',
'CZE': 'CZ', 'DEU': 'DE', 'DNK': 'DK', 'ESP': 'ES', 'EST': 'EE',
'FIN': 'FI', 'FRA': 'FR', 'GBR': 'GB', 'GRC': 'GR', 'HRV': 'HR',
'HUN': 'HU', 'IDN': 'ID', 'IND': 'IN', 'IRL': 'IE', 'ITA': 'IT',
'JPN': 'JP', 'KOR': 'KR', 'LTU': 'LT', 'LUX': 'LU', 'LVA': 'LV',
'MEX': 'MX', 'MLT': 'MT', 'NLD': 'NL', 'NOR': 'NO', 'POL': 'PL',
'PRT': 'PT', 'ROM': 'RO', 'RUS': 'RU', 'SVK': 'SK', 'SVN': 'SI',
'SWE': 'SE', 'TUR': 'TR', 'TWN': 'TW', 'USA': 'US', 'ZAF': 'ZA',
'WWA': 'WA', 'WWE': 'WE', 'WWF': 'WF', 'WWL': 'WL', 'WWM': 'WM'})
return io
|
parse_exiobase3
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(path):\n io = load_all(path)\n try:\n io.satellite.FY = io.satellite.F_hh.copy()\n del io.satellite.F_hh\n except AttributeError:\n pass\n io.rename_regions({'AUS': 'AU', 'AUT': 'AT', 'BEL': 'BE', 'BGR': 'BG',\n 'BRA': 'BR', 'CAN': 'CA', 'CHE': 'CH', 'CHN': 'CN', 'CYP': 'CY',\n 'CZE': 'CZ', 'DEU': 'DE', 'DNK': 'DK', 'ESP': 'ES', 'EST': 'EE',\n 'FIN': 'FI', 'FRA': 'FR', 'GBR': 'GB', 'GRC': 'GR', 'HRV': 'HR',\n 'HUN': 'HU', 'IDN': 'ID', 'IND': 'IN', 'IRL': 'IE', 'ITA': 'IT',\n 'JPN': 'JP', 'KOR': 'KR', 'LTU': 'LT', 'LUX': 'LU', 'LVA': 'LV',\n 'MEX': 'MX', 'MLT': 'MT', 'NLD': 'NL', 'NOR': 'NO', 'POL': 'PL',\n 'PRT': 'PT', 'ROM': 'RO', 'RUS': 'RU', 'SVK': 'SK', 'SVN': 'SI',\n 'SWE': 'SE', 'TUR': 'TR', 'TWN': 'TW', 'USA': 'US', 'ZAF': 'ZA',\n 'WWA': 'WA', 'WWE': 'WE', 'WWF': 'WF', 'WWL': 'WL', 'WWM': 'WM'})\n return io\n",
"role": "user"
},
{
"content": "parse_exiobase3",
"role": "assistant"
}
] |
def x(root_path, year, ll_co, para):
ll_root_content = [ff for ff in os.listdir(root_path) if ff.startswith(
para['start'])]
if len(ll_root_content) < 1:
warnings.warn(
'Extension data for {} not found - Extension not included'.
format(para['start']), ParserWarning)
return None
elif len(ll_root_content) > 1:
raise ParserError(
            'Several raw data for extension {} available - clean extension folder.'
.format(para['start']))
pf_env = os.path.join(root_path, ll_root_content[0])
if pf_env.endswith('.zip'):
rf_zip = zipfile.ZipFile(pf_env)
ll_env_content = [ff for ff in rf_zip.namelist() if ff.endswith(
para['ext'])]
else:
ll_env_content = [ff for ff in os.listdir(pf_env) if ff.endswith(
para['ext'])]
dl_env = dict()
dl_env_hh = dict()
for co in ll_co:
ll_pff_read = [ff for ff in ll_env_content if ff.endswith(para[
'ext']) and (ff.startswith(co.upper()) or ff.startswith(co.
lower()))]
if len(ll_pff_read) < 1:
raise ParserError(
'Country data not complete for Extension {} - missing {}.'.
format(para['start'], co))
elif len(ll_pff_read) > 1:
raise ParserError(
'Multiple country data for Extension {} - country {}.'.
format(para['start'], co))
pff_read = ll_pff_read[0]
if pf_env.endswith('.zip'):
ff_excel = pd.ExcelFile(rf_zip.open(pff_read))
else:
ff_excel = pd.ExcelFile(os.path.join(pf_env, pff_read))
if str(year) in ff_excel.sheet_names:
df_env = ff_excel.parse(sheet_name=str(year), index_col=None,
header=0)
else:
warnings.warn(
                'Extension {} does not include data for the year {} - Extension not included'
.format(para['start'], year), ParserWarning)
return None
if not df_env.index.is_numeric():
df_env.reset_index(inplace=True)
if df_env.columns[0] != 'level_0':
para['unit']['all'] = df_env.columns[0]
if para['start'].islower():
pass
elif para['start'].isupper():
df_env = df_env.iloc[:, 1:]
else:
raise ParserError('Format of extension not given.')
df_env.dropna(axis=0, how='all', inplace=True)
df_env = df_env[df_env.iloc[:, 0] != 'total']
df_env = df_env[df_env.iloc[:, 0] != 'secTOT']
df_env = df_env[df_env.iloc[:, 0] != 'secQ']
df_env.iloc[:, 0].astype(str, inplace=True)
df_env.iloc[:, 0].replace(to_replace='sec', value='', regex=True,
inplace=True)
df_env.set_index([df_env.columns[0]], inplace=True)
df_env.index.names = ['sector']
df_env = df_env.T
ikc_hh = 'FC_HH'
dl_env_hh[co] = df_env[ikc_hh]
del df_env[ikc_hh]
dl_env[co] = df_env
df_F = pd.concat(dl_env, axis=1)[ll_co]
df_FY = pd.concat(dl_env_hh, axis=1)[ll_co]
df_F.fillna(0, inplace=True)
df_FY.fillna(0, inplace=True)
df_F.columns.names = IDX_NAMES['F_col']
df_F.index.names = IDX_NAMES['F_row_single']
df_FY.columns.names = IDX_NAMES['Y_col1']
df_FY.index.names = IDX_NAMES['F_row_single']
df_unit = pd.DataFrame(index=df_F.index, columns=['unit'])
_ss_unit = para['unit'].get('all', 'undef')
for ikr in df_unit.index:
df_unit.ix[ikr, 'unit'] = para['unit'].get(ikr, _ss_unit)
df_unit.columns.names = ['unit']
df_unit.index.names = ['stressor']
if pf_env.endswith('.zip'):
rf_zip.close()
return {'F': df_F, 'FY': df_FY, 'unit': df_unit}
|
get_WIOD_env_extension
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(root_path, year, ll_co, para):\n ll_root_content = [ff for ff in os.listdir(root_path) if ff.startswith(\n para['start'])]\n if len(ll_root_content) < 1:\n warnings.warn(\n 'Extension data for {} not found - Extension not included'.\n format(para['start']), ParserWarning)\n return None\n elif len(ll_root_content) > 1:\n raise ParserError(\n 'Several raw data for extension{} available - clean extension folder.'\n .format(para['start']))\n pf_env = os.path.join(root_path, ll_root_content[0])\n if pf_env.endswith('.zip'):\n rf_zip = zipfile.ZipFile(pf_env)\n ll_env_content = [ff for ff in rf_zip.namelist() if ff.endswith(\n para['ext'])]\n else:\n ll_env_content = [ff for ff in os.listdir(pf_env) if ff.endswith(\n para['ext'])]\n dl_env = dict()\n dl_env_hh = dict()\n for co in ll_co:\n ll_pff_read = [ff for ff in ll_env_content if ff.endswith(para[\n 'ext']) and (ff.startswith(co.upper()) or ff.startswith(co.\n lower()))]\n if len(ll_pff_read) < 1:\n raise ParserError(\n 'Country data not complete for Extension {} - missing {}.'.\n format(para['start'], co))\n elif len(ll_pff_read) > 1:\n raise ParserError(\n 'Multiple country data for Extension {} - country {}.'.\n format(para['start'], co))\n pff_read = ll_pff_read[0]\n if pf_env.endswith('.zip'):\n ff_excel = pd.ExcelFile(rf_zip.open(pff_read))\n else:\n ff_excel = pd.ExcelFile(os.path.join(pf_env, pff_read))\n if str(year) in ff_excel.sheet_names:\n df_env = ff_excel.parse(sheet_name=str(year), index_col=None,\n header=0)\n else:\n warnings.warn(\n 'Extension {} does not includedata for the year {} - Extension not included'\n .format(para['start'], year), ParserWarning)\n return None\n if not df_env.index.is_numeric():\n df_env.reset_index(inplace=True)\n if df_env.columns[0] != 'level_0':\n para['unit']['all'] = df_env.columns[0]\n if para['start'].islower():\n pass\n elif para['start'].isupper():\n df_env = df_env.iloc[:, 1:]\n else:\n raise ParserError('Format of extension not given.')\n df_env.dropna(axis=0, how='all', inplace=True)\n df_env = df_env[df_env.iloc[:, 0] != 'total']\n df_env = df_env[df_env.iloc[:, 0] != 'secTOT']\n df_env = df_env[df_env.iloc[:, 0] != 'secQ']\n df_env.iloc[:, 0].astype(str, inplace=True)\n df_env.iloc[:, 0].replace(to_replace='sec', value='', regex=True,\n inplace=True)\n df_env.set_index([df_env.columns[0]], inplace=True)\n df_env.index.names = ['sector']\n df_env = df_env.T\n ikc_hh = 'FC_HH'\n dl_env_hh[co] = df_env[ikc_hh]\n del df_env[ikc_hh]\n dl_env[co] = df_env\n df_F = pd.concat(dl_env, axis=1)[ll_co]\n df_FY = pd.concat(dl_env_hh, axis=1)[ll_co]\n df_F.fillna(0, inplace=True)\n df_FY.fillna(0, inplace=True)\n df_F.columns.names = IDX_NAMES['F_col']\n df_F.index.names = IDX_NAMES['F_row_single']\n df_FY.columns.names = IDX_NAMES['Y_col1']\n df_FY.index.names = IDX_NAMES['F_row_single']\n df_unit = pd.DataFrame(index=df_F.index, columns=['unit'])\n _ss_unit = para['unit'].get('all', 'undef')\n for ikr in df_unit.index:\n df_unit.ix[ikr, 'unit'] = para['unit'].get(ikr, _ss_unit)\n df_unit.columns.names = ['unit']\n df_unit.index.names = ['stressor']\n if pf_env.endswith('.zip'):\n rf_zip.close()\n return {'F': df_F, 'FY': df_FY, 'unit': df_unit}\n",
"role": "user"
},
{
"content": "get_WIOD_env_extension",
"role": "assistant"
}
] |
def x(root_path, year, data_sheet='DATA'):
sea_ext = '.xlsx'
sea_start = 'WIOD_SEA'
_SEA_folder = os.path.join(root_path, 'SEA')
if not os.path.exists(_SEA_folder):
_SEA_folder = root_path
sea_folder_content = [ff for ff in os.listdir(_SEA_folder) if os.path.
splitext(ff)[-1] == sea_ext and ff[:8] == sea_start]
if sea_folder_content:
sea_file = os.path.join(_SEA_folder, sorted(sea_folder_content)[0])
df_sea = pd.read_excel(sea_file, sheet_name=data_sheet, header=0,
index_col=[0, 1, 2, 3])
ic_sea = df_sea.columns.tolist()
ic_sea = [yystr.lstrip('_') for yystr in ic_sea]
df_sea.columns = ic_sea
try:
ds_sea = df_sea[str(year)]
except KeyError:
warnings.warn(
'SEA extension does not include data for the year {} - SEA-Extension not included'
.format(year), ParserWarning)
return None, None
mt_sea = ['EMP', 'EMPE', 'H_EMP', 'H_EMPE']
ds_use_sea = pd.concat([ds_sea.xs(key=vari, level='Variable',
drop_level=False) for vari in mt_sea])
ds_use_sea.drop(labels='TOT', level='Code', inplace=True)
ds_use_sea.reset_index('Description', drop=True, inplace=True)
if 'RoW' not in ds_use_sea.index.get_level_values('Country'):
ds_RoW = ds_use_sea.xs('USA', level='Country', drop_level=False)
ds_RoW.ix[:] = 0
df_RoW = ds_RoW.reset_index()
df_RoW['Country'] = 'RoW'
ds_use_sea = pd.concat([ds_use_sea.reset_index(), df_RoW]
).set_index(['Country', 'Code', 'Variable'])
ds_use_sea.fillna(value=0, inplace=True)
df_use_sea = ds_use_sea.unstack(level=['Country', 'Code'])[str(year)]
df_use_sea.index.names = IDX_NAMES['VA_row_single']
df_use_sea.columns.names = IDX_NAMES['F_col']
df_use_sea = df_use_sea.astype('float')
df_unit = pd.DataFrame(data=['thousand persons', 'thousand persons',
'mill hours', 'mill hours'], columns=['unit'], index=df_use_sea
.index)
return df_use_sea, df_unit
else:
warnings.warn(
'SEA extension raw data file not found - SEA-Extension not included'
, ParserWarning)
return None, None
|
get_WIOD_SEA_extension
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(root_path, year, data_sheet='DATA'):\n sea_ext = '.xlsx'\n sea_start = 'WIOD_SEA'\n _SEA_folder = os.path.join(root_path, 'SEA')\n if not os.path.exists(_SEA_folder):\n _SEA_folder = root_path\n sea_folder_content = [ff for ff in os.listdir(_SEA_folder) if os.path.\n splitext(ff)[-1] == sea_ext and ff[:8] == sea_start]\n if sea_folder_content:\n sea_file = os.path.join(_SEA_folder, sorted(sea_folder_content)[0])\n df_sea = pd.read_excel(sea_file, sheet_name=data_sheet, header=0,\n index_col=[0, 1, 2, 3])\n ic_sea = df_sea.columns.tolist()\n ic_sea = [yystr.lstrip('_') for yystr in ic_sea]\n df_sea.columns = ic_sea\n try:\n ds_sea = df_sea[str(year)]\n except KeyError:\n warnings.warn(\n 'SEA extension does not include data for the year {} - SEA-Extension not included'\n .format(year), ParserWarning)\n return None, None\n mt_sea = ['EMP', 'EMPE', 'H_EMP', 'H_EMPE']\n ds_use_sea = pd.concat([ds_sea.xs(key=vari, level='Variable',\n drop_level=False) for vari in mt_sea])\n ds_use_sea.drop(labels='TOT', level='Code', inplace=True)\n ds_use_sea.reset_index('Description', drop=True, inplace=True)\n if 'RoW' not in ds_use_sea.index.get_level_values('Country'):\n ds_RoW = ds_use_sea.xs('USA', level='Country', drop_level=False)\n ds_RoW.ix[:] = 0\n df_RoW = ds_RoW.reset_index()\n df_RoW['Country'] = 'RoW'\n ds_use_sea = pd.concat([ds_use_sea.reset_index(), df_RoW]\n ).set_index(['Country', 'Code', 'Variable'])\n ds_use_sea.fillna(value=0, inplace=True)\n df_use_sea = ds_use_sea.unstack(level=['Country', 'Code'])[str(year)]\n df_use_sea.index.names = IDX_NAMES['VA_row_single']\n df_use_sea.columns.names = IDX_NAMES['F_col']\n df_use_sea = df_use_sea.astype('float')\n df_unit = pd.DataFrame(data=['thousand persons', 'thousand persons',\n 'mill hours', 'mill hours'], columns=['unit'], index=df_use_sea\n .index)\n return df_use_sea, df_unit\n else:\n warnings.warn(\n 'SEA extension raw data file not found - SEA-Extension not included'\n , ParserWarning)\n return None, None\n",
"role": "user"
},
{
"content": "get_WIOD_SEA_extension",
"role": "assistant"
}
] |
def x(self, entry_type, entry):
meta_string = '{time} - {etype} - {entry}'.format(time=self._time(),
etype=entry_type.upper(), entry=entry)
self._content['history'].insert(0, meta_string)
self.logger(meta_string)
|
add_history
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, entry_type, entry):\n meta_string = '{time} - {etype} - {entry}'.format(time=self._time(),\n etype=entry_type.upper(), entry=entry)\n self._content['history'].insert(0, meta_string)\n self.logger(meta_string)\n",
"role": "user"
},
{
"content": "add_history",
"role": "assistant"
}
] |
def x(self, para, new_value, log=True):
if not new_value:
return
para = para.lower()
if para == 'history':
raise ValueError('History can only be extended - use method "note"')
old_value = self._content.get(para, None)
if new_value == old_value:
return
self._content[para] = new_value
if old_value and log:
self._add_history(entry_type='METADATA_CHANGE', entry=
'Changed parameter "{para}" from "{old}" to "{new}"'.format(
para=para, old=old_value, new=new_value))
|
change_meta
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, para, new_value, log=True):\n if not new_value:\n return\n para = para.lower()\n if para == 'history':\n raise ValueError('History can only be extended - use method \"note\"')\n old_value = self._content.get(para, None)\n if new_value == old_value:\n return\n self._content[para] = new_value\n if old_value and log:\n self._add_history(entry_type='METADATA_CHANGE', entry=\n 'Changed parameter \"{para}\" from \"{old}\" to \"{new}\"'.format(\n para=para, old=old_value, new=new_value))\n",
"role": "user"
},
{
"content": "change_meta",
"role": "assistant"
}
] |
def x(self):
if self._path_in_arc:
with zipfile.ZipFile(file=str(self._metadata_file)) as zf:
self._content = json.loads(zf.read(self._path_in_arc).decode(
'utf-8'), object_pairs_hook=OrderedDict)
else:
with self._metadata_file.open('r') as mdf:
self._content = json.load(mdf, object_pairs_hook=OrderedDict)
|
read_content
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self._path_in_arc:\n with zipfile.ZipFile(file=str(self._metadata_file)) as zf:\n self._content = json.loads(zf.read(self._path_in_arc).decode(\n 'utf-8'), object_pairs_hook=OrderedDict)\n else:\n with self._metadata_file.open('r') as mdf:\n self._content = json.load(mdf, object_pairs_hook=OrderedDict)\n",
"role": "user"
},
{
"content": "read_content",
"role": "assistant"
}
] |
def x(self, location=None):
if location:
location = Path(location)
if os.path.splitext(str(location))[1] == '':
self._metadata_file = location / DEFAULT_FILE_NAMES['metadata']
else:
self._metadata_file = location
if self._metadata_file:
with self._metadata_file.open(mode='w') as mdf:
json.dump(self._content, mdf, indent=4)
else:
logging.error('No metadata file given for storing the file')
|
save
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, location=None):\n if location:\n location = Path(location)\n if os.path.splitext(str(location))[1] == '':\n self._metadata_file = location / DEFAULT_FILE_NAMES['metadata']\n else:\n self._metadata_file = location\n if self._metadata_file:\n with self._metadata_file.open(mode='w') as mdf:\n json.dump(self._content, mdf, indent=4)\n else:\n logging.error('No metadata file given for storing the file')\n",
"role": "user"
},
{
"content": "save",
"role": "assistant"
}
] |
def x(Z, Y):
x = np.reshape(np.sum(np.hstack((Z, Y)), 1), (-1, 1))
if type(Z) is pd.DataFrame:
x = pd.DataFrame(x, index=Z.index, columns=['indout'])
if type(x) is pd.Series:
x = pd.DataFrame(x)
if type(x) is pd.DataFrame:
x.columns = ['indout']
return x
|
calc_x
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(Z, Y):\n x = np.reshape(np.sum(np.hstack((Z, Y)), 1), (-1, 1))\n if type(Z) is pd.DataFrame:\n x = pd.DataFrame(x, index=Z.index, columns=['indout'])\n if type(x) is pd.Series:\n x = pd.DataFrame(x)\n if type(x) is pd.DataFrame:\n x.columns = ['indout']\n return x\n",
"role": "user"
},
{
"content": "calc_x",
"role": "assistant"
}
] |
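
Editor's note (illustrative): calc_x returns gross output x as the row sums of the transaction matrix Z and the final demand Y, i.e. x_i = sum_j Z_ij + sum_k Y_ik. A minimal sketch with invented toy data:

    import numpy as np
    import pandas as pd

    Z = pd.DataFrame([[10., 5.], [4., 8.]], index=['s1', 's2'], columns=['s1', 's2'])
    Y = pd.DataFrame([[20.], [15.]], index=['s1', 's2'], columns=['fd'])
    x = pd.DataFrame(np.hstack((Z, Y)).sum(axis=1), index=Z.index, columns=['indout'])
    # x['indout'] -> s1: 35.0, s2: 27.0
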
def x(L, y):
x = L.dot(y)
if type(x) is pd.Series:
x = pd.DataFrame(x)
if type(x) is pd.DataFrame:
x.columns = ['indout']
return x
|
calc_x_from_L
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(L, y):\n x = L.dot(y)\n if type(x) is pd.Series:\n x = pd.DataFrame(x)\n if type(x) is pd.DataFrame:\n x.columns = ['indout']\n return x\n",
"role": "user"
},
{
"content": "calc_x_from_L",
"role": "assistant"
}
] |
def x(A, x):
if type(x) is pd.DataFrame or type(x) is pd.Series:
x = x.values
x = x.reshape((1, -1))
if type(A) is pd.DataFrame:
return pd.DataFrame(A.values * x, index=A.index, columns=A.columns)
else:
return A * x
|
calc_Z
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(A, x):\n if type(x) is pd.DataFrame or type(x) is pd.Series:\n x = x.values\n x = x.reshape((1, -1))\n if type(A) is pd.DataFrame:\n return pd.DataFrame(A.values * x, index=A.index, columns=A.columns)\n else:\n return A * x\n",
"role": "user"
},
{
"content": "calc_Z",
"role": "assistant"
}
] |
def x(Z, x):
if type(x) is pd.DataFrame or type(x) is pd.Series:
x = x.values
if type(x) is not np.ndarray and x == 0:
recix = 0
else:
with warnings.catch_warnings():
warnings.simplefilter('ignore')
recix = 1 / x
recix[recix == np.inf] = 0
recix = recix.reshape((1, -1))
if type(Z) is pd.DataFrame:
return pd.DataFrame(Z.values * recix, index=Z.index, columns=Z.columns)
else:
return Z * recix
|
calc_A
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(Z, x):\n if type(x) is pd.DataFrame or type(x) is pd.Series:\n x = x.values\n if type(x) is not np.ndarray and x == 0:\n recix = 0\n else:\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n recix = 1 / x\n recix[recix == np.inf] = 0\n recix = recix.reshape((1, -1))\n if type(Z) is pd.DataFrame:\n return pd.DataFrame(Z.values * recix, index=Z.index, columns=Z.columns)\n else:\n return Z * recix\n",
"role": "user"
},
{
"content": "calc_A",
"role": "assistant"
}
] |
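
Editor's note (illustrative): calc_A normalises the transaction matrix by gross output, A = Z * diag(x)^-1, mapping 1/0 to 0 so that sectors with zero output do not introduce infinities. A toy sketch of the same guard (np.errstate used here instead of the warnings filter above):

    import numpy as np

    Z = np.array([[10., 5.], [4., 8.]])
    x = np.array([35., 0.])                  # second sector has no output
    with np.errstate(divide='ignore'):
        recix = 1 / x
    recix[recix == np.inf] = 0               # zero out the 1/0 entries
    A = Z * recix.reshape((1, -1))           # scale each column by 1/x of that sector
    # A -> [[0.2857, 0.], [0.1143, 0.]] (rounded)
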
def x(A):
I = np.eye(A.shape[0])
if type(A) is pd.DataFrame:
return pd.DataFrame(np.linalg.inv(I - A), index=A.index, columns=A.
columns)
else:
return np.linalg.inv(I - A)
|
calc_L
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(A):\n I = np.eye(A.shape[0])\n if type(A) is pd.DataFrame:\n return pd.DataFrame(np.linalg.inv(I - A), index=A.index, columns=A.\n columns)\n else:\n return np.linalg.inv(I - A)\n",
"role": "user"
},
{
"content": "calc_L",
"role": "assistant"
}
] |
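
Editor's note (illustrative): calc_L is the Leontief inverse L = (I - A)^-1, and calc_x_from_L closes the loop x = L y. A toy check that the inverse reproduces the gross output of the calc_x sketch above:

    import numpy as np

    A = np.array([[10 / 35, 5 / 27], [4 / 35, 8 / 27]])   # Z scaled by x = [35, 27]
    y = np.array([[20.], [15.]])                           # total final demand per sector
    L = np.linalg.inv(np.eye(2) - A)
    x = L.dot(y)
    # x -> [[35.], [27.]] up to floating point, matching the calc_x example
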
def x(S, D_cba, Y, nr_sectors):
Y_diag = ioutil.diagonalize_blocks(Y.values, blocksize=nr_sectors)
Y_inv = np.linalg.inv(Y_diag)
M = D_cba.dot(Y_inv)
if type(D_cba) is pd.DataFrame:
M.columns = D_cba.columns
M.index = D_cba.index
return M
|
recalc_M
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(S, D_cba, Y, nr_sectors):\n Y_diag = ioutil.diagonalize_blocks(Y.values, blocksize=nr_sectors)\n Y_inv = np.linalg.inv(Y_diag)\n M = D_cba.dot(Y_inv)\n if type(D_cba) is pd.DataFrame:\n M.columns = D_cba.columns\n M.index = D_cba.index\n return M\n",
"role": "user"
},
{
"content": "recalc_M",
"role": "assistant"
}
] |
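
Editor's note: recalc_M recovers the multiplier matrix from an already computed footprint account instead of from L. With the final demand diagonalised per region block (ioutil.diagonalize_blocks), the relation used is M = D_cba * inv(Y_diag), which inverts D_cba = S L Y_diag and therefore gives back M = S L, the standard stressor multipliers.
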
def x(S, L, Y, nr_sectors):
Y_diag = ioutil.diagonalize_blocks(Y.values, blocksize=nr_sectors)
x_diag = L.dot(Y_diag)
x_tot = x_diag.values.sum(1)
del Y_diag
D_cba = pd.DataFrame(S.values.dot(x_diag), index=S.index, columns=S.columns
)
D_pba = pd.DataFrame(S.values * x_tot.reshape((1, -1)), index=S.index,
columns=S.columns)
dom_block = np.zeros((nr_sectors, nr_sectors))
x_trade = ioutil.set_block(x_diag.values, dom_block)
D_imp = pd.DataFrame(S.values.dot(x_trade), index=S.index, columns=S.
columns)
x_exp = x_trade.sum(1)
D_exp = pd.DataFrame(S.values * x_exp.reshape((1, -1)), index=S.index,
columns=S.columns)
return D_cba, D_pba, D_imp, D_exp
|
calc_accounts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(S, L, Y, nr_sectors):\n Y_diag = ioutil.diagonalize_blocks(Y.values, blocksize=nr_sectors)\n x_diag = L.dot(Y_diag)\n x_tot = x_diag.values.sum(1)\n del Y_diag\n D_cba = pd.DataFrame(S.values.dot(x_diag), index=S.index, columns=S.columns\n )\n D_pba = pd.DataFrame(S.values * x_tot.reshape((1, -1)), index=S.index,\n columns=S.columns)\n dom_block = np.zeros((nr_sectors, nr_sectors))\n x_trade = ioutil.set_block(x_diag.values, dom_block)\n D_imp = pd.DataFrame(S.values.dot(x_trade), index=S.index, columns=S.\n columns)\n x_exp = x_trade.sum(1)\n D_exp = pd.DataFrame(S.values * x_exp.reshape((1, -1)), index=S.index,\n columns=S.columns)\n return D_cba, D_pba, D_imp, D_exp\n",
"role": "user"
},
{
"content": "calc_accounts",
"role": "assistant"
}
] |
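
Editor's note: calc_accounts derives the four standard accounts from the stressor intensities S, the Leontief inverse L and the per-region final demand Y_agg: the consumption based account D_cba = S (L Y_diag), the production based account D_pba = S diag(x), and the import and export accounts built from L Y_diag with the domestic (within-region) blocks set to zero before applying S.
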
def x(url_db_view, url_db_content, mrio_regex, access_cookie=None):
returnvalue = namedtuple('url_content', ['raw_text', 'data_urls'])
url_text = requests.post(url_db_view, cookies=access_cookie).text
data_urls = [(url_db_content + ff) for ff in re.findall(mrio_regex,
url_text)]
return returnvalue(raw_text=url_text, data_urls=data_urls)
|
get_url_datafiles
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(url_db_view, url_db_content, mrio_regex, access_cookie=None):\n returnvalue = namedtuple('url_content', ['raw_text', 'data_urls'])\n url_text = requests.post(url_db_view, cookies=access_cookie).text\n data_urls = [(url_db_content + ff) for ff in re.findall(mrio_regex,\n url_text)]\n return returnvalue(raw_text=url_text, data_urls=data_urls)\n",
"role": "user"
},
{
"content": "get_url_datafiles",
"role": "assistant"
}
] |
def x(url_list, storage_folder, overwrite_existing, meta_handler,
access_cookie=None):
for url in url_list:
filename = os.path.basename(url)
if not overwrite_existing and filename in os.listdir(storage_folder):
continue
storage_file = os.path.join(storage_folder, filename)
req = requests.post(url, stream=True, cookies=access_cookie)
with open(storage_file, 'wb') as lf:
for chunk in req.iter_content(1024 * 5):
lf.write(chunk)
meta_handler._add_fileio('Downloaded {} to {}'.format(url, filename))
meta_handler.save()
return meta_handler
|
download_urls
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(url_list, storage_folder, overwrite_existing, meta_handler,\n access_cookie=None):\n for url in url_list:\n filename = os.path.basename(url)\n if not overwrite_existing and filename in os.listdir(storage_folder):\n continue\n storage_file = os.path.join(storage_folder, filename)\n req = requests.post(url, stream=True, cookies=access_cookie)\n with open(storage_file, 'wb') as lf:\n for chunk in req.iter_content(1024 * 5):\n lf.write(chunk)\n meta_handler._add_fileio('Downloaded {} to {}'.format(url, filename))\n meta_handler.save()\n return meta_handler\n",
"role": "user"
},
{
"content": "download_urls",
"role": "assistant"
}
] |
def x(storage_folder, years=None, overwrite_existing=False, satellite_urls=
WIOD_CONFIG['satellite_urls']):
try:
os.makedirs(storage_folder)
except FileExistsError:
pass
if type(years) is int or type(years) is str:
years = [years]
years = years if years else range(1995, 2012)
years = [str(yy).zfill(2)[-2:] for yy in years]
wiod_web_content = _get_url_datafiles(url_db_view=WIOD_CONFIG[
'url_db_view'], url_db_content=WIOD_CONFIG['url_db_content'],
mrio_regex='protected.*?wiot\\d\\d.*?xlsx')
restricted_wiod_io_urls = [url for url in wiod_web_content.data_urls if
re.search('(wiot)(\\d\\d)', os.path.basename(url)).group(2) in years]
meta = MRIOMetaData(location=storage_folder, description=
'WIOD metadata file for pymrio', name='WIOD', system='ixi', version
='data13')
meta = _download_urls(url_list=restricted_wiod_io_urls + satellite_urls,
storage_folder=storage_folder, overwrite_existing=
overwrite_existing, meta_handler=meta)
meta.save()
return meta
|
download_wiod2013
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(storage_folder, years=None, overwrite_existing=False, satellite_urls=\n WIOD_CONFIG['satellite_urls']):\n try:\n os.makedirs(storage_folder)\n except FileExistsError:\n pass\n if type(years) is int or type(years) is str:\n years = [years]\n years = years if years else range(1995, 2012)\n years = [str(yy).zfill(2)[-2:] for yy in years]\n wiod_web_content = _get_url_datafiles(url_db_view=WIOD_CONFIG[\n 'url_db_view'], url_db_content=WIOD_CONFIG['url_db_content'],\n mrio_regex='protected.*?wiot\\\\d\\\\d.*?xlsx')\n restricted_wiod_io_urls = [url for url in wiod_web_content.data_urls if\n re.search('(wiot)(\\\\d\\\\d)', os.path.basename(url)).group(2) in years]\n meta = MRIOMetaData(location=storage_folder, description=\n 'WIOD metadata file for pymrio', name='WIOD', system='ixi', version\n ='data13')\n meta = _download_urls(url_list=restricted_wiod_io_urls + satellite_urls,\n storage_folder=storage_folder, overwrite_existing=\n overwrite_existing, meta_handler=meta)\n meta.save()\n return meta\n",
"role": "user"
},
{
"content": "download_wiod2013",
"role": "assistant"
}
] |
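
Editor's note (illustrative): download_wiod2013 fetches the selected WIOD 2013 release tables plus the satellite account archives into a local folder and logs every download in an MRIOMetaData handler. A hedged usage sketch; folder and years are hypothetical:

    meta = download_wiod2013(storage_folder='/tmp/wiod13', years=[2005, 2006])
    # the returned MRIOMetaData lists the downloaded files; a metadata file is saved in the folder
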
def x(length):
s = '%.6f' % time.time()
whole, frac = map(int, s.split('.'))
res = '%d%d' % (whole, frac)
return res[:length]
|
get_timestamp
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(length):\n s = '%.6f' % time.time()\n whole, frac = map(int, s.split('.'))\n res = '%d%d' % (whole, frac)\n return res[:length]\n",
"role": "user"
},
{
"content": "get_timestamp",
"role": "assistant"
}
] |
def x(path):
if PY3:
return os.makedirs(path, exist_ok=True)
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
|
mkdir_p
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(path):\n if PY3:\n return os.makedirs(path, exist_ok=True)\n try:\n os.makedirs(path)\n except OSError as exc:\n if exc.errno == errno.EEXIST and os.path.isdir(path):\n pass\n else:\n raise\n",
"role": "user"
},
{
"content": "mkdir_p",
"role": "assistant"
}
] |
def x():
ozmq = __import__('zmq')
ozmq.Socket = zmq.Socket
ozmq.Context = zmq.Context
ozmq.Poller = zmq.Poller
ioloop = __import__('zmq.eventloop.ioloop')
ioloop.Poller = zmq.Poller
|
monkey_patch
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x():\n ozmq = __import__('zmq')\n ozmq.Socket = zmq.Socket\n ozmq.Context = zmq.Context\n ozmq.Poller = zmq.Poller\n ioloop = __import__('zmq.eventloop.ioloop')\n ioloop.Poller = zmq.Poller\n",
"role": "user"
},
{
"content": "monkey_patch",
"role": "assistant"
}
] |
def x(*extensions, name):
if type(extensions[0]) is tuple or type(extensions[0]) is list:
extensions = extensions[0]
FY_present = False
SY_present = False
SFY_columns = None
for ext in extensions:
if 'FY' in ext.get_DataFrame(data=False):
FY_present = True
SFY_columns = ext.FY.columns
if 'SY' in ext.get_DataFrame(data=False):
SY_present = True
SFY_columns = ext.SY.columns
set_dfs = [set(ext.get_DataFrame(data=False)) for ext in extensions]
df_dict = {key: None for key in set.intersection(*set_dfs)}
if FY_present:
df_dict['FY'] = None
if SY_present:
df_dict['SY'] = None
empty_df_dict = df_dict.copy()
attr_dict = {}
first_run = True
for ext in extensions:
for key in ext.__dict__:
if type(ext.__dict__[key]) is not pd.DataFrame:
if attr_dict.get(key, -99) == -99:
attr_dict[key] = ext.__dict__[key]
elif attr_dict[key] == ext.__dict__[key]:
continue
else:
attr_dict[key] = None
cur_dict = empty_df_dict.copy()
for df in cur_dict:
cur_dict[df] = getattr(ext, df)
if FY_present:
if 'FY' in ext.get_DataFrame(data=False):
cur_dict['FY'] = getattr(ext, 'FY')
else:
cur_dict['FY'] = pd.DataFrame(data=0, index=ext.get_index(),
columns=SFY_columns)
if SY_present:
if 'SY' in ext.get_DataFrame(data=False):
cur_dict['SY'] = getattr(ext, 'SY')
else:
cur_dict['SY'] = pd.DataFrame(data=0, index=ext.get_index(),
columns=SFY_columns)
for key in cur_dict:
if not first_run:
if cur_dict[key].index.names != df_dict[key].index.names:
cur_ind_names = list(cur_dict[key].index.names)
df_ind_names = list(df_dict[key].index.names)
cur_ind_names[0] = 'indicator'
df_ind_names[0] = cur_ind_names[0]
cur_dict[key].index.set_names(cur_ind_names, inplace=True)
df_dict[key].index.set_names(df_ind_names, inplace=True)
for ind in cur_ind_names:
if ind not in df_ind_names:
df_dict[key] = df_dict[key].set_index(pd.
DataFrame(data=None, index=df_dict[key].
index, columns=[ind])[ind], append=True)
for ind in df_ind_names:
if ind not in cur_ind_names:
cur_dict[key] = cur_dict[key].set_index(pd.
DataFrame(data=None, index=cur_dict[key].
index, columns=[ind])[ind], append=True)
df_dict[key] = pd.concat([df_dict[key], cur_dict[key]])
first_run = False
all_dict = dict(list(attr_dict.items()) + list(df_dict.items()))
all_dict['name'] = name
return Extension(**all_dict)
|
concate_extension
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(*extensions, name):\n if type(extensions[0]) is tuple or type(extensions[0]) is list:\n extensions = extensions[0]\n FY_present = False\n SY_present = False\n SFY_columns = None\n for ext in extensions:\n if 'FY' in ext.get_DataFrame(data=False):\n FY_present = True\n SFY_columns = ext.FY.columns\n if 'SY' in ext.get_DataFrame(data=False):\n SY_present = True\n SFY_columns = ext.SY.columns\n set_dfs = [set(ext.get_DataFrame(data=False)) for ext in extensions]\n df_dict = {key: None for key in set.intersection(*set_dfs)}\n if FY_present:\n df_dict['FY'] = None\n if SY_present:\n df_dict['SY'] = None\n empty_df_dict = df_dict.copy()\n attr_dict = {}\n first_run = True\n for ext in extensions:\n for key in ext.__dict__:\n if type(ext.__dict__[key]) is not pd.DataFrame:\n if attr_dict.get(key, -99) == -99:\n attr_dict[key] = ext.__dict__[key]\n elif attr_dict[key] == ext.__dict__[key]:\n continue\n else:\n attr_dict[key] = None\n cur_dict = empty_df_dict.copy()\n for df in cur_dict:\n cur_dict[df] = getattr(ext, df)\n if FY_present:\n if 'FY' in ext.get_DataFrame(data=False):\n cur_dict['FY'] = getattr(ext, 'FY')\n else:\n cur_dict['FY'] = pd.DataFrame(data=0, index=ext.get_index(),\n columns=SFY_columns)\n if SY_present:\n if 'SY' in ext.get_DataFrame(data=False):\n cur_dict['SY'] = getattr(ext, 'SY')\n else:\n cur_dict['SY'] = pd.DataFrame(data=0, index=ext.get_index(),\n columns=SFY_columns)\n for key in cur_dict:\n if not first_run:\n if cur_dict[key].index.names != df_dict[key].index.names:\n cur_ind_names = list(cur_dict[key].index.names)\n df_ind_names = list(df_dict[key].index.names)\n cur_ind_names[0] = 'indicator'\n df_ind_names[0] = cur_ind_names[0]\n cur_dict[key].index.set_names(cur_ind_names, inplace=True)\n df_dict[key].index.set_names(df_ind_names, inplace=True)\n for ind in cur_ind_names:\n if ind not in df_ind_names:\n df_dict[key] = df_dict[key].set_index(pd.\n DataFrame(data=None, index=df_dict[key].\n index, columns=[ind])[ind], append=True)\n for ind in df_ind_names:\n if ind not in cur_ind_names:\n cur_dict[key] = cur_dict[key].set_index(pd.\n DataFrame(data=None, index=cur_dict[key].\n index, columns=[ind])[ind], append=True)\n df_dict[key] = pd.concat([df_dict[key], cur_dict[key]])\n first_run = False\n all_dict = dict(list(attr_dict.items()) + list(df_dict.items()))\n all_dict['name'] = name\n return Extension(**all_dict)\n",
"role": "user"
},
{
"content": "concate_extension",
"role": "assistant"
}
] |
def x(self, force=False, _meta=None):
strwarn = None
for df in self.__basic__:
if getattr(self, df) is None:
if force:
strwarn = (
'Reset system warning - Recalculation after reset not possible because {} missing'
.format(df))
warnings.warn(strwarn, ResetWarning)
else:
                raise ResetError(
                    "Too few tables to recalculate the system after reset ({} missing) - reset can be forced by passing 'force=True'"
                    .format(df))
if _meta:
_meta._add_modify('Reset system to Z and Y')
if strwarn:
_meta._add_modify(strwarn)
[setattr(self, key, None) for key in self.get_DataFrame(data=False,
with_unit=False, with_population=False) if key not in self.__basic__]
return self
|
reset_full
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, force=False, _meta=None):\n strwarn = None\n for df in self.__basic__:\n if getattr(self, df) is None:\n if force:\n strwarn = (\n 'Reset system warning - Recalculation after reset not possible because {} missing'\n .format(df))\n warnings.warn(strwarn, ResetWarning)\n else:\n raise ResetError(\n \"To few tables to recalculate the system after reset ({} missing) - reset can be forced by passing 'force=True')\"\n .format(df))\n if _meta:\n _meta._add_modify('Reset system to Z and Y')\n if strwarn:\n _meta._add_modify(strwarn)\n [setattr(self, key, None) for key in self.get_DataFrame(data=False,\n with_unit=False, with_population=False) if key not in self.__basic__]\n return self\n",
"role": "user"
},
{
"content": "reset_full",
"role": "assistant"
}
] |
def x(self, force=False, _meta=None):
strwarn = None
for df in self.__basic__:
if getattr(self, df) is None:
if force:
strwarn = (
'Reset system warning - Recalculation after reset not possible because {} missing'
.format(df))
warnings.warn(strwarn, ResetWarning)
else:
                raise ResetError(
                    "Too few tables to recalculate the system after reset ({} missing) - reset can be forced by passing 'force=True'"
                    .format(df))
if _meta:
_meta._add_modify('Reset to absolute flows')
if strwarn:
_meta._add_modify(strwarn)
[setattr(self, key, None) for key in self.__non_agg_attributes__]
return self
|
reset_to_flows
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, force=False, _meta=None):\n strwarn = None\n for df in self.__basic__:\n if getattr(self, df) is None:\n if force:\n strwarn = (\n 'Reset system warning - Recalculation after reset not possible because {} missing'\n .format(df))\n warnings.warn(strwarn, ResetWarning)\n else:\n raise ResetError(\n \"To few tables to recalculate the system after reset ({} missing) - reset can be forced by passing 'force=True')\"\n .format(df))\n if _meta:\n _meta._add_modify('Reset to absolute flows')\n if strwarn:\n _meta._add_modify(strwarn)\n [setattr(self, key, None) for key in self.__non_agg_attributes__]\n return self\n",
"role": "user"
},
{
"content": "reset_to_flows",
"role": "assistant"
}
] |
def x(self):
[setattr(self, key, None) for key in self.get_DataFrame(data=False,
with_unit=False, with_population=False) if key not in self.
__coefficients__]
return self
|
reset_to_coefficients
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n [setattr(self, key, None) for key in self.get_DataFrame(data=False,\n with_unit=False, with_population=False) if key not in self.\n __coefficients__]\n return self\n",
"role": "user"
},
{
"content": "reset_to_coefficients",
"role": "assistant"
}
] |
def x(self, new_name=None):
_tmp = copy.deepcopy(self)
if not new_name:
new_name = self.name + '_copy'
if str(type(self)) == "<class 'pymrio.core.mriosystem.IOSystem'>":
_tmp.meta.note('IOSystem copy {new} based on {old}'.format(new=
new_name, old=self.meta.name))
_tmp.meta.change_meta('name', new_name, log=False)
else:
_tmp.name = new_name
return _tmp
|
copy
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, new_name=None):\n _tmp = copy.deepcopy(self)\n if not new_name:\n new_name = self.name + '_copy'\n if str(type(self)) == \"<class 'pymrio.core.mriosystem.IOSystem'>\":\n _tmp.meta.note('IOSystem copy {new} based on {old}'.format(new=\n new_name, old=self.meta.name))\n _tmp.meta.change_meta('name', new_name, log=False)\n else:\n _tmp.name = new_name\n return _tmp\n",
"role": "user"
},
{
"content": "copy",
"role": "assistant"
}
] |
def x(self, entries=None):
possible_dataframes = ['Y', 'FY']
for df in possible_dataframes:
if df in self.__dict__ and getattr(self, df) is not None:
try:
ind = getattr(self, df).columns.get_level_values('category'
).unique()
except (AssertionError, KeyError):
ind = getattr(self, df).columns.get_level_values(1).unique()
if entries:
if type(entries) is str:
entries = [entries]
ind = ind.tolist()
return [(None if ee not in entries else ee) for ee in ind]
else:
return ind
else:
logging.warn('No attributes available to get Y categories')
return None
|
get_Y_categories
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, entries=None):\n possible_dataframes = ['Y', 'FY']\n for df in possible_dataframes:\n if df in self.__dict__ and getattr(self, df) is not None:\n try:\n ind = getattr(self, df).columns.get_level_values('category'\n ).unique()\n except (AssertionError, KeyError):\n ind = getattr(self, df).columns.get_level_values(1).unique()\n if entries:\n if type(entries) is str:\n entries = [entries]\n ind = ind.tolist()\n return [(None if ee not in entries else ee) for ee in ind]\n else:\n return ind\n else:\n logging.warn('No attributes available to get Y categories')\n return None\n",
"role": "user"
},
{
"content": "get_Y_categories",
"role": "assistant"
}
] |
def x(self, as_dict=False, grouping_pattern=None):
possible_dataframes = ['A', 'L', 'Z', 'Y', 'F', 'FY', 'M', 'S', 'D_cba',
'D_pba', 'D_imp', 'D_exp', 'D_cba_reg', 'D_pba_reg', 'D_imp_reg',
'D_exp_reg', 'D_cba_cap', 'D_pba_cap', 'D_imp_cap', 'D_exp_cap']
for df in possible_dataframes:
if df in self.__dict__ and getattr(self, df) is not None:
orig_idx = getattr(self, df).index
break
else:
logging.warn('No attributes available to get index')
return None
if as_dict:
dd = {k: k for k in orig_idx}
if grouping_pattern:
for pattern, new_group in grouping_pattern.items():
if type(pattern) is str:
dd.update({k: new_group for k, v in dd.items() if re.
match(pattern, k)})
else:
dd.update({k: new_group for k, v in dd.items() if all([
re.match(pat, k[nr]) for nr, pat in enumerate(
pattern)])})
return dd
else:
return orig_idx
|
get_index
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, as_dict=False, grouping_pattern=None):\n possible_dataframes = ['A', 'L', 'Z', 'Y', 'F', 'FY', 'M', 'S', 'D_cba',\n 'D_pba', 'D_imp', 'D_exp', 'D_cba_reg', 'D_pba_reg', 'D_imp_reg',\n 'D_exp_reg', 'D_cba_cap', 'D_pba_cap', 'D_imp_cap', 'D_exp_cap']\n for df in possible_dataframes:\n if df in self.__dict__ and getattr(self, df) is not None:\n orig_idx = getattr(self, df).index\n break\n else:\n logging.warn('No attributes available to get index')\n return None\n if as_dict:\n dd = {k: k for k in orig_idx}\n if grouping_pattern:\n for pattern, new_group in grouping_pattern.items():\n if type(pattern) is str:\n dd.update({k: new_group for k, v in dd.items() if re.\n match(pattern, k)})\n else:\n dd.update({k: new_group for k, v in dd.items() if all([\n re.match(pat, k[nr]) for nr, pat in enumerate(\n pattern)])})\n return dd\n else:\n return orig_idx\n",
"role": "user"
},
{
"content": "get_index",
"role": "assistant"
}
] |
def x(self, index):
for df in self.get_DataFrame(data=True, with_population=False):
df.index = index
|
set_index
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, index):\n for df in self.get_DataFrame(data=True, with_population=False):\n df.index = index\n",
"role": "user"
},
{
"content": "set_index",
"role": "assistant"
}
] |
def x(self, data=False, with_unit=True, with_population=True):
for key in self.__dict__:
        if key == 'unit' and not with_unit:
            continue
        if key == 'population' and not with_population:
            continue
if type(self.__dict__[key]) is pd.DataFrame:
if data:
yield getattr(self, key)
else:
yield key
|
get_DataFrame
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data=False, with_unit=True, with_population=True):\n for key in self.__dict__:\n if key is 'unit' and not with_unit:\n continue\n if key is 'population' and not with_population:\n continue\n if type(self.__dict__[key]) is pd.DataFrame:\n if data:\n yield getattr(self, key)\n else:\n yield key\n",
"role": "user"
},
{
"content": "get_DataFrame",
"role": "assistant"
}
] |
def x(self, path, table_format='txt', sep='\t', table_ext=None,
float_format='%.12g'):
if type(path) is str:
path = path.rstrip('\\')
path = Path(path)
path.mkdir(parents=True, exist_ok=True)
para_file_path = path / DEFAULT_FILE_NAMES['filepara']
file_para = dict()
file_para['files'] = dict()
if table_format in ['text', 'csv', 'txt']:
table_format = 'txt'
elif table_format in ['pickle', 'bin', 'binary', 'pkl']:
table_format = 'pkl'
else:
raise ValueError('Unknown table format "{}" - must be "txt" or "pkl"'
.format(table_format))
if not table_ext:
if table_format == 'txt':
table_ext = '.txt'
if table_format == 'pkl':
table_ext = '.pkl'
if str(type(self)) == "<class 'pymrio.core.mriosystem.IOSystem'>":
file_para['systemtype'] = GENERIC_NAMES['iosys']
elif str(type(self)) == "<class 'pymrio.core.mriosystem.Extension'>":
file_para['systemtype'] = GENERIC_NAMES['ext']
file_para['name'] = self.name
else:
logging.warn('Unknown system type {} - set to "undef"'.format(str(
type(self))))
file_para['systemtype'] = 'undef'
for df, df_name in zip(self.get_DataFrame(data=True), self.get_DataFrame()
):
if type(df.index) is pd.MultiIndex:
nr_index_col = len(df.index.levels)
else:
nr_index_col = 1
if type(df.columns) is pd.MultiIndex:
nr_header = len(df.columns.levels)
else:
nr_header = 1
save_file = df_name + table_ext
save_file_with_path = path / save_file
logging.info('Save file {}'.format(save_file_with_path))
if table_format == 'txt':
df.to_csv(save_file_with_path, sep=sep, float_format=float_format)
else:
df.to_pickle(save_file_with_path)
file_para['files'][df_name] = dict()
file_para['files'][df_name]['name'] = save_file
file_para['files'][df_name]['nr_index_col'] = str(nr_index_col)
file_para['files'][df_name]['nr_header'] = str(nr_header)
with para_file_path.open(mode='w') as pf:
json.dump(file_para, pf, indent=4)
if file_para['systemtype'] == GENERIC_NAMES['iosys']:
if not self.meta:
self.meta = MRIOMetaData(name=self.name, location=path)
self.meta._add_fileio('Saved {} to {}'.format(self.name, path))
self.meta.save(location=path)
return self
|
save
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, path, table_format='txt', sep='\\t', table_ext=None,\n float_format='%.12g'):\n if type(path) is str:\n path = path.rstrip('\\\\')\n path = Path(path)\n path.mkdir(parents=True, exist_ok=True)\n para_file_path = path / DEFAULT_FILE_NAMES['filepara']\n file_para = dict()\n file_para['files'] = dict()\n if table_format in ['text', 'csv', 'txt']:\n table_format = 'txt'\n elif table_format in ['pickle', 'bin', 'binary', 'pkl']:\n table_format = 'pkl'\n else:\n raise ValueError('Unknown table format \"{}\" - must be \"txt\" or \"pkl\"'\n .format(table_format))\n return None\n if not table_ext:\n if table_format == 'txt':\n table_ext = '.txt'\n if table_format == 'pkl':\n table_ext = '.pkl'\n if str(type(self)) == \"<class 'pymrio.core.mriosystem.IOSystem'>\":\n file_para['systemtype'] = GENERIC_NAMES['iosys']\n elif str(type(self)) == \"<class 'pymrio.core.mriosystem.Extension'>\":\n file_para['systemtype'] = GENERIC_NAMES['ext']\n file_para['name'] = self.name\n else:\n logging.warn('Unknown system type {} - set to \"undef\"'.format(str(\n type(self))))\n file_para['systemtype'] = 'undef'\n for df, df_name in zip(self.get_DataFrame(data=True), self.get_DataFrame()\n ):\n if type(df.index) is pd.MultiIndex:\n nr_index_col = len(df.index.levels)\n else:\n nr_index_col = 1\n if type(df.columns) is pd.MultiIndex:\n nr_header = len(df.columns.levels)\n else:\n nr_header = 1\n save_file = df_name + table_ext\n save_file_with_path = path / save_file\n logging.info('Save file {}'.format(save_file_with_path))\n if table_format == 'txt':\n df.to_csv(save_file_with_path, sep=sep, float_format=float_format)\n else:\n df.to_pickle(save_file_with_path)\n file_para['files'][df_name] = dict()\n file_para['files'][df_name]['name'] = save_file\n file_para['files'][df_name]['nr_index_col'] = str(nr_index_col)\n file_para['files'][df_name]['nr_header'] = str(nr_header)\n with para_file_path.open(mode='w') as pf:\n json.dump(file_para, pf, indent=4)\n if file_para['systemtype'] == GENERIC_NAMES['iosys']:\n if not self.meta:\n self.meta = MRIOMetaData(name=self.name, location=path)\n self.meta._add_fileio('Saved {} to {}'.format(self.name, path))\n self.meta.save(location=path)\n return self\n",
"role": "user"
},
{
"content": "save",
"role": "assistant"
}
] |
def x(self, regions):
if type(regions) is list:
regions = {old: new for old, new in zip(self.get_regions(), regions)}
for df in self.get_DataFrame(data=True):
df.rename(index=regions, columns=regions, inplace=True)
try:
for ext in self.get_extensions(data=True):
for df in ext.get_DataFrame(data=True):
df.rename(index=regions, columns=regions, inplace=True)
except:
pass
self.meta._add_modify('Changed country names')
return self
|
rename_regions
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, regions):\n if type(regions) is list:\n regions = {old: new for old, new in zip(self.get_regions(), regions)}\n for df in self.get_DataFrame(data=True):\n df.rename(index=regions, columns=regions, inplace=True)\n try:\n for ext in self.get_extensions(data=True):\n for df in ext.get_DataFrame(data=True):\n df.rename(index=regions, columns=regions, inplace=True)\n except:\n pass\n self.meta._add_modify('Changed country names')\n return self\n",
"role": "user"
},
{
"content": "rename_regions",
"role": "assistant"
}
] |
def x(self, sectors):
if type(sectors) is list:
sectors = {old: new for old, new in zip(self.get_sectors(), sectors)}
for df in self.get_DataFrame(data=True):
df.rename(index=sectors, columns=sectors, inplace=True)
try:
for ext in self.get_extensions(data=True):
for df in ext.get_DataFrame(data=True):
df.rename(index=sectors, columns=sectors, inplace=True)
except:
pass
self.meta._add_modify('Changed sector names')
return self
|
rename_sectors
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, sectors):\n if type(sectors) is list:\n sectors = {old: new for old, new in zip(self.get_sectors(), sectors)}\n for df in self.get_DataFrame(data=True):\n df.rename(index=sectors, columns=sectors, inplace=True)\n try:\n for ext in self.get_extensions(data=True):\n for df in ext.get_DataFrame(data=True):\n df.rename(index=sectors, columns=sectors, inplace=True)\n except:\n pass\n self.meta._add_modify('Changed sector names')\n return self\n",
"role": "user"
},
{
"content": "rename_sectors",
"role": "assistant"
}
] |
def x(self, Y_categories):
if type(Y_categories) is list:
Y_categories = {old: new for old, new in zip(self.get_Y_categories(
), Y_categories)}
for df in self.get_DataFrame(data=True):
df.rename(index=Y_categories, columns=Y_categories, inplace=True)
try:
for ext in self.get_extensions(data=True):
for df in ext.get_DataFrame(data=True):
df.rename(index=Y_categories, columns=Y_categories, inplace
=True)
except:
pass
self.meta._add_modify('Changed Y category names')
return self
|
rename_Y_categories
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, Y_categories):\n if type(Y_categories) is list:\n Y_categories = {old: new for old, new in zip(self.get_Y_categories(\n ), Y_categories)}\n for df in self.get_DataFrame(data=True):\n df.rename(index=Y_categories, columns=Y_categories, inplace=True)\n try:\n for ext in self.get_extensions(data=True):\n for df in ext.get_DataFrame(data=True):\n df.rename(index=Y_categories, columns=Y_categories, inplace\n =True)\n except:\n pass\n self.meta._add_modify('Changed Y category names')\n return self\n",
"role": "user"
},
{
"content": "rename_Y_categories",
"role": "assistant"
}
] |
def x(self, x, Y, Y_agg=None, L=None, population=None):
if Y_agg is None:
try:
Y_agg = Y.sum(level='region', axis=1).reindex(self.get_regions(
), axis=1)
except (AssertionError, KeyError):
Y_agg = Y.sum(level=0, axis=1).reindex(self.get_regions(), axis=1)
y_vec = Y.sum(axis=0)
if self.F is None:
self.F = calc_F(self.S, x)
logging.debug('{} - F calculated'.format(self.name))
if self.S is None:
self.S = calc_S(self.F, x)
logging.debug('{} - S calculated'.format(self.name))
if self.FY is None and self.SY is not None:
self.FY = calc_FY(self.SY, y_vec)
logging.debug('{} - FY calculated'.format(self.name))
if self.SY is None and self.FY is not None:
self.SY = calc_SY(self.FY, y_vec)
logging.debug('{} - SY calculated'.format(self.name))
if self.M is None:
if L is not None:
self.M = calc_M(self.S, L)
logging.debug('{} - M calculated based on L'.format(self.name))
else:
try:
self.M = recalc_M(self.S, self.D_cba, Y=Y_agg, nr_sectors=
self.get_sectors().size)
logging.debug('{} - M calculated based on D_cba and Y'.
format(self.name))
except Exception as ex:
logging.debug('Recalculation of M not possible - cause: {}'
.format(ex))
FY_agg = 0
if self.FY is not None:
try:
FY_agg = self.FY.sum(level='region', axis=1).reindex(self.
get_regions(), axis=1)
except (AssertionError, KeyError):
FY_agg = self.FY.sum(level=0, axis=1).reindex(self.get_regions(
), axis=1)
if (self.D_cba is None or self.D_pba is None or self.D_imp is None or
self.D_exp is None):
if L is None:
logging.debug(
'Not possilbe to calculate D accounts - L not present')
return
else:
self.D_cba, self.D_pba, self.D_imp, self.D_exp = calc_accounts(self
.S, L, Y_agg, self.get_sectors().size)
logging.debug('{} - Accounts D calculated'.format(self.name))
if (self.D_cba_reg is None or self.D_pba_reg is None or self.D_imp_reg is
None or self.D_exp_reg is None):
try:
self.D_cba_reg = self.D_cba.sum(level='region', axis=1).reindex(
self.get_regions(), axis=1) + FY_agg
except (AssertionError, KeyError):
self.D_cba_reg = self.D_cba.sum(level=0, axis=1).reindex(self.
get_regions(), axis=1) + FY_agg
try:
self.D_pba_reg = self.D_pba.sum(level='region', axis=1).reindex(
self.get_regions(), axis=1) + FY_agg
except (AssertionError, KeyError):
self.D_pba_reg = self.D_pba.sum(level=0, axis=1).reindex(self.
get_regions(), axis=1) + FY_agg
try:
self.D_imp_reg = self.D_imp.sum(level='region', axis=1).reindex(
self.get_regions(), axis=1)
except (AssertionError, KeyError):
self.D_imp_reg = self.D_imp.sum(level=0, axis=1).reindex(self.
get_regions(), axis=1)
try:
self.D_exp_reg = self.D_exp.sum(level='region', axis=1).reindex(
self.get_regions(), axis=1)
except (AssertionError, KeyError):
self.D_exp_reg = self.D_exp.sum(level=0, axis=1).reindex(self.
get_regions(), axis=1)
logging.debug('{} - Accounts D for regions calculated'.format(self.
name))
if population is not None:
if type(population) is pd.DataFrame:
if population.columns.tolist() != self.D_cba_reg.columns.tolist():
logging.warning(
'Population regions are inconsistent with IO regions')
population = population.values
if (self.D_cba_cap is None or self.D_pba_cap is None or self.
D_imp_cap is None or self.D_exp_cap is None):
self.D_cba_cap = self.D_cba_reg.dot(np.diagflat(1.0 / population))
self.D_pba_cap = self.D_pba_reg.dot(np.diagflat(1.0 / population))
self.D_imp_cap = self.D_imp_reg.dot(np.diagflat(1.0 / population))
self.D_exp_cap = self.D_exp_reg.dot(np.diagflat(1.0 / population))
self.D_cba_cap.columns = self.D_cba_reg.columns
self.D_pba_cap.columns = self.D_pba_reg.columns
self.D_imp_cap.columns = self.D_imp_reg.columns
self.D_exp_cap.columns = self.D_exp_reg.columns
logging.debug('{} - Accounts D per capita calculated'.format(
self.name))
return self
|
calc_system
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, x, Y, Y_agg=None, L=None, population=None):\n if Y_agg is None:\n try:\n Y_agg = Y.sum(level='region', axis=1).reindex(self.get_regions(\n ), axis=1)\n except (AssertionError, KeyError):\n Y_agg = Y.sum(level=0, axis=1).reindex(self.get_regions(), axis=1)\n y_vec = Y.sum(axis=0)\n if self.F is None:\n self.F = calc_F(self.S, x)\n logging.debug('{} - F calculated'.format(self.name))\n if self.S is None:\n self.S = calc_S(self.F, x)\n logging.debug('{} - S calculated'.format(self.name))\n if self.FY is None and self.SY is not None:\n self.FY = calc_FY(self.SY, y_vec)\n logging.debug('{} - FY calculated'.format(self.name))\n if self.SY is None and self.FY is not None:\n self.SY = calc_SY(self.FY, y_vec)\n logging.debug('{} - SY calculated'.format(self.name))\n if self.M is None:\n if L is not None:\n self.M = calc_M(self.S, L)\n logging.debug('{} - M calculated based on L'.format(self.name))\n else:\n try:\n self.M = recalc_M(self.S, self.D_cba, Y=Y_agg, nr_sectors=\n self.get_sectors().size)\n logging.debug('{} - M calculated based on D_cba and Y'.\n format(self.name))\n except Exception as ex:\n logging.debug('Recalculation of M not possible - cause: {}'\n .format(ex))\n FY_agg = 0\n if self.FY is not None:\n try:\n FY_agg = self.FY.sum(level='region', axis=1).reindex(self.\n get_regions(), axis=1)\n except (AssertionError, KeyError):\n FY_agg = self.FY.sum(level=0, axis=1).reindex(self.get_regions(\n ), axis=1)\n if (self.D_cba is None or self.D_pba is None or self.D_imp is None or \n self.D_exp is None):\n if L is None:\n logging.debug(\n 'Not possilbe to calculate D accounts - L not present')\n return\n else:\n self.D_cba, self.D_pba, self.D_imp, self.D_exp = calc_accounts(self\n .S, L, Y_agg, self.get_sectors().size)\n logging.debug('{} - Accounts D calculated'.format(self.name))\n if (self.D_cba_reg is None or self.D_pba_reg is None or self.D_imp_reg is\n None or self.D_exp_reg is None):\n try:\n self.D_cba_reg = self.D_cba.sum(level='region', axis=1).reindex(\n self.get_regions(), axis=1) + FY_agg\n except (AssertionError, KeyError):\n self.D_cba_reg = self.D_cba.sum(level=0, axis=1).reindex(self.\n get_regions(), axis=1) + FY_agg\n try:\n self.D_pba_reg = self.D_pba.sum(level='region', axis=1).reindex(\n self.get_regions(), axis=1) + FY_agg\n except (AssertionError, KeyError):\n self.D_pba_reg = self.D_pba.sum(level=0, axis=1).reindex(self.\n get_regions(), axis=1) + FY_agg\n try:\n self.D_imp_reg = self.D_imp.sum(level='region', axis=1).reindex(\n self.get_regions(), axis=1)\n except (AssertionError, KeyError):\n self.D_imp_reg = self.D_imp.sum(level=0, axis=1).reindex(self.\n get_regions(), axis=1)\n try:\n self.D_exp_reg = self.D_exp.sum(level='region', axis=1).reindex(\n self.get_regions(), axis=1)\n except (AssertionError, KeyError):\n self.D_exp_reg = self.D_exp.sum(level=0, axis=1).reindex(self.\n get_regions(), axis=1)\n logging.debug('{} - Accounts D for regions calculated'.format(self.\n name))\n if population is not None:\n if type(population) is pd.DataFrame:\n if population.columns.tolist() != self.D_cba_reg.columns.tolist():\n logging.warning(\n 'Population regions are inconsistent with IO regions')\n population = population.values\n if (self.D_cba_cap is None or self.D_pba_cap is None or self.\n D_imp_cap is None or self.D_exp_cap is None):\n self.D_cba_cap = self.D_cba_reg.dot(np.diagflat(1.0 / population))\n self.D_pba_cap = self.D_pba_reg.dot(np.diagflat(1.0 / population))\n self.D_imp_cap = self.D_imp_reg.dot(np.diagflat(1.0 / population))\n 
self.D_exp_cap = self.D_exp_reg.dot(np.diagflat(1.0 / population))\n self.D_cba_cap.columns = self.D_cba_reg.columns\n self.D_pba_cap.columns = self.D_pba_reg.columns\n self.D_imp_cap.columns = self.D_imp_reg.columns\n self.D_exp_cap.columns = self.D_exp_reg.columns\n logging.debug('{} - Accounts D per capita calculated'.format(\n self.name))\n return self\n",
"role": "user"
},
{
"content": "calc_system",
"role": "assistant"
}
] |
def x(self, row, per_capita=False, sector=None, file_name=False, file_dpi=
600, population=None, **kwargs):
if type(per_capita) is not bool:
logging.error('per_capita parameter must be boolean')
return None
if type(row) is int:
row = self.D_cba.ix[row].name
name_row = str(row).replace('(', '').replace(')', '').replace("'", ''
).replace('[', '').replace(']', '')
if sector:
graph_name = name_row + ' for sector ' + sector
else:
graph_name = name_row + ' total account'
if per_capita:
graph_name = graph_name + ' - per capita'
graph_name = self.name + ' - ' + graph_name
if self.unit is not None:
try:
y_label_name = name_row + ' (' + str(self.unit.ix[row, 'unit'].
tolist()[0]) + ')'
except:
y_label_name = name_row + ' (' + str(self.unit.ix[row, 'unit']
) + ')'
else:
y_label_name = name_row
if 'kind' not in kwargs:
kwargs['kind'] = 'bar'
if 'colormap' not in kwargs:
kwargs['colormap'] = 'Spectral'
accounts = collections.OrderedDict()
if sector:
accounts['Footprint'] = 'D_cba'
accounts['Territorial'] = 'D_pba'
accounts['Imports'] = 'D_imp'
accounts['Exports'] = 'D_exp'
elif per_capita:
accounts['Footprint'] = 'D_cba_cap'
accounts['Territorial'] = 'D_pba_cap'
accounts['Imports'] = 'D_imp_cap'
accounts['Exports'] = 'D_exp_cap'
else:
accounts['Footprint'] = 'D_cba_reg'
accounts['Territorial'] = 'D_pba_reg'
accounts['Imports'] = 'D_imp_reg'
accounts['Exports'] = 'D_exp_reg'
data_row = pd.DataFrame(columns=[key for key in accounts])
for key in accounts:
if sector:
try:
_data = pd.DataFrame(getattr(self, accounts[key]).xs(key=
sector, axis=1, level='sector').ix[row].T)
except (AssertionError, KeyError):
_data = pd.DataFrame(getattr(self, accounts[key]).xs(key=
sector, axis=1, level=1).ix[row].T)
if per_capita:
if population is not None:
if type(population) is pd.DataFrame:
if population.columns.tolist(
) != self.D_cba_reg.columns.tolist():
logging.warning(
'Population regions are inconsistent with IO regions'
)
population = population.values
population = population.reshape((-1, 1))
_data = _data / population
else:
logging.error(
'Population must be given for sector results per capita'
)
return
else:
_data = pd.DataFrame(getattr(self, accounts[key]).ix[row].T)
_data.columns = [key]
data_row[key] = _data[key]
if 'title' not in kwargs:
kwargs['title'] = graph_name
ax = data_row.plot(**kwargs)
plt.xlabel('Regions')
plt.ylabel(y_label_name)
plt.legend(loc='best')
try:
plt.tight_layout()
except:
pass
if file_name:
plt.savefig(file_name, dpi=file_dpi)
return ax
|
plot_account
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, row, per_capita=False, sector=None, file_name=False, file_dpi=\n 600, population=None, **kwargs):\n if type(per_capita) is not bool:\n logging.error('per_capita parameter must be boolean')\n return None\n if type(row) is int:\n row = self.D_cba.ix[row].name\n name_row = str(row).replace('(', '').replace(')', '').replace(\"'\", ''\n ).replace('[', '').replace(']', '')\n if sector:\n graph_name = name_row + ' for sector ' + sector\n else:\n graph_name = name_row + ' total account'\n if per_capita:\n graph_name = graph_name + ' - per capita'\n graph_name = self.name + ' - ' + graph_name\n if self.unit is not None:\n try:\n y_label_name = name_row + ' (' + str(self.unit.ix[row, 'unit'].\n tolist()[0]) + ')'\n except:\n y_label_name = name_row + ' (' + str(self.unit.ix[row, 'unit']\n ) + ')'\n else:\n y_label_name = name_row\n if 'kind' not in kwargs:\n kwargs['kind'] = 'bar'\n if 'colormap' not in kwargs:\n kwargs['colormap'] = 'Spectral'\n accounts = collections.OrderedDict()\n if sector:\n accounts['Footprint'] = 'D_cba'\n accounts['Territorial'] = 'D_pba'\n accounts['Imports'] = 'D_imp'\n accounts['Exports'] = 'D_exp'\n elif per_capita:\n accounts['Footprint'] = 'D_cba_cap'\n accounts['Territorial'] = 'D_pba_cap'\n accounts['Imports'] = 'D_imp_cap'\n accounts['Exports'] = 'D_exp_cap'\n else:\n accounts['Footprint'] = 'D_cba_reg'\n accounts['Territorial'] = 'D_pba_reg'\n accounts['Imports'] = 'D_imp_reg'\n accounts['Exports'] = 'D_exp_reg'\n data_row = pd.DataFrame(columns=[key for key in accounts])\n for key in accounts:\n if sector:\n try:\n _data = pd.DataFrame(getattr(self, accounts[key]).xs(key=\n sector, axis=1, level='sector').ix[row].T)\n except (AssertionError, KeyError):\n _data = pd.DataFrame(getattr(self, accounts[key]).xs(key=\n sector, axis=1, level=1).ix[row].T)\n if per_capita:\n if population is not None:\n if type(population) is pd.DataFrame:\n if population.columns.tolist(\n ) != self.D_cba_reg.columns.tolist():\n logging.warning(\n 'Population regions are inconsistent with IO regions'\n )\n population = population.values\n population = population.reshape((-1, 1))\n _data = _data / population\n else:\n logging.error(\n 'Population must be given for sector results per capita'\n )\n return\n else:\n _data = pd.DataFrame(getattr(self, accounts[key]).ix[row].T)\n _data.columns = [key]\n data_row[key] = _data[key]\n if 'title' not in kwargs:\n kwargs['title'] = graph_name\n ax = data_row.plot(**kwargs)\n plt.xlabel('Regions')\n plt.ylabel(y_label_name)\n plt.legend(loc='best')\n try:\n plt.tight_layout()\n except:\n pass\n if file_name:\n plt.savefig(file_name, dpi=file_dpi)\n return ax\n",
"role": "user"
},
{
"content": "plot_account",
"role": "assistant"
}
] |
def x(self, path, per_region=True, per_capita=False, pic_size=1000, format=
'rst', ffname=None, **kwargs):
if not per_region and not per_capita:
return
_plt = plt.isinteractive()
_rcParams = mpl.rcParams.copy()
rcParams = {'figure.figsize': (10, 5), 'figure.dpi': 350,
'axes.titlesize': 20, 'axes.labelsize': 20}
plt.ioff()
if type(path) is str:
path = path.rstrip('\\')
path = Path(path)
path.mkdir(parents=True, exist_ok=True)
if ffname is None:
valid_char = string.ascii_letters + string.digits + '_'
ffname = self.name.replace(' ', '_')
ffname = ''.join([r for r in ffname if r in valid_char])
rep_spec = collections.namedtuple('rep_spec', ['make', 'spec_string',
'is_per_capita'])
reports_to_write = {'per region accounts': rep_spec(per_region,
'_per_region', False), 'per capita accounts': rep_spec(per_capita,
'_per_capita', True)}
logging.info('Write report for {}'.format(self.name))
fig_name_list = []
for arep in reports_to_write:
if not reports_to_write[arep].make:
continue
report_txt = []
report_txt.append('###########')
report_txt.append('MRIO report')
report_txt.append('###########')
report_txt.append('\n')
_ext = 'Extension: ' + self.name + ' - ' + str(arep)
report_txt.append(_ext)
report_txt.append('=' * len(_ext))
report_txt.append('.. contents::\n\n')
curr_ffname = ffname + reports_to_write[arep].spec_string
subfolder = path / curr_ffname
subfolder.mkdir(parents=True, exist_ok=True)
for row in self.get_rows():
name_row = str(row).replace('(', '').replace(')', '').replace("'",
'').replace(' ', '_').replace(', ', '_').replace('__', '_')
graph_name = self.name + ' - ' + str(row).replace('(', '').replace(
')', '').replace("'", '')
def x(varStr):
return re.sub('\\W|^(?=\\d)', '_', varStr)
file_name = clean(name_row + reports_to_write[arep].spec_string)
file_name = re.sub('_+', '_', file_name)
file_name = file_name[:50]
def x(a, c):
return a + '_' + str(c)
_loopco = 0
while file_name_nr(file_name, _loopco) in fig_name_list:
_loopco += 1
file_name = file_name_nr(file_name, _loopco)
fig_name_list.append(file_name)
file_name = file_name + '.png'
file_name = subfolder / file_name
file_name_rel = file_name.relative_to(path)
self.plot_account(row, file_name=file_name, per_capita=
reports_to_write[arep].is_per_capita, **kwargs)
plt.close()
report_txt.append(graph_name)
report_txt.append('-' * len(graph_name) + '\n\n')
report_txt.append('.. image:: ' + file_name_rel)
report_txt.append(' :width: {} \n'.format(int(pic_size)))
report_txt.append('\nReport written on ' + time.strftime(
'%Y%m%d %H%M%S'))
fin_txt = '\n'.join(report_txt)
if format is not 'rst':
try:
import docutils.core as dc
if format == 'tex':
format == 'latex'
fin_txt = dc.publish_string(fin_txt, writer_name=format,
settings_overrides={'output_encoding': 'unicode'})
except:
logging.warn(
'Module docutils not available - write rst instead')
format = 'rst'
format_str = {'latex': 'tex', 'tex': 'tex', 'rst': 'txt', 'txt':
'txt', 'html': 'html'}
_repfile = curr_ffname + '.' + format_str.get(format, str(format))
with open(path / _repfile, 'w') as out_file:
out_file.write(fin_txt)
logging.info('Report for {what} written to {file_where}'.format(
what=arep, file_where=str(_repfile)))
mpl.rcParams.update(_rcParams)
if _plt:
plt.ion()
|
report_accounts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, path, per_region=True, per_capita=False, pic_size=1000, format=\n 'rst', ffname=None, **kwargs):\n if not per_region and not per_capita:\n return\n _plt = plt.isinteractive()\n _rcParams = mpl.rcParams.copy()\n rcParams = {'figure.figsize': (10, 5), 'figure.dpi': 350,\n 'axes.titlesize': 20, 'axes.labelsize': 20}\n plt.ioff()\n if type(path) is str:\n path = path.rstrip('\\\\')\n path = Path(path)\n path.mkdir(parents=True, exist_ok=True)\n if ffname is None:\n valid_char = string.ascii_letters + string.digits + '_'\n ffname = self.name.replace(' ', '_')\n ffname = ''.join([r for r in ffname if r in valid_char])\n rep_spec = collections.namedtuple('rep_spec', ['make', 'spec_string',\n 'is_per_capita'])\n reports_to_write = {'per region accounts': rep_spec(per_region,\n '_per_region', False), 'per capita accounts': rep_spec(per_capita,\n '_per_capita', True)}\n logging.info('Write report for {}'.format(self.name))\n fig_name_list = []\n for arep in reports_to_write:\n if not reports_to_write[arep].make:\n continue\n report_txt = []\n report_txt.append('###########')\n report_txt.append('MRIO report')\n report_txt.append('###########')\n report_txt.append('\\n')\n _ext = 'Extension: ' + self.name + ' - ' + str(arep)\n report_txt.append(_ext)\n report_txt.append('=' * len(_ext))\n report_txt.append('.. contents::\\n\\n')\n curr_ffname = ffname + reports_to_write[arep].spec_string\n subfolder = path / curr_ffname\n subfolder.mkdir(parents=True, exist_ok=True)\n for row in self.get_rows():\n name_row = str(row).replace('(', '').replace(')', '').replace(\"'\",\n '').replace(' ', '_').replace(', ', '_').replace('__', '_')\n graph_name = self.name + ' - ' + str(row).replace('(', '').replace(\n ')', '').replace(\"'\", '')\n\n def x(varStr):\n return re.sub('\\\\W|^(?=\\\\d)', '_', varStr)\n file_name = clean(name_row + reports_to_write[arep].spec_string)\n file_name = re.sub('_+', '_', file_name)\n file_name = file_name[:50]\n\n def x(a, c):\n return a + '_' + str(c)\n _loopco = 0\n while file_name_nr(file_name, _loopco) in fig_name_list:\n _loopco += 1\n file_name = file_name_nr(file_name, _loopco)\n fig_name_list.append(file_name)\n file_name = file_name + '.png'\n file_name = subfolder / file_name\n file_name_rel = file_name.relative_to(path)\n self.plot_account(row, file_name=file_name, per_capita=\n reports_to_write[arep].is_per_capita, **kwargs)\n plt.close()\n report_txt.append(graph_name)\n report_txt.append('-' * len(graph_name) + '\\n\\n')\n report_txt.append('.. image:: ' + file_name_rel)\n report_txt.append(' :width: {} \\n'.format(int(pic_size)))\n report_txt.append('\\nReport written on ' + time.strftime(\n '%Y%m%d %H%M%S'))\n fin_txt = '\\n'.join(report_txt)\n if format is not 'rst':\n try:\n import docutils.core as dc\n if format == 'tex':\n format == 'latex'\n fin_txt = dc.publish_string(fin_txt, writer_name=format,\n settings_overrides={'output_encoding': 'unicode'})\n except:\n logging.warn(\n 'Module docutils not available - write rst instead')\n format = 'rst'\n format_str = {'latex': 'tex', 'tex': 'tex', 'rst': 'txt', 'txt':\n 'txt', 'html': 'html'}\n _repfile = curr_ffname + '.' + format_str.get(format, str(format))\n with open(path / _repfile, 'w') as out_file:\n out_file.write(fin_txt)\n logging.info('Report for {what} written to {file_where}'.format(\n what=arep, file_where=str(_repfile)))\n mpl.rcParams.update(_rcParams)\n if _plt:\n plt.ion()\n",
"role": "user"
},
{
"content": "report_accounts",
"role": "assistant"
}
] |
def x(self):
possible_dataframes = ['F', 'FY', 'M', 'S', 'D_cba', 'D_pba', 'D_imp',
'D_exp', 'D_cba_reg', 'D_pba_reg', 'D_imp_reg', 'D_exp_reg',
'D_cba_cap', 'D_pba_cap', 'D_imp_cap', 'D_exp_cap']
for df in possible_dataframes:
if df in self.__dict__ and getattr(self, df) is not None:
return getattr(self, df).index.get_values()
else:
logging.warn('No attributes available to get row names')
return None
|
get_rows
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n possible_dataframes = ['F', 'FY', 'M', 'S', 'D_cba', 'D_pba', 'D_imp',\n 'D_exp', 'D_cba_reg', 'D_pba_reg', 'D_imp_reg', 'D_exp_reg',\n 'D_cba_cap', 'D_pba_cap', 'D_imp_cap', 'D_exp_cap']\n for df in possible_dataframes:\n if df in self.__dict__ and getattr(self, df) is not None:\n return getattr(self, df).index.get_values()\n else:\n logging.warn('No attributes available to get row names')\n return None\n",
"role": "user"
},
{
"content": "get_rows",
"role": "assistant"
}
] |
def x(self, row, name=None):
retdict = {}
for rowname, data in zip(self.get_DataFrame(), self.get_DataFrame(data=
True)):
retdict[rowname] = pd.DataFrame(data.ix[row])
if name:
retdict['name'] = name
return retdict
|
get_row_data
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, row, name=None):\n retdict = {}\n for rowname, data in zip(self.get_DataFrame(), self.get_DataFrame(data=\n True)):\n retdict[rowname] = pd.DataFrame(data.ix[row])\n if name:\n retdict['name'] = name\n return retdict\n",
"role": "user"
},
{
"content": "get_row_data",
"role": "assistant"
}
] |
def x(self, stressor, name=None):
if type(stressor) is int:
stressor = self.F.index[stressor]
if len(stressor) == 1:
stressor = stressor[0]
if not name:
if type(stressor) is str:
name = stressor
else:
name = '_'.join(stressor) + '_diag'
ext_diag = Extension(name)
ext_diag.F = pd.DataFrame(index=self.F.columns, columns=self.F.columns,
data=np.diag(self.F.loc[stressor, :]))
try:
ext_diag.unit = pd.DataFrame(index=ext_diag.F.index, columns=self.
unit.columns, data=self.unit.loc[stressor].unit)
except AttributeError:
ext_diag.unit = None
return ext_diag
|
diag_stressor
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, stressor, name=None):\n if type(stressor) is int:\n stressor = self.F.index[stressor]\n if len(stressor) == 1:\n stressor = stressor[0]\n if not name:\n if type(stressor) is str:\n name = stressor\n else:\n name = '_'.join(stressor) + '_diag'\n ext_diag = Extension(name)\n ext_diag.F = pd.DataFrame(index=self.F.columns, columns=self.F.columns,\n data=np.diag(self.F.loc[stressor, :]))\n try:\n ext_diag.unit = pd.DataFrame(index=ext_diag.F.index, columns=self.\n unit.columns, data=self.unit.loc[stressor].unit)\n except AttributeError:\n ext_diag.unit = None\n return ext_diag\n",
"role": "user"
},
{
"content": "diag_stressor",
"role": "assistant"
}
] |
def x(self):
if self.x is None and self.Z is None:
if self.L is None:
self.L = calc_L(self.A)
logging.info('Leontief matrix L calculated')
self.x = calc_x_from_L(self.L, self.Y.sum(axis=1))
self.meta._add_modify('Industry Output x calculated')
if self.Z is None:
self.Z = calc_Z(self.A, self.x)
self.meta._add_modify('Flow matrix Z calculated')
if self.x is None:
self.x = calc_x(self.Z, self.Y)
self.meta._add_modify('Industry output x calculated')
if self.A is None:
self.A = calc_A(self.Z, self.x)
self.meta._add_modify('Coefficient matrix A calculated')
if self.L is None:
self.L = calc_L(self.A)
self.meta._add_modify('Leontief matrix L calculated')
return self
|
calc_system
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n if self.x is None and self.Z is None:\n if self.L is None:\n self.L = calc_L(self.A)\n logging.info('Leontief matrix L calculated')\n self.x = calc_x_from_L(self.L, self.Y.sum(axis=1))\n self.meta._add_modify('Industry Output x calculated')\n if self.Z is None:\n self.Z = calc_Z(self.A, self.x)\n self.meta._add_modify('Flow matrix Z calculated')\n if self.x is None:\n self.x = calc_x(self.Z, self.Y)\n self.meta._add_modify('Industry output x calculated')\n if self.A is None:\n self.A = calc_A(self.Z, self.x)\n self.meta._add_modify('Coefficient matrix A calculated')\n if self.L is None:\n self.L = calc_L(self.A)\n self.meta._add_modify('Leontief matrix L calculated')\n return self\n",
"role": "user"
},
{
"content": "calc_system",
"role": "assistant"
}
] |
def x(self, extensions=None, Y_agg=None):
ext_list = list(self.get_extensions(data=False))
extensions = extensions or ext_list
if type(extensions) == str:
extensions = [extensions]
for ext_name in extensions:
self.meta._add_modify('Calculating accounts for extension {}'.
format(ext_name))
ext = getattr(self, ext_name)
ext.calc_system(x=self.x, Y=self.Y, L=self.L, Y_agg=Y_agg,
population=self.population)
return self
|
calc_extensions
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, extensions=None, Y_agg=None):\n ext_list = list(self.get_extensions(data=False))\n extensions = extensions or ext_list\n if type(extensions) == str:\n extensions = [extensions]\n for ext_name in extensions:\n self.meta._add_modify('Calculating accounts for extension {}'.\n format(ext_name))\n ext = getattr(self, ext_name)\n ext.calc_system(x=self.x, Y=self.Y, L=self.L, Y_agg=Y_agg,\n population=self.population)\n return self\n",
"role": "user"
},
{
"content": "calc_extensions",
"role": "assistant"
}
] |
def x(self, path, per_region=True, per_capita=False, pic_size=1000, format=
'rst', **kwargs):
for ext in self.get_extensions(data=True):
ext.report_accounts(path=path, per_region=per_region, per_capita=
per_capita, pic_size=pic_size, format=format, **kwargs)
|
report_accounts
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, path, per_region=True, per_capita=False, pic_size=1000, format=\n 'rst', **kwargs):\n for ext in self.get_extensions(data=True):\n ext.report_accounts(path=path, per_region=per_region, per_capita=\n per_capita, pic_size=pic_size, format=format, **kwargs)\n",
"role": "user"
},
{
"content": "report_accounts",
"role": "assistant"
}
] |
def x(self, data=False):
ext_list = [key for key in self.__dict__ if type(self.__dict__[key]) is
Extension]
for key in ext_list:
if data:
yield getattr(self, key)
else:
yield key
|
get_extensions
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, data=False):\n ext_list = [key for key in self.__dict__ if type(self.__dict__[key]) is\n Extension]\n for key in ext_list:\n if data:\n yield getattr(self, key)\n else:\n yield key\n",
"role": "user"
},
{
"content": "get_extensions",
"role": "assistant"
}
] |
def x(self, force=False):
super().reset_full(force=force, _meta=self.meta)
return self
|
reset_full
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, force=False):\n super().reset_full(force=force, _meta=self.meta)\n return self\n",
"role": "user"
},
{
"content": "reset_full",
"role": "assistant"
}
] |
def x(self, force=False):
self.reset_full()
[ee.reset_full() for ee in self.get_extensions(data=True)]
self.meta._add_modify('Reset all calculated data')
return self
|
reset_all_full
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, force=False):\n self.reset_full()\n [ee.reset_full() for ee in self.get_extensions(data=True)]\n self.meta._add_modify('Reset all calculated data')\n return self\n",
"role": "user"
},
{
"content": "reset_all_full",
"role": "assistant"
}
] |
def x(self, force=False):
super().reset_to_flows(force=force, _meta=self.meta)
return self
|
reset_to_flows
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, force=False):\n super().reset_to_flows(force=force, _meta=self.meta)\n return self\n",
"role": "user"
},
{
"content": "reset_to_flows",
"role": "assistant"
}
] |
def x(self, force=False):
self.reset_to_flows(force=force)
[ee.reset_to_flows(force=force) for ee in self.get_extensions(data=True)]
self.meta._add_modify('Reset full system to absolute flows')
return self
|
reset_all_to_flows
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, force=False):\n self.reset_to_flows(force=force)\n [ee.reset_to_flows(force=force) for ee in self.get_extensions(data=True)]\n self.meta._add_modify('Reset full system to absolute flows')\n return self\n",
"role": "user"
},
{
"content": "reset_all_to_flows",
"role": "assistant"
}
] |
def x(self):
self.reset_to_coefficients()
[ee.reset_to_coefficients() for ee in self.get_extensions(data=True)]
self.meta._add_modify('Reset full system to coefficients')
return self
|
reset_all_to_coefficients
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self):\n self.reset_to_coefficients()\n [ee.reset_to_coefficients() for ee in self.get_extensions(data=True)]\n self.meta._add_modify('Reset full system to coefficients')\n return self\n",
"role": "user"
},
{
"content": "reset_all_to_coefficients",
"role": "assistant"
}
] |
def x(self, path, table_format='txt', sep='\t', table_ext=None,
float_format='%.12g'):
if type(path) is str:
path = path.rstrip('\\')
path = Path(path)
path.mkdir(parents=True, exist_ok=True)
self.save(path=path, table_format=table_format, sep=sep, table_ext=
table_ext, float_format=float_format)
for ext, ext_name in zip(self.get_extensions(data=True), self.
get_extensions()):
ext_path = path / ext_name
ext.save(path=ext_path, table_format=table_format, sep=sep,
table_ext=table_ext, float_format=float_format)
return self
|
save_all
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, path, table_format='txt', sep='\\t', table_ext=None,\n float_format='%.12g'):\n if type(path) is str:\n path = path.rstrip('\\\\')\n path = Path(path)\n path.mkdir(parents=True, exist_ok=True)\n self.save(path=path, table_format=table_format, sep=sep, table_ext=\n table_ext, float_format=float_format)\n for ext, ext_name in zip(self.get_extensions(data=True), self.\n get_extensions()):\n ext_path = path / ext_name\n ext.save(path=ext_path, table_format=table_format, sep=sep,\n table_ext=table_ext, float_format=float_format)\n return self\n",
"role": "user"
},
{
"content": "save_all",
"role": "assistant"
}
] |
def x(self, ext=None):
if ext is None:
ext = list(self.get_extensions())
if type(ext) is str:
ext = [ext]
for ee in ext:
try:
del self.__dict__[ee]
except KeyError:
for exinstancename, exdata in zip(self.get_extensions(data=
False), self.get_extensions(data=True)):
if exdata.name == ee:
del self.__dict__[exinstancename]
finally:
self.meta._add_modify('Removed extension {}'.format(ee))
return self
|
remove_extension
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(self, ext=None):\n if ext is None:\n ext = list(self.get_extensions())\n if type(ext) is str:\n ext = [ext]\n for ee in ext:\n try:\n del self.__dict__[ee]\n except KeyError:\n for exinstancename, exdata in zip(self.get_extensions(data=\n False), self.get_extensions(data=True)):\n if exdata.name == ee:\n del self.__dict__[exinstancename]\n finally:\n self.meta._add_modify('Removed extension {}'.format(ee))\n return self\n",
"role": "user"
},
{
"content": "remove_extension",
"role": "assistant"
}
] |
def x(inp):
inp = np.asarray(inp)
nr_dim = np.ndim(inp)
if nr_dim == 1:
return True
elif nr_dim == 2 and 1 in inp.shape:
return True
else:
return False
|
is_vector
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(inp):\n inp = np.asarray(inp)\n nr_dim = np.ndim(inp)\n if nr_dim == 1:\n return True\n elif nr_dim == 2 and 1 in inp.shape:\n return True\n else:\n return False\n",
"role": "user"
},
{
"content": "is_vector",
"role": "assistant"
}
] |
def x(path):
path = Path(path)
if zipfile.is_zipfile(str(path)):
with zipfile.ZipFile(str(path)) as zz:
filelist = [info.filename for info in zz.infolist()]
iszip = True
else:
iszip = False
filelist = [str(f) for f in path.glob('**/*') if f.is_file()]
return namedtuple('repocontent', ['iszip', 'filelist'])(iszip, filelist)
|
get_repo_content
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(path):\n path = Path(path)\n if zipfile.is_zipfile(str(path)):\n with zipfile.ZipFile(str(path)) as zz:\n filelist = [info.filename for info in zz.infolist()]\n iszip = True\n else:\n iszip = False\n filelist = [str(f) for f in path.glob('**/*') if f.is_file()]\n return namedtuple('repocontent', ['iszip', 'filelist'])(iszip, filelist)\n",
"role": "user"
},
{
"content": "get_repo_content",
"role": "assistant"
}
] |
def x(path, path_in_arc=''):
if type(path) is str:
path = Path(path.rstrip('\\'))
if zipfile.is_zipfile(str(path)):
para_file_folder = str(path_in_arc)
with zipfile.ZipFile(file=str(path)) as zf:
files = zf.namelist()
else:
para_file_folder = str(path)
files = [str(f) for f in path.glob('**/*')]
if para_file_folder not in files:
para_file_full_path = os.path.join(para_file_folder,
DEFAULT_FILE_NAMES['filepara'])
else:
para_file_full_path = para_file_folder
para_file_folder = os.path.dirname(para_file_full_path)
if para_file_full_path not in files:
raise FileNotFoundError('File parameter file {} not found'.format(
para_file_full_path))
if zipfile.is_zipfile(str(path)):
with zipfile.ZipFile(file=str(path)) as zf:
para_file_content = json.loads(zf.read(para_file_full_path).
decode('utf-8'))
else:
with open(para_file_full_path, 'r') as pf:
para_file_content = json.load(pf)
return namedtuple('file_parameter', ['folder', 'name', 'content'])(
para_file_folder, os.path.basename(para_file_full_path),
para_file_content)
|
get_file_para
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(path, path_in_arc=''):\n if type(path) is str:\n path = Path(path.rstrip('\\\\'))\n if zipfile.is_zipfile(str(path)):\n para_file_folder = str(path_in_arc)\n with zipfile.ZipFile(file=str(path)) as zf:\n files = zf.namelist()\n else:\n para_file_folder = str(path)\n files = [str(f) for f in path.glob('**/*')]\n if para_file_folder not in files:\n para_file_full_path = os.path.join(para_file_folder,\n DEFAULT_FILE_NAMES['filepara'])\n else:\n para_file_full_path = para_file_folder\n para_file_folder = os.path.dirname(para_file_full_path)\n if para_file_full_path not in files:\n raise FileNotFoundError('File parameter file {} not found'.format(\n para_file_full_path))\n if zipfile.is_zipfile(str(path)):\n with zipfile.ZipFile(file=str(path)) as zf:\n para_file_content = json.loads(zf.read(para_file_full_path).\n decode('utf-8'))\n else:\n with open(para_file_full_path, 'r') as pf:\n para_file_content = json.load(pf)\n return namedtuple('file_parameter', ['folder', 'name', 'content'])(\n para_file_folder, os.path.basename(para_file_full_path),\n para_file_content)\n",
"role": "user"
},
{
"content": "get_file_para",
"role": "assistant"
}
] |
def x(agg_vector, pos_dict=None):
if isinstance(agg_vector, np.ndarray):
agg_vector = agg_vector.flatten().tolist()
if type(agg_vector[0]) == str:
str_vector = agg_vector
agg_vector = np.zeros(len(str_vector))
if pos_dict:
if len(pos_dict.keys()) != len(set(str_vector)):
raise ValueError(
'Posistion elements inconsistent with aggregation vector')
seen = pos_dict
else:
seen = {}
counter = 0
for ind, item in enumerate(str_vector):
if item not in seen:
seen[item] = counter
counter += 1
agg_vector[ind] = seen[item]
agg_vector = np.array(agg_vector, dtype=int)
agg_vector = agg_vector.reshape((1, -1))
row_corr = agg_vector
col_corr = np.arange(agg_vector.size)
agg_matrix = np.zeros((row_corr.max() + 1, col_corr.max() + 1))
agg_matrix[row_corr, col_corr] = 1
agg_matrix[np.tile(agg_vector == -1, (np.shape(agg_matrix)[0], 1))] = 0
return agg_matrix
|
build_agg_matrix
|
[
{
"content": "Given the source code of a python function, suggest a fitting name for the function.",
"role": "system"
},
{
"content": "def x(agg_vector, pos_dict=None):\n if isinstance(agg_vector, np.ndarray):\n agg_vector = agg_vector.flatten().tolist()\n if type(agg_vector[0]) == str:\n str_vector = agg_vector\n agg_vector = np.zeros(len(str_vector))\n if pos_dict:\n if len(pos_dict.keys()) != len(set(str_vector)):\n raise ValueError(\n 'Posistion elements inconsistent with aggregation vector')\n seen = pos_dict\n else:\n seen = {}\n counter = 0\n for ind, item in enumerate(str_vector):\n if item not in seen:\n seen[item] = counter\n counter += 1\n agg_vector[ind] = seen[item]\n agg_vector = np.array(agg_vector, dtype=int)\n agg_vector = agg_vector.reshape((1, -1))\n row_corr = agg_vector\n col_corr = np.arange(agg_vector.size)\n agg_matrix = np.zeros((row_corr.max() + 1, col_corr.max() + 1))\n agg_matrix[row_corr, col_corr] = 1\n agg_matrix[np.tile(agg_vector == -1, (np.shape(agg_matrix)[0], 1))] = 0\n return agg_matrix\n",
"role": "user"
},
{
"content": "build_agg_matrix",
"role": "assistant"
}
] |