    return nb, resources

1161
def preprocess(
    self, nb: "NotebookNode", resources: dict
) -> Tuple["NotebookNode", dict]:
    """Preprocess the entire Notebook."""
    exam_num = resources["exam_num"]
    time = resources["time"]
    date = resources["date"]
    nb.cells.insert(0, new_markdown_cell(source="---"))
    nb.cells.insert(0, new_markdown_cell(source=""))
    nb.cells.insert(0, exam_instructions_cell)
    first_cell_source = (
        "# ME 2233: Thermodynamic Principles\n\n"
        f"# Exam {exam_num} - {time}\n\n# {date}"
    )
    nb.cells.insert(0, new_markdown_cell(source=first_cell_source))
    return nb, resources
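
Row 1161's preprocess() is the hook method of an nbconvert Preprocessor subclass. Below is a minimal, self-contained sketch of how such a preprocessor could be registered with an exporter and run; the class name ExamHeaderPreprocessor, its one-cell body, and the resource values are illustrative assumptions rather than part of the source row.

# Hypothetical wiring sketch (not from the source row): a stripped-down
# preprocessor that inserts only the header cell, registered on an exporter.
from nbconvert import NotebookExporter
from nbconvert.preprocessors import Preprocessor
from nbformat.v4 import new_markdown_cell, new_notebook


class ExamHeaderPreprocessor(Preprocessor):
    def preprocess(self, nb, resources):
        header = (
            "# ME 2233: Thermodynamic Principles\n\n"
            f"# Exam {resources['exam_num']} - {resources['time']}\n\n"
            f"# {resources['date']}"
        )
        nb.cells.insert(0, new_markdown_cell(source=header))
        return nb, resources


exporter = NotebookExporter()
exporter.register_preprocessor(ExamHeaderPreprocessor(), enabled=True)
resources = {"exam_num": 1, "time": "9:00 AM", "date": "October 10, 2019"}
body, resources = exporter.from_notebook_node(new_notebook(), resources=resources)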

1162
def parse_from_dict(json_dict):
    """
    Given a Unified Uploader message, parse the contents and return a
    MarketOrderList.

    :param dict json_dict: A Unified Uploader message as a JSON dict.
    :rtype: MarketOrderList
    :returns: An instance of MarketOrderList, containing the orders
        within.
    """
    order_columns = json_dict['columns']
    order_list = MarketOrderList(
        upload_keys=json_dict['uploadKeys'],
        order_generator=json_dict['generator'],
    )
    for rowset in json_dict['rowsets']:
        generated_at = parse_datetime(rowset['generatedAt'])
        region_id = rowset['regionID']
        type_id = rowset['typeID']
        order_list.set_empty_region(region_id, type_id, generated_at)
        for row in rowset['rows']:
            order_kwargs = _columns_to_kwargs(
                SPEC_TO_KWARG_CONVERSION, order_columns, row)
            order_kwargs.update({
                'region_id': region_id,
                'type_id': type_id,
                'generated_at': generated_at,
            })
            order_kwargs['order_issue_date'] = parse_datetime(
                order_kwargs['order_issue_date'])
            order_list.add_order(MarketOrder(**order_kwargs))
    return order_list
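
As a usage sketch for parse_from_dict() in row 1162, the snippet below builds a minimal Unified Uploader "orders" message and parses it. The import path emds.formats.unified, the column names, and every ID and value are assumptions made for illustration; the authoritative column list lives in the library's own spec constants.

# Hypothetical input for parse_from_dict(); the column order mirrors the row
# layout used by encode_to_json() in row 1163. Import path and values assumed.
from emds.formats.unified import parse_from_dict  # assumed module path

message = {
    "resultType": "orders",
    "version": "0.1",
    "uploadKeys": [{"name": "example", "key": "abc123"}],
    "generator": {"name": "ExampleClient", "version": "1.0"},
    "currentTime": "2013-06-07T22:42:00+00:00",
    "columns": ["price", "volRemaining", "range", "orderID", "volEntered",
                "minVolume", "bid", "issueDate", "duration", "stationID",
                "solarSystemID"],
    "rowsets": [{
        "generatedAt": "2013-06-07T22:41:30+00:00",
        "regionID": 10000002,
        "typeID": 34,
        "rows": [
            [5.12, 1000, 32767, 1234567890, 1000, 1, False,
             "2013-06-07T20:00:00+00:00", 90, 60003760, 30000142],
        ],
    }],
}

order_list = parse_from_dict(message)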

1163
def encode_to_json(order_list):
    """
    Encodes this list of MarketOrder instances to a JSON string.

    :param MarketOrderList order_list: The order list to serialize.
    :rtype: str
    """
    rowsets = []
    for items_in_region_list in order_list._orders.values():
        region_id = items_in_region_list.region_id
        type_id = items_in_region_list.type_id
        generated_at = gen_iso_datetime_str(items_in_region_list.generated_at)
        rows = []
        for order in items_in_region_list.orders:
            issue_date = gen_iso_datetime_str(order.order_issue_date)
            # The order in which these values are added is crucial. It must
            # match STANDARD_ENCODED_COLUMNS.
            rows.append([
                order.price,
                order.volume_remaining,
                order.order_range,
                order.order_id,
                order.volume_entered,
                order.minimum_volume,
                order.is_bid,
                issue_date,
                order.order_duration,
                order.station_id,
                order.solar_system_id,
            ])
        rowsets.append(dict(
            generatedAt=generated_at,
            regionID=region_id,
            typeID=type_id,
            rows=rows,
        ))
    json_dict = {
        'resultType': 'orders',
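
encode_to_json() in row 1163 is the inverse of parse_from_dict(), so under the same assumptions as the sketch above a round-trip could look like the following; the import path is again an assumption.

# Round-trip sketch (hypothetical): serialize the parsed list back to a
# unified "orders" message and spot-check the result type.
import json

from emds.formats.unified import encode_to_json  # assumed module path

json_str = encode_to_json(order_list)  # order_list from the previous sketch
assert json.loads(json_str)["resultType"] == "orders"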