hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 11 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 251 | max_stars_repo_name stringlengths 4 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 251 | max_issues_repo_name stringlengths 4 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 251 | max_forks_repo_name stringlengths 4 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.05M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.04M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
8156afb1df8b300172b241e218362d0df0d09d97 | 297 | py | Python | setup.py | dilayercelik/neural-networks-tfw1 | 8f8100bad59d2d57ada7b8a7efb16544f805c9bd | [
"MIT"
] | null | null | null | setup.py | dilayercelik/neural-networks-tfw1 | 8f8100bad59d2d57ada7b8a7efb16544f805c9bd | [
"MIT"
] | null | null | null | setup.py | dilayercelik/neural-networks-tfw1 | 8f8100bad59d2d57ada7b8a7efb16544f805c9bd | [
"MIT"
] | null | null | null | from setuptools import setup
setup(name='neural_networks_tfw1',
version='0.1',
description='Implementing Neural Networks with Tensorflow',
packages=['neural_networks_tfw1'],
author='Dilay Fidan Ercelik',
author_email='dilay.ercelik@gmail.com',
zip_safe=False)
| 29.7 | 65 | 0.707071 |
8157314b8c5e999d455ae8518882f282b75cc228 | 4,581 | py | Python | half_json/json_util.py | half-pie/half-json | d8064e90ac769547c22db11bcbe47fcb4f1eb600 | [
"MIT"
] | 4 | 2020-08-04T15:14:25.000Z | 2021-08-18T18:29:03.000Z | half_json/json_util.py | half-pie/half-json | d8064e90ac769547c22db11bcbe47fcb4f1eb600 | [
"MIT"
] | 1 | 2019-06-04T15:01:31.000Z | 2019-06-04T15:01:31.000Z | half_json/json_util.py | half-pie/half-json | d8064e90ac769547c22db11bcbe47fcb4f1eb600 | [
"MIT"
] | 3 | 2019-06-01T14:16:32.000Z | 2021-06-25T10:10:47.000Z | # coding=utf8
import re
import json.decoder
from collections import namedtuple
from json.decoder import JSONDecoder
from json.scanner import py_make_scanner
from json.decoder import py_scanstring
def errmsg_inv(e):
assert isinstance(e, ValueError)
message = e.message
idx = message.rindex(':')
errmsg, left = message[:idx], message[idx + 1:]
numbers = re.compile(r'\d+').findall(left)
parser = e.__dict__.get("parser", "")
result = {
"parsers": e.__dict__.get("parsers", []),
"error": errors.get_decode_error(parser, errmsg),
"lineno": int(numbers[0]),
"colno": int(numbers[1]),
}
if len(numbers) == 3:
result["pos"] = int(numbers[2])
if len(numbers) > 3:
result["endlineno"] = int(numbers[2])
result["endcolno"] = int(numbers[3])
result["pos"] = int(numbers[4])
result["end"] = int(numbers[5])
return result
def record_parser_name(parser):
return new_parser
def make_decoder():
json.decoder.scanstring = record_parser_name(py_scanstring)
decoder = JSONDecoder()
decoder.parse_object = record_parser_name(decoder.parse_object)
decoder.parse_array = record_parser_name(decoder.parse_array)
decoder.parse_string = record_parser_name(py_scanstring)
decoder.parse_object = record_parser_name(decoder.parse_object)
decoder.scan_once = py_make_scanner(decoder)
return decoder
decoder = make_decoder()
DecodeResult = namedtuple('DecodeResult', ['success', 'exception', 'err_info'])
| 32.260563 | 104 | 0.652696 |
8157a3f9c8c5a1a22e0af65a58d5048d55b4c514 | 1,963 | py | Python | capa-system/capaSystem.py | slumbermachine/capatimelapse | 446e2a276b8ab0cf8d2f2292858cf2b540eb4748 | [
"MIT"
] | 2 | 2017-06-13T20:49:50.000Z | 2019-04-09T10:14:24.000Z | capa-system/capaSystem.py | slumbermachine/capatimelapse | 446e2a276b8ab0cf8d2f2292858cf2b540eb4748 | [
"MIT"
] | null | null | null | capa-system/capaSystem.py | slumbermachine/capatimelapse | 446e2a276b8ab0cf8d2f2292858cf2b540eb4748 | [
"MIT"
] | null | null | null | #!/usr/bin/python
#####################################################################
# Name : capaSystem.py
# Description : Read system data and update db for web display
# Environment : Tested under Raspberry Pi Rasbian Jessie Summer 17
# Author : Steve Osteen sosteen@gmail.com
######################################################################
import MySQLdb
import sys
import time
from subprocess import Popen, PIPE
import logging
import logging.handlers
log = logging.getLogger('CapaTimeLapseLog')
log.setLevel(logging.DEBUG) # prod: logging.ERROR
handler = logging.handlers.SysLogHandler(address='/dev/log')
formatter = logging.Formatter('%(name)-12s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
if __name__ == '__main__':
main()
| 30.671875 | 101 | 0.596536 |
815971e46cc7b24062e27d29f7d4f09a3aec13fb | 3,770 | py | Python | wagtail/wagtailadmin/tasks.py | willcodefortea/wagtail | 2723b85ed8f356bde89d9541105b8cea4812d6a1 | [
"BSD-3-Clause"
] | null | null | null | wagtail/wagtailadmin/tasks.py | willcodefortea/wagtail | 2723b85ed8f356bde89d9541105b8cea4812d6a1 | [
"BSD-3-Clause"
] | null | null | null | wagtail/wagtailadmin/tasks.py | willcodefortea/wagtail | 2723b85ed8f356bde89d9541105b8cea4812d6a1 | [
"BSD-3-Clause"
] | null | null | null | from django.template.loader import render_to_string
from django.core.mail import send_mail
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db.models import Q
from wagtail.wagtailcore.models import PageRevision, GroupPagePermission
from wagtail.wagtailusers.models import UserProfile
# The following will check to see if we can import task from celery -
# if not then we definitely haven't installed it
try:
from celery.decorators import task
NO_CELERY = False
except:
NO_CELERY = True
# However, we could have installed celery for other projects. So we will also
# check if we have defined the BROKER_URL setting. If not then definitely we
# haven't configured it.
if NO_CELERY or not hasattr(settings, 'BROKER_URL'):
# So if we enter here we will define a different "task" decorator that
# just returns the original function and sets its delay attribute to
# point to the original function: This way, the send_notification
# function will be actually called instead of the the
# send_notification.delay()
| 36.960784 | 145 | 0.734218 |
815a258e49c9c6abc6816370b4272cf95e62bbe1 | 3,506 | py | Python | app/map_sup_enrich_compose.py | onap/sdc-dcae-d-tosca-lab | b0120c1671e8987387ccae4f21793ceb303f471c | [
"Apache-2.0"
] | 1 | 2021-10-15T19:47:42.000Z | 2021-10-15T19:47:42.000Z | app/map_sup_enrich_compose.py | onap/archive-sdc-dcae-d-tosca-lab | b0120c1671e8987387ccae4f21793ceb303f471c | [
"Apache-2.0"
] | null | null | null | app/map_sup_enrich_compose.py | onap/archive-sdc-dcae-d-tosca-lab | b0120c1671e8987387ccae4f21793ceb303f471c | [
"Apache-2.0"
] | 1 | 2021-10-15T19:47:34.000Z | 2021-10-15T19:47:34.000Z | #Author: Shu Shi
#emaiL: shushi@research.att.com
from toscalib.tosca_workbook import ToscaWorkBook
from toscalib.tosca_builder import ToscaBuilder
import getopt, sys, json, logging
if __name__ == "__main__":
main() | 35.414141 | 187 | 0.582145 |
815a83e6c63111824a00c57370e405a878ff8494 | 2,872 | py | Python | gollama/backend/tests/test_api/test_shorthand.py | benjaminhubbell/gollama | 193e9eddf26d295b9a34474ae7fb93e2a91ef73a | [
"MIT"
] | 1 | 2020-08-26T19:02:25.000Z | 2020-08-26T19:02:25.000Z | gollama/backend/tests/test_api/test_shorthand.py | benjaminhubbell/gollama | 193e9eddf26d295b9a34474ae7fb93e2a91ef73a | [
"MIT"
] | 18 | 2020-06-05T06:42:22.000Z | 2021-06-04T23:51:19.000Z | gollama/backend/tests/test_api/test_shorthand.py | benjaminhubbell/gollama | 193e9eddf26d295b9a34474ae7fb93e2a91ef73a | [
"MIT"
] | 3 | 2020-08-17T02:58:11.000Z | 2020-08-18T00:03:00.000Z | from django.test import TestCase
from rest_framework.test import APIClient
| 43.515152 | 112 | 0.597493 |
815b05fc82778af22f4d9f4104ce255be5442149 | 1,937 | py | Python | lusidtools/lpt/qry_scopes.py | fossabot/lusid-python-tools | 93b2fa8085a0a6550d12d036bd89248aba6e5718 | [
"MIT"
] | 1 | 2020-04-27T12:27:23.000Z | 2020-04-27T12:27:23.000Z | lusidtools/lpt/qry_scopes.py | entityoneuk/lusid-python-tools | ee13d92673d01cfc9f7c427ed053e7a1e8d64973 | [
"MIT"
] | null | null | null | lusidtools/lpt/qry_scopes.py | entityoneuk/lusid-python-tools | ee13d92673d01cfc9f7c427ed053e7a1e8d64973 | [
"MIT"
] | null | null | null | import pandas as pd
import dateutil
from lusidtools.lpt import lpt
from lusidtools.lpt import lse
from lusidtools.lpt import stdargs
from .either import Either
import re
import urllib.parse
rexp = re.compile(r".*page=([^=']{10,}).*")
TOOLNAME = "scopes"
TOOLTIP = "List scopes"
# Standalone tool
| 26.534247 | 85 | 0.575116 |
815d2bb0d4f56879066adfa37185b3b120de6583 | 8,457 | py | Python | qqbot/qqbotcls.py | skarl-api/qqbot | 825ce91c080f4a315860e26df70d687a4ded7159 | [
"MIT"
] | null | null | null | qqbot/qqbotcls.py | skarl-api/qqbot | 825ce91c080f4a315860e26df70d687a4ded7159 | [
"MIT"
] | null | null | null | qqbot/qqbotcls.py | skarl-api/qqbot | 825ce91c080f4a315860e26df70d687a4ded7159 | [
"MIT"
] | 1 | 2020-03-30T08:06:24.000Z | 2020-03-30T08:06:24.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
QQBot -- A conversation robot base on Tencent's SmartQQ
Website -- https://github.com/pandolia/qqbot/
Author -- pandolia@yeah.net
"""
import sys, os
p = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if p not in sys.path:
sys.path.insert(0, p)
import sys, subprocess, time
from apscheduler.schedulers.background import BackgroundScheduler
from collections import defaultdict
from qqbot.qconf import QConf
from qqbot.utf8logger import INFO, CRITICAL, ERROR, WARN
from qqbot.qsession import QLogin, RequestError
from qqbot.exitcode import RESTART, POLL_ERROR, FRESH_RESTART
from qqbot.common import StartDaemonThread, Import
from qqbot.qterm import QTermServer
from qqbot.mainloop import MainLoop, Put
from qqbot.groupmanager import GroupManager
for name, slots in QQBot.slotsTable.items():
setattr(QQBot, name, wrap(slots))
QQBotSlot = QQBot.AddSlot
QQBotSched = QQBot.AddSched
if __name__ == '__main__':
bot = QQBot()
bot.Login(user='hcj')
gl = bot.List('group')
ml = bot.List(gl[0])
m = ml[0]
| 29.262976 | 78 | 0.551614 |
815d74b7b1790e6b997d1b97f06db916a5d075c4 | 65 | py | Python | tests/gear_scripts/buitin_runtime_func_GearsBuilder.py | jsam/redgrease | 245755b34bce287c63abb6624478cdf8189816b6 | [
"MIT"
] | 17 | 2021-02-26T23:03:39.000Z | 2022-01-26T11:21:49.000Z | tests/gear_scripts/buitin_runtime_func_GearsBuilder.py | jsam/redgrease | 245755b34bce287c63abb6624478cdf8189816b6 | [
"MIT"
] | 87 | 2021-02-16T08:54:59.000Z | 2021-08-18T07:21:39.000Z | tests/gear_scripts/buitin_runtime_func_GearsBuilder.py | jsam/redgrease | 245755b34bce287c63abb6624478cdf8189816b6 | [
"MIT"
] | 3 | 2021-04-21T07:57:43.000Z | 2021-10-04T09:13:14.000Z | from redgrease import GearsBuilder
gb = GearsBuilder()
gb.run()
| 13 | 34 | 0.769231 |
815eed7c81aa394ca156f60562a609ed561c8f68 | 4,253 | py | Python | tests/tests.py | ActivityWatch/activitywatch-old | e69b071ff701368cee7bac5d01e5936c200e58be | [
"MIT"
] | 4 | 2017-01-30T16:27:18.000Z | 2017-09-28T19:14:13.000Z | tests/tests.py | ActivityWatch/activitywatch-old | e69b071ff701368cee7bac5d01e5936c200e58be | [
"MIT"
] | null | null | null | tests/tests.py | ActivityWatch/activitywatch-old | e69b071ff701368cee7bac5d01e5936c200e58be | [
"MIT"
] | 2 | 2020-06-22T07:11:51.000Z | 2020-12-11T02:46:22.000Z | from copy import copy
from itertools import groupby
import unittest
from datetime import datetime, timedelta
from typing import List
from activitywatch.base import Watcher, Activity, Logger
from activitywatch.settings import Settings
from activitywatch.utils import floor_datetime, ceil_datetime
from activitywatch.filters.split import split_by_interval, overlaps
from activitywatch.filters.chunk import chunk_by_tags
HOUR = timedelta(hours=1)
| 32.715385 | 105 | 0.652481 |
815ef73f8d3016d9f5e536e42e63dc78b0cafca1 | 254,077 | py | Python | lang/it/basic_vocabulary_it.py | gtoffoli/commons-cops | e4b1f556c550e25bb2e6a9eabe8db963877c08d3 | [
"MIT"
] | 5 | 2016-11-13T02:41:02.000Z | 2020-01-20T10:01:26.000Z | lang/it/basic_vocabulary_it.py | gtoffoli/commons | 8b51a08a37c6d0b38fd4ecde82c20036c2dc168f | [
"MIT"
] | null | null | null | lang/it/basic_vocabulary_it.py | gtoffoli/commons | 8b51a08a37c6d0b38fd4ecde82c20036c2dc168f | [
"MIT"
] | null | null | null | voc_it = [
['a', 'noun', 'c'],
['a', 'preposition', 'a'],
['abbagliante', 'pres_part', 'c'],
['abbagliante', 'adjective', 'c'],
['abbagliante', 'noun', 'c'],
['abbaiare', 'verb', 'c'],
['abbandonare', 'verb', 'a'],
['abbandonato', 'past_part', 'b'],
['abbandonato', 'adjective', 'b'],
['abbandono', 'noun', 'b'],
['abbassare', 'verb', 'a'],
['abbasso', 'adverb', 'c'],
['abbasso', 'exclamation', 'c'],
['abbastanza', 'adverb', 'a'],
['abbattere', 'verb', 'b'],
['abbeverare', 'verb', 'c'],
['abbigliamento', 'noun', 'b'],
['abbinare', 'verb', 'b'],
['abbonamento', 'noun', 'b'],
['abbonare', 'verb', 'c'],
['abbondante', 'pres_part', 'b'],
['abbondante', 'adjective', 'b'],
['abbondare', 'verb', 'c'],
['abbottonare', 'verb', 'c'],
['abbracciare', 'verb', 'a'],
['abbraccio', 'noun', 'b'],
['abbreviare', 'verb', 'c'],
['abbronzare', 'verb', 'c'],
['abete', 'noun', 'c'],
['abile', 'adjective', 'b'],
['abilit', 'noun', 'b'],
['abisso', 'noun', 'b'],
['abitante', 'pres_part', 'b'],
['abitante', 'adjective', 'b'],
['abitante', 'noun', 'b'],
['abitare', 'verb', 'a'],
['abitare', 'noun', 'a'],
['abitazione', 'noun', 'b'],
['abito', 'noun', 'a'],
['abituale', 'adjective', 'b'],
['abituare', 'verb', 'a'],
['abitudine', 'noun', 'a'],
['abolire', 'verb', 'b'],
['abortire', 'verb', 'c'],
['aborto', 'noun', 'c'],
['abruzzese', 'adjective', 'c'],
['abruzzese', 'noun', 'c'],
['abusare', 'verb', 'c'],
['abuso', 'noun', 'b'],
['acca', 'noun', 'c'],
['accademia', 'noun', 'b'],
['accademico', 'adjective', 'b'],
['accademico', 'noun', 'b'],
['accadere', 'verb', 'a'],
['accampamento', 'noun', 'c'],
['accanto', 'adverb', 'a'],
['accappatoio', 'noun', 'c'],
['accarezzare', 'verb', 'b'],
['accattone', 'noun', 'c'],
['accavallare', 'verb', 'c'],
['accecare', 'verb', 'c'],
['accedere', 'verb', 'b'],
['accelerare', 'verb', 'b'],
['acceleratore', 'adjective', 'c'],
['acceleratore', 'noun', 'c'],
['accelerazione', 'noun', 'b'],
['accendere', 'verb', 'a'],
['accendino', 'noun', 'c'],
['accennare', 'verb', 'b'],
['accenno', 'noun', 'c'],
['accentare', 'verb', 'c'],
['accertamento', 'noun', 'b'],
['accertare', 'verb', 'b'],
['acceso', 'past_part', 'b'],
['acceso', 'adjective', 'b'],
['accesso', 'noun', 'a'],
['accessorio', 'adjective', 'b'],
['accessorio', 'noun', 'b'],
['accetta', 'noun', 'c'],
['accettabile', 'adjective', 'b'],
['accettare', 'verb', 'a'],
['acchiappare', 'verb', 'c'],
['acciacco', 'noun', 'c'],
['acciaio', 'noun', 'b'],
['accidente', 'noun', 'b'],
['acciuga', 'noun', 'c'],
['accogliente', 'pres_part', 'c'],
['accogliente', 'adjective', 'c'],
['accoglienza', 'noun', 'b'],
['accogliere', 'verb', 'a'],
['accoltellare', 'verb', 'c'],
['accomodare', 'verb', 'b'],
['accompagnare', 'verb', 'a'],
['acconsentire', 'verb', 'c'],
['accontentare', 'verb', 'b'],
['accorciare', 'verb', 'c'],
['accordare', 'verb', 'b'],
['accordo', 'noun', 'a'],
['accorgersi', 'verb', 'a'],
['accorrere', 'verb', 'c'],
['accostare', 'verb', 'b'],
['accudire', 'verb', 'c'],
['accumulare', 'verb', 'b'],
['accumulatore', 'adjective', 'c'],
['accumulatore', 'noun', 'c'],
['accurato', 'past_part', 'b'],
['accurato', 'adjective', 'b'],
['accusa', 'noun', 'a'],
['accusare', 'verb', 'a'],
['accento', 'noun', 'b'],
['acerbo', 'adjective', 'c'],
['aceto', 'noun', 'c'],
['acido', 'adjective', 'b'],
['acido', 'noun', 'b'],
['acqua', 'noun', 'a'],
['acquarello', 'noun', 'c'],
['acquario', 'noun', 'c'],
['acquasanta', 'noun', 'c'],
['acquisire', 'verb', 'b'],
['acquisizione', 'noun', 'b'],
['acquistare', 'verb', 'a'],
['acquisto', 'noun', 'a'],
['acquolina', 'noun', 'c'],
['acrobata', 'noun', 'c'],
['acuto', 'adjective', 'b'],
['acuto', 'noun', 'b'],
['adattare', 'verb', 'b'],
['adattatore', 'noun', 'c'],
['adatto', 'adjective', 'a'],
['addetto', 'past_part', 'b'],
['addetto', 'adjective', 'b'],
['addetto', 'noun', 'b'],
['addio', 'exclamation', 'b'],
['addio', 'noun', 'b'],
['addirittura', 'adverb', 'a'],
['addizione', 'noun', 'c'],
['addobbare', 'verb', 'c'],
['addolcire', 'verb', 'c'],
['addomesticare', 'verb', 'c'],
['addormentarsi', 'verb', 'b'],
['addormentato', 'past_part', 'c'],
['addormentato', 'adjective', 'c'],
['addossare', 'verb', 'a'],
['addosso', 'adverb', 'c'],
['addosso', 'exclamation', 'c'],
['addrizzare', 'verb', 'c'],
['adeguare', 'verb', 'b'],
['adeguato', 'past_part', 'b'],
['adeguato', 'adjective', 'b'],
['adeguato', 'noun', 'b'],
['aderente', 'pres_part', 'c'],
['aderente', 'adjective', 'c'],
['aderente', 'noun', 'c'],
['aderire', 'verb', 'b'],
['adesione', 'noun', 'b'],
['adesso', 'adverb', 'a'],
['adolescente', 'adjective', 'a'],
['adolescente', 'noun', 'a'],
['adolescenza', 'noun', 'b'],
['adoperare', 'verb', 'b'],
['adorare', 'verb', 'a'],
['adottare', 'verb', 'a'],
['adozione', 'noun', 'b'],
['adriatico', 'adjective', 'c'],
['adulto', 'adjective', 'a'],
['adulto', 'noun', 'a'],
['aereo', 'adjective', 'a'],
['aereo', 'noun', 'a'],
['aereo', 'noun', 'b'],
['aeroplano', 'noun', 'c'],
['aeroporto', 'noun', 'b'],
['afa', 'noun', 'c'],
['affacciare', 'verb', 'b'],
['affamare', 'verb', 'c'],
['affamato', 'past_part', 'c'],
['affamato', 'adjective', 'c'],
['affamato', 'noun', 'c'],
['affannarsi', 'verb', 'c'],
['affannato', 'past_part', 'c'],
['affannato', 'adjective', 'c'],
['affanno', 'noun', 'c'],
['affare', 'noun', 'a'],
['affascinante', 'pres_part', 'b'],
['affascinante', 'adjective', 'b'],
['affascinare', 'verb', 'b'],
['affaticare', 'verb', 'c'],
['affatto', 'adverb', 'a'],
['affermare', 'verb', 'a'],
['affermazione', 'noun', 'b'],
['afferrare', 'verb', 'b'],
['affettare', 'verb', 'c'],
['affettato', 'past_part', 'c'],
['affettato', 'adjective', 'c'],
['affettato', 'noun', 'c'],
['affetto', 'noun', 'b'],
['affetto', 'adjective', 'b'],
['affettuoso', 'adjective', 'b'],
['affezionato', 'past_part', 'c'],
['affezionato', 'adjective', 'c'],
['affiancare', 'verb', 'b'],
['affidamento', 'noun', 'b'],
['affidare', 'verb', 'a'],
['affilato', 'past_part', 'c'],
['affilato', 'adjective', 'c'],
['affinch', 'conjunction', 'b'],
['affittare', 'verb', 'b'],
['affitto', 'noun', 'b'],
['affogare', 'verb', 'c'],
['affollare', 'verb', 'c'],
['affondare', 'verb', 'b'],
['affresco', 'noun', 'b'],
['affrontare', 'verb', 'a'],
['affumicare', 'verb', 'c'],
['africano', 'adjective', 'b'],
['africano', 'noun', 'b'],
['agenda', 'noun', 'b'],
['agente', 'pres_part', 'a'],
['agente', 'adjective', 'a'],
['agente', 'noun', 'a'],
['agenzia', 'noun', 'a'],
['agganciare', 'verb', 'b'],
['aggettivo', 'noun', 'b'],
['aggiornamento', 'noun', 'b'],
['aggiornare', 'verb', 'b'],
['aggirare', 'verb', 'b'],
['aggiungere', 'verb', 'a'],
['aggiustare', 'verb', 'b'],
['aggrapparsi', 'verb', 'b'],
['aggravare', 'verb', 'c'],
['aggredire', 'verb', 'b'],
['aggressione', 'noun', 'b'],
['aggressivo', 'adjective', 'b'],
['agiato', 'past_part', 'c'],
['agiato', 'adjective', 'c'],
['agile', 'adjective', 'c'],
['agio', 'noun', 'b'],
['agire', 'verb', 'a'],
['agitare', 'verb', 'b'],
['agitazione', 'noun', 'b'],
['aglio', 'noun', 'c'],
['agnello', 'noun', 'b'],
['ago', 'noun', 'b'],
['agonia', 'noun', 'c'],
['agosto', 'noun', 'a'],
['agricolo', 'adjective', 'b'],
['agricoltore', 'noun', 'c'],
['agricoltura', 'noun', 'b'],
['agrume', 'noun', 'c'],
['aguzzare', 'verb', 'c'],
['aguzzo', 'adjective', 'c'],
['aiuola', 'noun', 'c'],
['aiutare', 'verb', 'a'],
['aiuto', 'noun', 'a'],
['aiuto', 'exclamation', 'a'],
['ala', 'noun', 'a'],
['alba', 'noun', 'a'],
['albanese', 'adjective', 'b'],
['albanese', 'noun', 'b'],
['albergo', 'noun', 'a'],
['albero', 'noun', 'a'],
['albicocca', 'noun', 'c'],
['albicocca', 'adjective', 'c'],
['album', 'noun', 'a'],
['alcol', 'noun', 'b'],
['alcuno', 'adjective', 'a'],
['alcuno', 'pronoun', 'a'],
['alfabeto', 'noun', 'c'],
['alga', 'noun', 'c'],
['algerino', 'adjective', 'c'],
['algerino', 'noun', 'c'],
['alieno', 'adjective', 'b'],
['alieno', 'noun', 'b'],
['alimentare', 'adjective', 'b'],
['alimentare', 'noun', 'b'],
['alimentare', 'verb', 'b'],
['alimentari', 'noun', 'c'],
['alimentazione', 'noun', 'b'],
['alimento', 'noun', 'b'],
['alito', 'noun', 'c'],
['allacciare', 'verb', 'c'],
['allagare', 'verb', 'c'],
['allargare', 'verb', 'b'],
['allarmare', 'verb', 'c'],
['allarme', 'noun', 'b'],
['allattare', 'verb', 'c'],
['alleanza', 'noun', 'b'],
['allearsi', 'verb', 'c'],
['alleato', 'past_part', 'b'],
['alleato', 'adjective', 'b'],
['alleato', 'noun', 'b'],
['allegato', 'past_part', 'b'],
['allegato', 'adjective', 'b'],
['allegato', 'noun', 'b'],
['alleggerire', 'verb', 'c'],
['allegria', 'noun', 'b'],
['allegro', 'adjective', 'b'],
['allegro', 'adverb', 'b'],
['allegro', 'noun', 'b'],
['allenamento', 'noun', 'b'],
['allenare', 'verb', 'b'],
['allenatore', 'adjective', 'b'],
['allenatore', 'noun', 'b'],
['allentare', 'verb', 'c'],
['allergia', 'noun', 'c'],
['allevare', 'verb', 'b'],
['allievo', 'noun', 'b'],
['allineare', 'verb', 'c'],
['alloggio', 'noun', 'b'],
['allontanare', 'verb', 'a'],
['allora', 'adverb', 'a'],
['allora', 'conjunction', 'a'],
['alluce', 'noun', 'c'],
['alludere', 'verb', 'b'],
['alluminio', 'noun', 'c'],
['allungare', 'verb', 'a'],
['alluvione', 'noun', 'c'],
['almeno', 'adverb', 'a'],
['alquanto', 'adjective', 'b'],
['alquanto', 'pronoun', 'b'],
['alquanto', 'adverb', 'b'],
['altalena', 'noun', 'c'],
['altamente', 'adverb', 'b'],
['altare', 'noun', 'b'],
['alterare', 'verb', 'b'],
['alternare', 'verb', 'b'],
['alternativa', 'noun', 'b'],
['alternativo', 'adjective', 'b'],
['alterno', 'adjective', 'c'],
['altezza', 'noun', 'a'],
['alto', 'adjective', 'a'],
['alto', 'noun', 'a'],
['alto', 'adverb', 'a'],
['altoatesino', 'adjective', 'c'],
['altoatesino', 'noun', 'c'],
['altopiano', 'noun', 'c'],
['altrettanto', 'adjective', 'a'],
['altrettanto', 'pronoun', 'a'],
['altrettanto', 'adverb', 'a'],
['altrimenti', 'adverb', 'a'],
['altro', 'adjective', 'a'],
['altro', 'pronoun', 'a'],
['altro', 'adverb', 'a'],
['altrove', 'adverb', 'b'],
['altrui', 'adjective', 'b'],
['altrui', 'pronoun', 'b'],
['alunno', 'noun', 'b'],
['alveare', 'noun', 'c'],
['alzare', 'verb', 'a'],
['amante', 'pres_part', 'a'],
['amante', 'adjective', 'a'],
['amante', 'noun', 'a'],
['amare', 'verb', 'a'],
['amaro', 'adjective', 'b'],
['amaro', 'noun', 'b'],
['amato', 'past_part', 'b'],
['amato', 'adjective', 'b'],
['amato', 'noun', 'b'],
['ambasciata', 'noun', 'c'],
['ambientale', 'adjective', 'a'],
['ambientare', 'verb', 'b'],
['ambiente', 'noun', 'a'],
['ambiente', 'adjective', 'a'],
['ambito', 'noun', 'a'],
['ambizione', 'noun', 'b'],
['ambulanza', 'noun', 'b'],
['americano', 'adjective', 'a'],
['americano', 'noun', 'a'],
['amicizia', 'noun', 'a'],
['amico', 'adjective', 'a'],
['amico', 'noun', 'a'],
['ammaccare', 'verb', 'c'],
['ammalarsi', 'verb', 'b'],
['ammalato', 'past_part', 'c'],
['ammalato', 'adjective', 'c'],
['ammalato', 'noun', 'c'],
['ammanettare', 'verb', 'c'],
['ammassare', 'verb', 'c'],
['ammasso', 'noun', 'c'],
['ammazzare', 'verb', 'a'],
['ammettere', 'verb', 'a'],
['amministrativo', 'adjective', 'b'],
['amministrativo', 'noun', 'b'],
['amministratore', 'noun', 'b'],
['amministrazione', 'noun', 'a'],
['ammirare', 'verb', 'b'],
['ammissione', 'noun', 'b'],
['ammobiliare', 'verb', 'c'],
['ammoniaca', 'noun', 'c'],
['ammorbidente', 'pres_part', 'c'],
['ammorbidente', 'adjective', 'c'],
['ammorbidente', 'noun', 'c'],
['ammucchiare', 'verb', 'c'],
['ammuffire', 'verb', 'c'],
['amore', 'noun', 'a'],
['amoroso', 'adjective', 'b'],
['amoroso', 'noun', 'b'],
['ampiamente', 'adverb', 'b'],
['ampio', 'adjective', 'a'],
['ampio', 'noun', 'a'],
['amplificatore', 'adjective', 'c'],
['amplificatore', 'noun', 'c'],
['analcolico', 'adjective', 'c'],
['analcolico', 'noun', 'c'],
['analfabeta', 'adjective', 'c'],
['analfabeta', 'noun', 'c'],
['analisi', 'noun', 'a'],
['analitico', 'adjective', 'b'],
['analizzare', 'verb', 'a'],
['analogo', 'adjective', 'b'],
['ananas', 'noun', 'c'],
['anarchico', 'adjective', 'c'],
['anarchico', 'noun', 'c'],
['anatra', 'noun', 'c'],
['anche', 'conjunction', 'a'],
['anche', 'adverb', 'a'],
['anconetano', 'adjective', 'c'],
['anconetano', 'noun', 'c'],
['ancora', 'adverb', 'a'],
['ancora', 'conjunction', 'a'],
['ancorare', 'verb', 'b'],
['andamento', 'noun', 'b'],
['andare', 'verb', 'a'],
['andata', 'noun', 'c'],
['anello', 'noun', 'a'],
['angelo', 'noun', 'a'],
['angolare', 'adjective', 'b'],
['angolare', 'noun', 'b'],
['angolo', 'noun', 'a'],
['angoscia', 'noun', 'b'],
['anima', 'noun', 'a'],
['animale', 'noun', 'a'],
['animale', 'adjective', 'b'],
['animare', 'verb', 'a'],
['animato', 'past_part', 'b'],
['animato', 'adjective', 'b'],
['animato', 'adverb', 'b'],
['animo', 'noun', 'b'],
['animo', 'exclamation', 'b'],
['annacquare', 'verb', 'c'],
['annaffiare', 'verb', 'c'],
['annebbiare', 'verb', 'c'],
['anniversario', 'noun', 'b'],
['anniversario', 'adjective', 'b'],
['anno', 'noun', 'a'],
['annodare', 'verb', 'c'],
['annoiare', 'verb', 'b'],
['annotare', 'verb', 'b'],
['annuale', 'adjective', 'b'],
['annuale', 'noun', 'b'],
['annuire', 'verb', 'b'],
['annullare', 'verb', 'b'],
['annunciare', 'verb', 'a'],
['annuncio', 'noun', 'b'],
['annusare', 'verb', 'c'],
['anonimo', 'adjective', 'b'],
['anonimo', 'noun', 'b'],
['ansia', 'noun', 'a'],
['ansioso', 'adjective', 'b'],
['ansioso', 'noun', 'b'],
['antartico', 'adjective', 'c'],
['antartico', 'noun', 'c'],
['antenna', 'noun', 'b'],
['anteprima', 'noun', 'b'],
['anteriore', 'adjective', 'b'],
['anticalcare', 'adjective', 'c'],
['antichit', 'noun', 'c'],
['anticipare', 'verb', 'b'],
['anticipo', 'noun', 'b'],
['antico', 'adjective', 'a'],
['antico', 'noun', 'a'],
['antipasto', 'noun', 'c'],
['antirughe', 'adjective', 'c'],
['antirughe', 'noun', 'c'],
['antropologia', 'noun', 'b'],
['anulare', 'adjective', 'c'],
['anulare', 'noun', 'c'],
['anzi', 'adverb', 'a'],
['anzi', 'preposition', 'a'],
['anziano', 'adjective', 'a'],
['anziano', 'noun', 'a'],
['anzich', 'conjunction', 'b'],
['aostano', 'adjective', 'c'],
['aostano', 'noun', 'c'],
['ape', 'noun', 'b'],
['aperitivo', 'noun', 'c'],
['aperitivo', 'adjective', 'c'],
['aperto', 'past_part', 'a'],
['aperto', 'adjective', 'a'],
['aperto', 'noun', 'a'],
['aperto', 'adverb', 'a'],
['apertura', 'noun', 'a'],
['aspettativa', 'noun', 'b'],
['apostolo', 'noun', 'c'],
['appalto', 'noun', 'b'],
['appannare', 'verb', 'c'],
['apparato', 'noun', 'b'],
['apparecchiare', 'verb', 'c'],
['apparecchiatura', 'noun', 'c'],
['apparecchio', 'noun', 'b'],
['apparente', 'pres_part', 'b'],
['apparente', 'adjective', 'b'],
['apparentemente', 'adverb', 'b'],
['apparenza', 'noun', 'b'],
['apparire', 'verb', 'a'],
['apparizione', 'noun', 'b'],
['appartamento', 'noun', 'a'],
['appartenenza', 'noun', 'b'],
['appartenere', 'verb', 'a'],
['appassionare', 'verb', 'b'],
['appassionarsi', 'verb', 'c'],
['appassionato', 'past_part', 'b'],
['appassionato', 'adjective', 'b'],
['appassionato', 'noun', 'b'],
['appello', 'noun', 'b'],
['appena', 'adverb', 'a'],
['appena', 'conjunction', 'a'],
['appendere', 'verb', 'b'],
['appendicite', 'noun', 'c'],
['appenninico', 'adjective', 'c'],
['appeso', 'past_part', 'c'],
['appeso', 'adjective', 'c'],
['appeso', 'noun', 'c'],
['appiccicare', 'verb', 'c'],
['appiglio', 'noun', 'c'],
['applauso', 'noun', 'b'],
['applicare', 'verb', 'a'],
['applicazione', 'noun', 'b'],
['appoggiare', 'verb', 'a'],
['appoggio', 'noun', 'b'],
['apposito', 'adjective', 'b'],
['apposta', 'adverb', 'b'],
['apposta', 'adjective', 'b'],
['apprendere', 'verb', 'b'],
['apprendimento', 'noun', 'b'],
['apprendista', 'noun', 'c'],
['apprezzare', 'verb', 'a'],
['approccio', 'noun', 'b'],
['approfittare', 'verb', 'b'],
['approfondimento', 'noun', 'b'],
['approfondire', 'verb', 'b'],
['approvare', 'verb', 'b'],
['approvazione', 'noun', 'b'],
['appuntamento', 'noun', 'a'],
['appuntire', 'verb', 'c'],
['appunto', 'noun', 'b'],
['appunto', 'adverb', 'a'],
['aprile', 'noun', 'a'],
['aprire', 'verb', 'a'],
['apriscatole', 'noun', 'c'],
['aquila', 'noun', 'c'],
['aquilano', 'adjective', 'c'],
['aquilano', 'noun', 'c'],
['aquilone', 'noun', 'c'],
['arabo', 'adjective', 'a'],
['arabo', 'noun', 'a'],
['arachide', 'noun', 'c'],
['aragosta', 'noun', 'c'],
['aranciata', 'noun', 'c'],
['arancio', 'noun', 'c'],
['arare', 'verb', 'c'],
['aratro', 'noun', 'c'],
['arbitro', 'noun', 'b'],
['archeologo', 'noun', 'c'],
['architettare', 'verb', 'b'],
['architetto', 'noun', 'b'],
['architettonico', 'adjective', 'b'],
['architettura', 'noun', 'b'],
['archiviare', 'verb', 'b'],
['archivio', 'noun', 'b'],
['arco', 'noun', 'a'],
['arcobaleno', 'noun', 'c'],
['area', 'noun', 'a'],
['argentino', 'adjective', 'b'],
['argentino', 'noun', 'b'],
['argento', 'noun', 'b'],
['argomentare', 'verb', 'b'],
['argomentazione', 'noun', 'b'],
['argomento', 'noun', 'a'],
['aria', 'noun', 'a'],
['aristocratico', 'adjective', 'c'],
['aristocratico', 'noun', 'c'],
['aritmetica', 'noun', 'c'],
['aritmetico', 'adjective', 'c'],
['aritmetico', 'noun', 'c'],
['arma', 'noun', 'a'],
['armadio', 'noun', 'b'],
['armamento', 'noun', 'c'],
['armare', 'verb', 'b'],
['armato', 'past_part', 'b'],
['armato', 'adjective', 'b'],
['armato', 'noun', 'b'],
['armonia', 'noun', 'b'],
['aroma', 'noun', 'c'],
['arrabbiarsi', 'verb', 'a'],
['arrampicarsi', 'verb', 'b'],
['arredamento', 'noun', 'b'],
['arredare', 'verb', 'c'],
['arrendersi', 'verb', 'b'],
['arrendersi', 'verb', 'c'],
['arrestare', 'verb', 'a'],
['arresto', 'noun', 'b'],
['arricchire', 'verb', 'b'],
['arrivare', 'verb', 'a'],
['arrivederci', 'exclamation', 'b'],
['arrivederci', 'noun', 'b'],
['arrivo', 'noun', 'a'],
['arrosto', 'noun', 'c'],
['arrosto', 'adjective', 'c'],
['arrosto', 'adverb', 'c'],
['arrugginire', 'verb', 'c'],
['arte', 'noun', 'a'],
['arteria', 'noun', 'b'],
['artico', 'adjective', 'c'],
['artico', 'noun', 'c'],
['articolare', 'verb', 'b'],
['articolare', 'noun', 'b'],
['articolazione', 'noun', 'b'],
['articolo', 'noun', 'a'],
['artificiale', 'adjective', 'b'],
['artigianale', 'adjective', 'c'],
['artigiano', 'noun', 'b'],
['artigiano', 'adjective', 'b'],
['artiglieria', 'noun', 'c'],
['artiglio', 'noun', 'c'],
['artista', 'noun', 'a'],
['artistico', 'adjective', 'a'],
['artistico', 'noun', 'a'],
['ascella', 'noun', 'c'],
['ascensore', 'noun', 'b'],
['ascesa', 'noun', 'b'],
['ascesso', 'noun', 'c'],
['ascia', 'noun', 'c'],
['asciugamano', 'noun', 'b'],
['asciugare', 'verb', 'b'],
['asciutto', 'adjective', 'b'],
['asciutto', 'noun', 'b'],
['ascoltare', 'verb', 'a'],
['ascolto', 'noun', 'b'],
['asfaltare', 'verb', 'c'],
['asfalto', 'noun', 'c'],
['asiatico', 'adjective', 'b'],
['asiatico', 'noun', 'b'],
['asilo', 'noun', 'b'],
['asino', 'noun', 'b'],
['asma', 'noun', 'c'],
['asparago', 'noun', 'c'],
['aspettare', 'verb', 'a'],
['aspetto', 'noun', 'a'],
['aspirapolvere', 'noun', 'c'],
['aspirare', 'verb', 'b'],
['aspirazione', 'noun', 'b'],
['aspro', 'adjective', 'b'],
['aspro', 'noun', 'b'],
['assaggiare', 'verb', 'b'],
['assaggio', 'noun', 'c'],
['assai', 'adverb', 'a'],
['assai', 'adjective', 'a'],
['assai', 'noun', 'a'],
['assalire', 'verb', 'c'],
['assaltare', 'verb', 'c'],
['assalto', 'noun', 'b'],
['assaporare', 'verb', 'c'],
['assassinare', 'verb', 'b'],
['assassinio', 'noun', 'c'],
['assassino', 'noun', 'b'],
['assassino', 'adjective', 'b'],
['asse', 'noun', 'b'],
['assediare', 'verb', 'c'],
['assegnare', 'verb', 'b'],
['assegno', 'noun', 'b'],
['assemblea', 'noun', 'b'],
['assente', 'adjective', 'b'],
['assente', 'noun', 'b'],
['assenza', 'noun', 'a'],
['assicurare', 'verb', 'a'],
['assicurazione', 'noun', 'b'],
['assieme', 'adverb', 'a'],
['assieme', 'noun', 'a'],
['assistente', 'pres_part', 'b'],
['assistente', 'adjective', 'b'],
['assistente', 'noun', 'b'],
['assistenza', 'noun', 'b'],
['assistere', 'verb', 'a'],
['associare', 'verb', 'b'],
['associazione', 'noun', 'a'],
['assolutamente', 'adverb', 'a'],
['assoluto', 'adjective', 'a'],
['assoluto', 'noun', 'a'],
['assoluzione', 'noun', 'c'],
['assolvere', 'verb', 'b'],
['assomigliare', 'verb', 'b'],
['assorbente', 'pres_part', 'c'],
['assorbente', 'adjective', 'c'],
['assorbente', 'noun', 'c'],
['assorbire', 'verb', 'b'],
['assordare', 'verb', 'c'],
['assumere', 'verb', 'a'],
['assunzione', 'noun', 'b'],
['assurdo', 'adjective', 'a'],
['assurdo', 'noun', 'a'],
['asta', 'noun', 'b'],
['astemio', 'adjective', 'c'],
['astemio', 'noun', 'c'],
['astratto', 'past_part', 'b'],
['astratto', 'adjective', 'b'],
['astratto', 'noun', 'b'],
['astronave', 'noun', 'c'],
['astuccio', 'noun', 'c'],
['astuto', 'adjective', 'c'],
['astuto', 'noun', 'c'],
['astuzia', 'noun', 'c'],
['ateniese', 'adjective', 'c'],
['ateniese', 'noun', 'c'],
['ateo', 'adjective', 'b'],
['ateo', 'noun', 'b'],
['atlantico', 'adjective', 'c'],
['atleta', 'noun', 'b'],
['atmosfera', 'noun', 'a'],
['atomica', 'noun', 'c'],
['atomico', 'adjective', 'b'],
['atomo', 'noun', 'b'],
['atrio', 'noun', 'c'],
['atroce', 'adjective', 'b'],
['attaccante', 'pres_part', 'c'],
['attaccante', 'adjective', 'c'],
['attaccante', 'noun', 'c'],
['attaccapanni', 'noun', 'c'],
['attaccare', 'verb', 'a'],
['attacco', 'noun', 'a'],
['atteggiamento', 'noun', 'a'],
['atteggiare', 'verb', 'c'],
['attendere', 'verb', 'a'],
['attenere', 'verb', 'b'],
['attentamente', 'adverb', 'b'],
['attentare', 'verb', 'c'],
['attentato', 'noun', 'b'],
['attento', 'adjective', 'a'],
['attenzione', 'noun', 'a'],
['atterraggio', 'noun', 'c'],
['atterrare', 'verb', 'b'],
['attesa', 'noun', 'a'],
['attestare', 'verb', 'b'],
['attimo', 'noun', 'a'],
['attingere', 'verb', 'b'],
['attirare', 'verb', 'b'],
['attivare', 'verb', 'b'],
['attività', 'noun', 'a'],
['attivo', 'adjective', 'a'],
['attivo', 'noun', 'a'],
['atto', 'noun', 'a'],
['attore', 'noun', 'a'],
['attorno', 'adverb', 'a'],
['attrarre', 'verb', 'b'],
['attraversare', 'verb', 'a'],
['attraverso', 'preposition', 'a'],
['attraverso', 'adverb', 'a'],
['attrazione', 'noun', 'b'],
['attrezzare', 'verb', 'b'],
['attrezzatura', 'noun', 'b'],
['attrezzo', 'noun', 'b'],
['attribuire', 'verb', 'b'],
['attrice', 'noun', 'b'],
['attuale', 'adjective', 'a'],
['attualità', 'noun', 'b'],
['attualmente', 'adverb', 'b'],
['attuare', 'verb', 'b'],
['augurare', 'verb', 'b'],
['augurio', 'noun', 'b'],
['aula', 'noun', 'b'],
['aumentare', 'verb', 'a'],
['aumento', 'noun', 'a'],
['australiano', 'adjective', 'c'],
['australiano', 'noun', 'c'],
['austriaco', 'adjective', 'b'],
['austriaco', 'noun', 'b'],
['autentico', 'adjective', 'b'],
['autentico', 'noun', 'b'],
['autista', 'noun', 'b'],
['auto', 'noun', 'a'],
['autoambulanza', 'noun', 'c'],
['autobotte', 'noun', 'c'],
['autobus', 'noun', 'b'],
['autografo', 'adjective', 'c'],
['autografo', 'noun', 'c'],
['automaticamente', 'adverb', 'b'],
['automatico', 'adjective', 'b'],
['automatico', 'noun', 'b'],
['automobile', 'noun', 'b'],
['automobilista', 'noun', 'c'],
['autonomia', 'noun', 'b'],
['autonomo', 'adjective', 'b'],
['autonomo', 'noun', 'b'],
['autore', 'noun', 'a'],
['autorevole', 'adjective', 'c'],
['autorità', 'noun', 'a'],
['autorizzare', 'verb', 'a'],
['autoscontro', 'noun', 'c'],
['autoscuola', 'noun', 'c'],
['autostop', 'noun', 'c'],
['autostrada', 'noun', 'b'],
['autotreno', 'noun', 'c'],
['autunno', 'noun', 'b'],
['avambraccio', 'noun', 'c'],
['avanguardia', 'noun', 'b'],
['avanti', 'adverb', 'a'],
['avanti', 'adjective', 'a'],
['avanti', 'loc-comando', 'a'],
['avanti', 'preposition', 'a'],
['avanti', 'noun', 'a'],
['avanzare', 'verb', 'a'],
['avanzato', 'past_part', 'b'],
['avanzato', 'adjective', 'b'],
['avanzo', 'noun', 'c'],
['avarizia', 'noun', 'c'],
['avaro', 'adjective', 'c'],
['avaro', 'noun', 'c'],
['avena', 'noun', 'c'],
['avere', 'verb', 'a'],
['aviazione', 'noun', 'c'],
['avvantaggiare', 'verb', 'c'],
['avvelenare', 'verb', 'b'],
['avvelenato', 'past_part', 'c'],
['avvelenato', 'adjective', 'c'],
['avvenimento', 'noun', 'b'],
['avvenire', 'adjective', 'a'],
['avvenire', 'noun', 'a'],
['avventura', 'noun', 'a'],
['avverare', 'verb', 'c'],
['avversario', 'noun', 'b'],
['avvertire', 'verb', 'a'],
['avviamento', 'noun', 'c'],
['avviare', 'verb', 'a'],
['avvicinare', 'verb', 'a'],
['avvio', 'noun', 'b'],
['avvisare', 'verb', 'b'],
['avviso', 'noun', 'b'],
['avvitare', 'verb', 'c'],
['avvocato', 'noun', 'a'],
['avvolgere', 'verb', 'b'],
['azienda', 'noun', 'a'],
['aziendale', 'adjective', 'b'],
['azione', 'noun', 'a'],
['azione', 'noun', 'b'],
['azzardare', 'verb', 'b'],
['azzardo', 'noun', 'c'],
['azzurro', 'noun', 'a'],
['azzurro', 'adjective', 'a'],
['babbo', 'noun', 'b'],
['baby', 'noun', 'b'],
['baby', 'adjective', 'b'],
['babydoll', 'noun', 'c'],
['bacca', 'noun', 'c'],
['baccalà', 'noun', 'c'],
['bacheca', 'noun', 'b'],
['baciare', 'verb', 'a'],
['bacinella', 'noun', 'c'],
['bacino', 'noun', 'b'],
['bacio', 'noun', 'a'],
['baco', 'noun', 'c'],
['badare', 'verb', 'b'],
['baffo', 'noun', 'b'],
['bagagliaio', 'noun', 'c'],
['bagaglio', 'noun', 'b'],
['bagnare', 'verb', 'b'],
['bagnato', 'past_part', 'b'],
['bagnato', 'adjective', 'b'],
['bagnato', 'noun', 'b'],
['bagno', 'noun', 'a'],
['bagnoschiuma', 'noun', 'c'],
['balcone', 'noun', 'b'],
['balena', 'noun', 'b'],
['balia', 'noun', 'b'],
['ballare', 'verb', 'a'],
['ballerina', 'noun', 'c'],
['ballerino', 'noun', 'c'],
['ballerino', 'adjective', 'c'],
['balletto', 'noun', 'c'],
['ballo', 'noun', 'b'],
['balsamo', 'noun', 'c'],
['bambina', 'noun', 'a'],
['bambinaia', 'noun', 'c'],
['bambino', 'noun', 'a'],
['bambino', 'adjective', 'a'],
['bambola', 'noun', 'b'],
['banale', 'adjective', 'b'],
['banana', 'noun', 'c'],
['banca', 'noun', 'a'],
['bancarella', 'noun', 'c'],
['bancario', 'adjective', 'b'],
['bancario', 'noun', 'b'],
['banco', 'noun', 'b'],
['bancone', 'noun', 'b'],
['band', 'noun', 'b'],
['banda', 'noun', 'b'],
['bandiera', 'noun', 'b'],
['bando', 'noun', 'b'],
['bar', 'noun', 'a'],
['bara', 'noun', 'b'],
['baracca', 'noun', 'c'],
['barba', 'noun', 'b'],
['barbabietola', 'noun', 'c'],
['barbaro', 'adjective', 'b'],
['barbaro', 'noun', 'b'],
['barca', 'noun', 'a'],
['barella', 'noun', 'c'],
['barese', 'adjective', 'c'],
['barese', 'noun', 'c'],
['barile', 'noun', 'c'],
['barista', 'noun', 'c'],
['barriera', 'noun', 'b'],
['basare', 'verb', 'a'],
['base', 'noun', 'a'],
['basetta', 'noun', 'c'],
['basilica', 'noun', 'b'],
['basilico', 'noun', 'c'],
['basket', 'noun', 'c'],
['basso', 'adjective', 'a'],
['basso', 'noun', 'a'],
['basso', 'adverb', 'a'],
['bastardo', 'adjective', 'b'],
['bastardo', 'noun', 'b'],
['bastare', 'verb', 'a'],
['bastonare', 'verb', 'c'],
['bastone', 'noun', 'b'],
['battaglia', 'noun', 'a'],
['battello', 'noun', 'c'],
['battere', 'verb', 'a'],
['battere', 'noun', 'a'],
['batteria', 'noun', 'b'],
['batterio', 'noun', 'b'],
['batticuore', 'noun', 'c'],
['battipanni', 'noun', 'c'],
['battito', 'noun', 'c'],
['battuta', 'noun', 'a'],
['batuffolo', 'noun', 'c'],
['baule', 'noun', 'c'],
['bava', 'noun', 'c'],
['bavaglio', 'noun', 'c'],
['beato', 'past_part', 'b'],
['beato', 'adjective', 'b'],
['beato', 'noun', 'b'],
['beccare', 'verb', 'b'],
['befana', 'noun', 'c'],
['beffa', 'noun', 'c'],
['beh', 'exclamation', 'a'],
['belare', 'verb', 'c'],
['belga', 'adjective', 'c'],
['belga', 'noun', 'c'],
['bella', 'noun', 'b'],
['bellezza', 'noun', 'a'],
['bello', 'adjective', 'a'],
['bello', 'noun', 'a'],
['benché', 'conjunction', 'b'],
['benda', 'noun', 'c'],
['bene', 'adverb', 'a'],
['bene', 'exclamation', 'a'],
['bene', 'noun', 'a'],
['benedetto', 'past_part', 'b'],
['benedetto', 'adjective', 'b'],
['benedetto', 'noun', 'b'],
['beneficenza', 'noun', 'c'],
['beneficio', 'noun', 'b'],
['benessere', 'noun', 'b'],
['benestante', 'adjective', 'c'],
['benestante', 'noun', 'c'],
['bensì', 'conjunction', 'b'],
['bensì', 'adverb', 'b'],
['benvenuto', 'adjective', 'b'],
['benvenuto', 'noun', 'b'],
['benzina', 'noun', 'b'],
['benzinaio', 'noun', 'c'],
['bere', 'verb', 'a'],
['bere', 'noun', 'a'],
['berlinese', 'adjective', 'c'],
['berlinese', 'noun', 'c'],
['berretto', 'noun', 'c'],
['bersaglio', 'noun', 'b'],
['besciamella', 'noun', 'c'],
['bestemmia', 'noun', 'c'],
['bestia', 'noun', 'b'],
['bestiale', 'adjective', 'c'],
['bevanda', 'noun', 'b'],
['bevitore', 'noun', 'c'],
['bevuta', 'noun', 'c'],
['bi', 'noun', 'c'],
['bianco', 'adjective', 'a'],
['bianco', 'noun', 'a'],
['bibbia', 'noun', 'b'],
['bibita', 'noun', 'c'],
['biblico', 'adjective', 'b'],
['biblico', 'noun', 'b'],
['bibliografia', 'noun', 'b'],
['biblioteca', 'noun', 'b'],
['bicchiere', 'noun', 'a'],
['bici', 'noun', 'b'],
['bicicletta', 'noun', 'b'],
['bidè', 'noun', 'c'],
['bidello', 'noun', 'c'],
['biglia', 'noun', 'c'],
['biglietteria', 'noun', 'c'],
['biglietto', 'noun', 'a'],
['bikini', 'noun', 'c'],
['bilancia', 'noun', 'b'],
['bilancio', 'noun', 'b'],
['biliardo', 'noun', 'c'],
['bimba', 'noun', 'b'],
['bimbo', 'noun', 'b'],
['binario', 'noun', 'c'],
['biografia', 'noun', 'b'],
['biologia', 'noun', 'b'],
['biologico', 'adjective', 'b'],
['biologico', 'noun', 'b'],
['bionda', 'noun', 'b'],
['biondo', 'adjective', 'b'],
['biondo', 'noun', 'b'],
['birichino', 'noun', 'c'],
['birichino', 'adjective', 'c'],
['birillo', 'noun', 'c'],
['birra', 'noun', 'b'],
['bisbigliare', 'verb', 'c'],
['biscia', 'noun', 'c'],
['biscotto', 'adjective', 'b'],
['biscotto', 'noun', 'b'],
['bisnonno', 'noun', 'c'],
['bisognare', 'verb', 'a'],
['bisogno', 'noun', 'a'],
['bistecca', 'noun', 'c'],
['bistecchiera', 'noun', 'c'],
['bisticciare', 'verb', 'c'],
['bit', 'noun', 'b'],
['bizzarro', 'adjective', 'b'],
['bloccare', 'verb', 'a'],
['blocco', 'noun', 'b'],
['blocco', 'noun', 'b'],
['blog', 'noun', 'a'],
['blu', 'adjective', 'a'],
['blu', 'noun', 'a'],
['bocca', 'noun', 'a'],
['bocchino', 'noun', 'c'],
['boccia', 'noun', 'c'],
['bocciare', 'verb', 'b'],
['bocciatura', 'noun', 'c'],
['bocciolo', 'noun', 'c'],
['boccone', 'noun', 'c'],
['boh', 'exclamation', 'b'],
['boia', 'noun', 'c'],
['boia', 'adjective', 'c'],
['bolla', 'noun', 'b'],
['bolletta', 'noun', 'b'],
['bollito', 'past_part', 'c'],
['bollito', 'adjective', 'c'],
['bollito', 'noun', 'c'],
['bollitore', 'noun', 'c'],
['bollo', 'noun', 'c'],
['bolognese', 'adjective', 'c'],
['bolognese', 'noun', 'c'],
['bolzanino', 'adjective', 'c'],
['bolzanino', 'noun', 'c'],
['bomba', 'noun', 'b'],
['bombardare', 'verb', 'b'],
['bombola', 'noun', 'c'],
['bomboniera', 'noun', 'c'],
['bontà', 'noun', 'b'],
['bordo', 'noun', 'a'],
['borgata', 'noun', 'c'],
['borghese', 'adjective', 'b'],
['borghese', 'noun', 'b'],
['borghesia', 'noun', 'c'],
['borgo', 'noun', 'b'],
['borotalco', 'noun', 'c'],
['borsa', 'noun', 'a'],
['borsa', 'noun', 'b'],
['borsetta', 'noun', 'c'],
['bosco', 'noun', 'a'],
['bosniaco', 'adjective', 'c'],
['bosniaco', 'noun', 'c'],
['boss', 'noun', 'b'],
['bossolo', 'noun', 'c'],
['botanica', 'noun', 'c'],
['botta', 'noun', 'b'],
['botte', 'noun', 'c'],
['bottega', 'noun', 'b'],
['bottegaio', 'noun', 'c'],
['bottegaio', 'adjective', 'c'],
['bottiglia', 'noun', 'a'],
['botto', 'noun', 'c'],
['bottone', 'noun', 'b'],
['bovino', 'adjective', 'c'],
['bovino', 'noun', 'c'],
['box', 'noun', 'b'],
['boxer', 'noun', 'c'],
['braccialetto', 'noun', 'c'],
['bracciante', 'noun', 'c'],
['braccio', 'noun', 'a'],
['branco', 'noun', 'b'],
['brand', 'noun', 'b'],
['brandello', 'noun', 'c'],
['brano', 'noun', 'a'],
['brasiliano', 'adjective', 'b'],
['brasiliano', 'noun', 'b'],
['bravo', 'adjective', 'a'],
['bravo', 'noun', 'a'],
['bravo', 'exclamation', 'a'],
['bresaola', 'noun', 'c'],
['bretella', 'noun', 'c'],
['breve', 'adjective', 'a'],
['breve', 'adverb', 'a'],
['breve', 'noun', 'a'],
['briciola', 'noun', 'c'],
['brigantaggio', 'noun', 'c'],
['brigante', 'noun', 'c'],
['brillante', 'pres_part', 'b'],
['brillante', 'adjective', 'b'],
['brillante', 'noun', 'b'],
['brillantina', 'noun', 'c'],
['brillare', 'verb', 'b'],
['brina', 'noun', 'c'],
['brioche', 'noun', 'c'],
['britannico', 'adjective', 'b'],
['britannico', 'noun', 'b'],
['brivido', 'noun', 'b'],
['brocca', 'noun', 'c'],
['brogliaccio', 'noun', 'b'],
['bronchite', 'noun', 'c'],
['brontolare', 'verb', 'c'],
['bronzo', 'noun', 'b'],
['bruciare', 'verb', 'a'],
['bruciato', 'past_part', 'b'],
['bruciato', 'adjective', 'b'],
['bruciato', 'noun', 'b'],
['bruciatura', 'noun', 'c'],
['bruco', 'noun', 'c'],
['bruco', 'adjective', 'c'],
['bruschetta', 'noun', 'c'],
['brutale', 'adjective', 'c'],
['brutto', 'adjective', 'a'],
['brutto', 'noun', 'a'],
['brutto', 'adverb', 'a'],
['buca', 'noun', 'b'],
['bucare', 'verb', 'b'],
['bucato', 'noun', 'c'],
['buccia', 'noun', 'c'],
['buco', 'noun', 'a'],
['budino', 'noun', 'c'],
['bufala', 'noun', 'c'],
['bufalo', 'noun', 'c'],
['bufera', 'noun', 'c'],
['buffet', 'noun', 'c'],
['buffo', 'adjective', 'b'],
['buffo', 'noun', 'b'],
['bugia', 'noun', 'b'],
['bugiardo', 'adjective', 'b'],
['bugiardo', 'noun', 'b'],
['buio', 'adjective', 'a'],
['buio', 'noun', 'a'],
['bulgaro', 'adjective', 'c'],
['bulgaro', 'noun', 'c'],
['buonafede', 'noun', 'c'],
['buonasera', 'exclamation', 'b'],
['buongiorno', 'exclamation', 'a'],
['buongusto', 'noun', 'c'],
['buono', 'adjective', 'a'],
['buono', 'noun', 'a'],
['buono', 'adverb', 'a'],
['buonuomo', 'noun', 'c'],
['burattino', 'noun', 'c'],
['burocrazia', 'noun', 'c'],
['burrasca', 'noun', 'c'],
['burro', 'noun', 'b'],
['burrone', 'noun', 'c'],
['business', 'noun', 'b'],
['business', 'adjective', 'b'],
['bussare', 'verb', 'b'],
['bussola', 'noun', 'c'],
['busta', 'noun', 'b'],
['bustina', 'noun', 'c'],
['busto', 'noun', 'c'],
['buttare', 'verb', 'a'],
['cabina', 'noun', 'b'],
['cacao', 'noun', 'c'],
['cacca', 'noun', 'b'],
['caccia', 'noun', 'a'],
['cacciare', 'verb', 'a'],
['cacciatore', 'noun', 'b'],
['cacciavite', 'noun', 'c'],
['cadavere', 'noun', 'a'],
['cadere', 'verb', 'a'],
['cadere', 'noun', 'a'],
['caduta', 'noun', 'b'],
['caffè', 'noun', 'a'],
['caffè', 'adjective', 'a'],
['caffellatte', 'noun', 'c'],
['caffellatte', 'adjective', 'c'],
['caffettiera', 'noun', 'c'],
['cagare', 'verb', 'b'],
['cagliaritano', 'adjective', 'c'],
['cagliaritano', 'noun', 'c'],
['calabrese', 'adjective', 'c'],
['calabrese', 'noun', 'c'],
['calabrone', 'noun', 'c'],
['calamaro', 'noun', 'c'],
['calamita', 'noun', 'c'],
['calare', 'verb', 'b'],
['calcagno', 'noun', 'c'],
['calciare', 'verb', 'c'],
['calciatore', 'noun', 'b'],
['calcinaccio', 'noun', 'c'],
['calcio', 'noun', 'a'],
['calcolare', 'verb', 'b'],
['calcolatore', 'adjective', 'c'],
['calcolatore', 'noun', 'c'],
['calcolatrice', 'noun', 'c'],
['calcolo', 'noun', 'b'],
['caldo', 'adjective', 'a'],
['caldo', 'noun', 'a'],
['caldo', 'adverb', 'a'],
['calendario', 'noun', 'b'],
['calligrafia', 'noun', 'c'],
['callo', 'noun', 'c'],
['calma', 'noun', 'b'],
['calmare', 'verb', 'b'],
['calmo', 'adjective', 'b'],
['calo', 'noun', 'b'],
['calore', 'noun', 'a'],
['calpestare', 'verb', 'c'],
['calunnia', 'noun', 'c'],
['calvario', 'noun', 'c'],
['calza', 'noun', 'b'],
['calzare', 'verb', 'c'],
['calzatura', 'noun', 'c'],
['calzino', 'noun', 'c'],
['calzolaio', 'noun', 'c'],
['calzoleria', 'noun', 'c'],
['calzone', 'noun', 'c'],
['cambiamento', 'noun', 'a'],
['cambiare', 'verb', 'a'],
['cambio', 'noun', 'a'],
['camera', 'noun', 'a'],
['camerata', 'noun', 'c'],
['cameriere', 'noun', 'b'],
['camicetta', 'noun', 'c'],
['camicia', 'noun', 'b'],
['caminetto', 'noun', 'c'],
['camion', 'noun', 'a'],
['camionista', 'noun', 'c'],
['cammello', 'noun', 'c'],
['cammello', 'adjective', 'c'],
['camminare', 'verb', 'a'],
['camminata', 'noun', 'c'],
['cammino', 'noun', 'b'],
['camomilla', 'noun', 'c'],
['camorra', 'noun', 'b'],
['campagna', 'noun', 'a'],
['campana', 'noun', 'b'],
['campanella', 'noun', 'c'],
['campanello', 'noun', 'b'],
['campanile', 'noun', 'c'],
['campano', 'adjective', 'c'],
['campano', 'noun', 'c'],
['campare', 'verb', 'b'],
['campeggio', 'noun', 'c'],
['campionato', 'noun', 'b'],
['campione', 'noun', 'a'],
['campo', 'noun', 'a'],
['campobassano', 'adjective', 'c'],
['campobassano', 'noun', 'c'],
['camposanto', 'noun', 'c'],
['canadese', 'adjective', 'c'],
['canadese', 'noun', 'c'],
['canaglia', 'noun', 'c'],
['canale', 'noun', 'a'],
['canapa', 'noun', 'c'],
['canarino', 'noun', 'c'],
['canarino', 'adjective', 'c'],
['cancellare', 'verb', 'a'],
['cancellatura', 'noun', 'c'],
['cancello', 'noun', 'b'],
['cancro', 'noun', 'b'],
['candela', 'noun', 'b'],
['candeliere', 'noun', 'c'],
['candidare', 'verb', 'b'],
['candidato', 'past_part', 'a'],
['candidato', 'adjective', 'a'],
['candidato', 'noun', 'a'],
['candido', 'adjective', 'b'],
['cane', 'noun', 'a'],
['canestro', 'noun', 'c'],
['canguro', 'noun', 'c'],
['canna', 'noun', 'b'],
['cannibale', 'adjective', 'c'],
['cannibale', 'noun', 'c'],
['cannuccia', 'noun', 'c'],
['canone', 'noun', 'b'],
['canottiera', 'noun', 'c'],
['canotto', 'noun', 'c'],
['cantante', 'pres_part', 'b'],
['cantante', 'adjective', 'b'],
['cantante', 'noun', 'b'],
['cantare', 'verb', 'a'],
['cantautore', 'noun', 'c'],
['cantiere', 'noun', 'b'],
['cantilena', 'noun', 'c'],
['cantina', 'noun', 'b'],
['canto', 'noun', 'a'],
['canzone', 'noun', 'a'],
['caos', 'noun', 'b'],
['capace', 'adjective', 'a'],
['capacità', 'noun', 'a'],
['capanna', 'noun', 'b'],
['capannone', 'noun', 'b'],
['caparra', 'noun', 'c'],
['capello', 'noun', 'a'],
['capire', 'verb', 'a'],
['capitale', 'adjective', 'a'],
['capitale', 'noun', 'a'],
['capitano', 'noun', 'a'],
['capitare', 'verb', 'a'],
['capitolo', 'noun', 'a'],
['capo', 'noun', 'a'],
['capodanno', 'noun', 'c'],
['capogiro', 'noun', 'c'],
['capolavoro', 'noun', 'b'],
['capoluogo', 'noun', 'c'],
['caporale', 'noun', 'b'],
['caporale', 'adjective', 'b'],
['caposquadra', 'noun', 'c'],
['capotavola', 'noun', 'c'],
['capoufficio', 'noun', 'c'],
['cappa', 'noun', 'c'],
['cappella', 'noun', 'b'],
['cappelliera', 'noun', 'c'],
['cappello', 'noun', 'b'],
['cappero', 'noun', 'c'],
['cappotto', 'noun', 'c'],
['cappuccino', 'adjective', 'c'],
['cappuccino', 'noun', 'c'],
['cappuccino', 'adjective', 'c'],
['cappuccio', 'noun', 'c'],
['capra', 'noun', 'b'],
['capriccio', 'noun', 'b'],
['capriola', 'noun', 'c'],
['carabiniere', 'noun', 'a'],
['caramella', 'noun', 'b'],
['caramella', 'adjective', 'b'],
['carattere', 'noun', 'a'],
['caratteristica', 'noun', 'a'],
['caratteristico', 'adjective', 'b'],
['caratterizzare', 'verb', 'a'],
['carbone', 'noun', 'b'],
['carburante', 'pres_part', 'c'],
['carburante', 'adjective', 'c'],
['carburante', 'noun', 'c'],
['carcassa', 'noun', 'c'],
['carcerato', 'past_part', 'c'],
['carcerato', 'adjective', 'c'],
['carcerato', 'noun', 'c'],
['carcere', 'noun', 'a'],
['carciofino', 'noun', 'c'],
['carciofo', 'noun', 'c'],
['cardellino', 'noun', 'c'],
['cardiaco', 'adjective', 'b'],
['cardiaco', 'noun', 'b'],
['cardigan', 'noun', 'c'],
['cardinale', 'adjective', 'b'],
['cardinale', 'noun', 'b'],
['cardinale', 'adjective', 'b'],
['carenza', 'noun', 'b'],
['carica', 'noun', 'a'],
['caricare', 'verb', 'a'],
['carico', 'noun', 'a'],
['carico', 'adjective', 'b'],
['carino', 'adjective', 'a'],
['carità', 'noun', 'b'],
['carnagione', 'noun', 'c'],
['carne', 'noun', 'a'],
['carnevale', 'noun', 'c'],
['carnivoro', 'adjective', 'c'],
['carnivoro', 'noun', 'c'],
['carnoso', 'adjective', 'c'],
['carnoso', 'noun', 'c'],
['caro', 'adjective', 'a'],
['caro', 'adverb', 'a'],
['caro', 'noun', 'a'],
['carosello', 'noun', 'c'],
['carovana', 'noun', 'c'],
['carriera', 'noun', 'a'],
['carro', 'noun', 'b'],
['carrozzeria', 'noun', 'c'],
['carta', 'noun', 'a'],
['cartaceo', 'adjective', 'b'],
['cartella', 'noun', 'b'],
['cartello', 'noun', 'b'],
['cartoleria', 'noun', 'c'],
['cartolina', 'noun', 'b'],
['cartone', 'noun', 'b'],
['cartuccia', 'noun', 'c'],
['casa', 'noun', 'a'],
['casalinga', 'noun', 'c'],
['casalingo', 'adjective', 'c'],
['casalingo', 'noun', 'c'],
['cascare', 'verb', 'b'],
['cascata', 'noun', 'c'],
['casco', 'noun', 'c'],
['caserma', 'noun', 'b'],
['casetta', 'noun', 'b'],
['casino', 'noun', 'a'],
['caso', 'noun', 'a'],
['cassa', 'noun', 'a'],
['cassaforte', 'noun', 'c'],
['cassapanca', 'noun', 'c'],
['casseruola', 'noun', 'c'],
['cassetta', 'noun', 'b'],
['cassettiera', 'noun', 'c'],
['cassetto', 'noun', 'b'],
['cassiera', 'noun', 'c'],
['castagna', 'noun', 'c'],
['castagno', 'noun', 'c'],
['castano', 'adjective', 'c'],
['castello', 'noun', 'a'],
['castoro', 'noun', 'c'],
['casuale', 'adjective', 'b'],
['casuale', 'noun', 'b'],
['catalogo', 'noun', 'b'],
['catanzarese', 'adjective', 'c'],
['catanzarese', 'noun', 'c'],
['catarro', 'noun', 'c'],
['catasta', 'noun', 'c'],
['catastrofe', 'noun', 'b'],
['catechismo', 'noun', 'c'],
['categoria', 'noun', 'a'],
['catena', 'noun', 'a'],
['catenaccio', 'noun', 'c'],
['catino', 'noun', 'c'],
['catrame', 'noun', 'c'],
['cattedrale', 'adjective', 'b'],
['cattedrale', 'noun', 'b'],
['cattivo', 'adjective', 'a'],
['cattivo', 'noun', 'a'],
['cattolico', 'adjective', 'a'],
['cattolico', 'noun', 'a'],
['catturare', 'verb', 'b'],
['causa', 'noun', 'a'],
['causare', 'verb', 'a'],
['cavalcare', 'verb', 'b'],
['cavaliere', 'noun', 'a'],
['cavalletta', 'noun', 'c'],
['cavallo', 'noun', 'a'],
['cavare', 'verb', 'b'],
['cavatappi', 'noun', 'c'],
['caverna', 'noun', 'c'],
['caviglia', 'noun', 'b'],
['cavità', 'noun', 'b'],
['cavo', 'adjective', 'b'],
['cavo', 'noun', 'b'],
['cavo', 'noun', 'b'],
['cavolo', 'noun', 'b'],
['cazzata', 'noun', 'b'],
['cazzo', 'noun', 'a'],
['ce', 'pronoun', 'a'],
['ce', 'adverb', 'a'],
['cece', 'noun', 'c'],
['ceco', 'adjective', 'c'],
['ceco', 'noun', 'c'],
['cecoslovacco', 'adjective', 'c'],
['cecoslovacco', 'noun', 'c'],
['cedere', 'verb', 'a'],
['celare', 'verb', 'b'],
['celebrare', 'verb', 'b'],
['celebre', 'adjective', 'b'],
['celeste', 'adjective', 'b'],
['celeste', 'noun', 'b'],
['cella', 'noun', 'b'],
['cellula', 'noun', 'a'],
['cellulare', 'adjective', 'a'],
['cellulare', 'noun', 'a'],
['cemento', 'noun', 'b'],
['cena', 'noun', 'a'],
['cenare', 'verb', 'b'],
['cenere', 'noun', 'b'],
['cenere', 'adjective', 'b'],
['cenno', 'noun', 'b'],
['centesimo', 'adjective', 'b'],
['centesimo', 'noun', 'b'],
['centimetro', 'noun', 'b'],
['centinaio', 'noun', 'a'],
['cento', 'adjective', 'a'],
['cento', 'noun', 'a'],
['centrale', 'adjective', 'a'],
['centrale', 'noun', 'a'],
['centralino', 'noun', 'c'],
['centrare', 'verb', 'b'],
['centro', 'noun', 'a'],
['centroamericano', 'adjective', 'c'],
['centroamericano', 'noun', 'c'],
['ceramica', 'noun', 'b'],
['cercare', 'verb', 'a'],
['cerchio', 'noun', 'b'],
['cereale', 'noun', 'c'],
['cereale', 'adjective', 'c'],
['cerebrale', 'adjective', 'b'],
['cerebrale', 'noun', 'b'],
['cerimonia', 'noun', 'b'],
['cerino', 'noun', 'c'],
['cerniera', 'noun', 'c'],
['cerotto', 'noun', 'c'],
['certamente', 'adverb', 'a'],
['certezza', 'noun', 'a'],
['certificare', 'verb', 'b'],
['certificato', 'past_part', 'b'],
['certificato', 'adjective', 'b'],
['certificato', 'noun', 'b'],
['certo', 'adjective', 'a'],
['certo', 'adjective', 'a'],
['certo', 'pronoun', 'a'],
['certo', 'adverb', 'a'],
['cervello', 'noun', 'a'],
['cervo', 'noun', 'c'],
['cespuglio', 'noun', 'b'],
['cessare', 'verb', 'b'],
['cesso', 'noun', 'b'],
['cestino', 'noun', 'c'],
['cesto', 'noun', 'c'],
['cetriolo', 'noun', 'c'],
['chat', 'noun', 'b'],
['che', 'pronoun', 'a'],
['che', 'adjective', 'a'],
['che', 'noun', 'a'],
['chewingum', 'noun', 'c'],
['chi', 'pronoun', 'a'],
['chiacchiera', 'noun', 'b'],
['chiacchierare', 'verb', 'b'],
['chiamare', 'verb', 'a'],
['chiamata', 'noun', 'b'],
['chiaramente', 'adverb', 'a'],
['chiarezza', 'noun', 'b'],
['chiarire', 'verb', 'a'],
['chiaro', 'adjective', 'a'],
['chiaro', 'noun', 'a'],
['chiaro', 'adverb', 'a'],
['chiasso', 'noun', 'c'],
['chiave', 'noun', 'a'],
['chiazza', 'noun', 'c'],
['chiedere', 'verb', 'a'],
['chiesa', 'noun', 'a'],
['chilo', 'noun', 'b'],
['chilogrammo', 'noun', 'c'],
['chilometro', 'noun', 'a'],
['chimico', 'adjective', 'a'],
['chimico', 'noun', 'a'],
['china', 'noun', 'c'],
['chinare', 'verb', 'b'],
['chinotto', 'noun', 'c'],
['chiodo', 'noun', 'b'],
['chiosco', 'noun', 'b'],
['chirurgia', 'noun', 'b'],
['chirurgico', 'adjective', 'b'],
['chirurgico', 'noun', 'b'],
['chirurgo', 'noun', 'b'],
['chissà', 'adverb', 'a'],
['chitarra', 'noun', 'b'],
['chiudere', 'verb', 'a'],
['chiunque', 'pronoun', 'a'],
['chiuso', 'past_part', 'a'],
['chiuso', 'adjective', 'a'],
['chiuso', 'noun', 'a'],
['chiuso', 'adverb', 'a'],
['chiusura', 'noun', 'b'],
['ci', 'noun', 'c'],
['ci', 'pronoun', 'a'],
['ci', 'adverb', 'a'],
['ciabatta', 'noun', 'c'],
['ciambella', 'noun', 'c'],
['ciao', 'exclamation', 'a'],
['ciascuno', 'adjective', 'a'],
['ciascuno', 'pronoun', 'a'],
['cibare', 'verb', 'c'],
['cibo', 'noun', 'a'],
['cicatrice', 'noun', 'b'],
['ciclismo', 'noun', 'b'],
['ciclista', 'noun', 'c'],
['ciclo', 'noun', 'b'],
['cicogna', 'noun', 'c'],
['cicoria', 'noun', 'c'],
['cieco', 'adjective', 'b'],
['cieco', 'noun', 'b'],
['cielo', 'noun', 'a'],
['cifra', 'noun', 'a'],
['ciglio', 'noun', 'b'],
['cigno', 'noun', 'c'],
['cileno', 'adjective', 'c'],
['cileno', 'noun', 'c'],
['ciliegia', 'noun', 'c'],
['ciliegia', 'adjective', 'c'],
['ciliegio', 'noun', 'c'],
['cilindro', 'noun', 'c'],
['cima', 'noun', 'c'],
['cimice', 'noun', 'c'],
['ciminiera', 'noun', 'c'],
['cimitero', 'noun', 'b'],
['cinema', 'noun', 'a'],
['cinematografico', 'adjective', 'b'],
['cinese', 'adjective', 'a'],
['cinese', 'noun', 'a'],
['cinghia', 'noun', 'c'],
['cinghiale', 'noun', 'c'],
['cinguettare', 'verb', 'c'],
['cinguettio', 'noun', 'c'],
['cinico', 'adjective', 'c'],
['cinico', 'noun', 'c'],
['cinquanta', 'adjective', 'a'],
['cinquanta', 'noun', 'a'],
['cinque', 'adjective', 'a'],
['cinque', 'noun', 'a'],
['cinquecento', 'adjective', 'b'],
['cinquecento', 'noun', 'b'],
['cintura', 'noun', 'b'],
['cinturino', 'noun', 'c'],
['ciò', 'pronoun', 'a'],
['ciocca', 'noun', 'c'],
['cioccolatino', 'noun', 'c'],
['cioccolato', 'noun', 'b'],
['cioccolato', 'adjective', 'b'],
['cioè', 'conjunction', 'a'],
['ciotola', 'noun', 'c'],
['cipolla', 'noun', 'b'],
['cipresso', 'noun', 'c'],
['cipriota', 'adjective', 'c'],
['cipriota', 'noun', 'c'],
['circa', 'preposition', 'a'],
['circa', 'adverb', 'a'],
['circa', 'noun', 'a'],
['circo', 'noun', 'b'],
['circolare', 'adjective', 'b'],
['circolare', 'noun', 'b'],
['circolare', 'verb', 'b'],
['circolazione', 'noun', 'b'],
['circolo', 'noun', 'b'],
['circondare', 'verb', 'a'],
['circostanza', 'noun', 'a'],
['circuito', 'noun', 'b'],
['citare', 'verb', 'a'],
['citato', 'past_part', 'b'],
['citato', 'adjective', 'b'],
['citato', 'noun', 'b'],
['citazione', 'noun', 'b'],
['citofono', 'noun', 'c'],
['città', 'noun', 'a'],
['cittadina', 'noun', 'b'],
['cittadinanza', 'noun', 'b'],
['cittadino', 'adjective', 'a'],
['cittadino', 'noun', 'a'],
['ciuffo', 'noun', 'c'],
['civile', 'adjective', 'a'],
['civile', 'noun', 'a'],
['civiltà', 'noun', 'b'],
['clacson', 'noun', 'c'],
['clan', 'noun', 'b'],
['clandestino', 'adjective', 'b'],
['clandestino', 'noun', 'b'],
['classe', 'noun', 'a'],
['classico', 'adjective', 'a'],
['classico', 'noun', 'a'],
['classifica', 'noun', 'b'],
['classificare', 'verb', 'b'],
['clero', 'noun', 'c'],
['cliccare', 'verb', 'b'],
['cliente', 'noun', 'a'],
['clima', 'noun', 'b'],
['clinica', 'noun', 'b'],
['clinico', 'adjective', 'b'],
['clinico', 'noun', 'b'],
['clistere', 'noun', 'c'],
['cloro', 'noun', 'c'],
['club', 'noun', 'b'],
['cobra', 'noun', 'c'],
['cocaina', 'noun', 'b'],
['coccinella', 'noun', 'c'],
['coccio', 'noun', 'c'],
['cocciuto', 'adjective', 'c'],
['cocciuto', 'noun', 'c'],
['cocco', 'noun', 'c'],
['coccodrillo', 'noun', 'c'],
['coccola', 'noun', 'c'],
['coccolare', 'verb', 'c'],
['cocomero', 'noun', 'c'],
['coda', 'noun', 'a'],
['codice', 'noun', 'a'],
['coerente', 'adjective', 'b'],
['cofano', 'noun', 'c'],
['cogliere', 'verb', 'a'],
['coglione', 'noun', 'a'],
['cognato', 'noun', 'b'],
['cognato', 'adjective', 'b'],
['cognome', 'noun', 'b'],
['coincidenza', 'noun', 'b'],
['coincidere', 'verb', 'b'],
['coinvolgere', 'verb', 'a'],
['coinvolgimento', 'noun', 'b'],
['colare', 'verb', 'b'],
['colata', 'noun', 'c'],
['colazione', 'noun', 'b'],
['colera', 'noun', 'c'],
['colica', 'noun', 'c'],
['colino', 'noun', 'c'],
['colla', 'noun', 'c'],
['collaborare', 'verb', 'b'],
['collaboratore', 'noun', 'b'],
['collaborazione', 'noun', 'b'],
['collana', 'noun', 'b'],
['collant', 'noun', 'c'],
['collant', 'adjective', 'c'],
['collare', 'noun', 'c'],
['collasso', 'noun', 'c'],
['collaterale', 'adjective', 'b'],
['collaterale', 'noun', 'b'],
['colle', 'noun', 'c'],
['collega', 'noun', 'a'],
['collegamento', 'noun', 'b'],
['collegare', 'verb', 'a'],
['collegio', 'noun', 'b'],
['collera', 'noun', 'c'],
['colletta', 'noun', 'c'],
['collettivo', 'adjective', 'b'],
['collettivo', 'noun', 'b'],
['collezione', 'noun', 'b'],
['collina', 'noun', 'b'],
['collo', 'noun', 'a'],
['collocare', 'verb', 'b'],
['colloquio', 'noun', 'b'],
['colluttorio', 'noun', 'c'],
['colmo', 'noun', 'c'],
['colomba', 'noun', 'b'],
['colombo', 'noun', 'c'],
['colonna', 'noun', 'a'],
['colonnello', 'noun', 'b'],
['colorante', 'pres_part', 'c'],
['colorante', 'adjective', 'c'],
['colorante', 'noun', 'c'],
['colorare', 'verb', 'b'],
['colorato', 'past_part', 'b'],
['colorato', 'adjective', 'b'],
['colore', 'noun', 'a'],
['coloro', 'pronoun', 'a'],
['colosso', 'noun', 'c'],
['colpa', 'noun', 'a'],
['colpevole', 'adjective', 'b'],
['colpevole', 'noun', 'b'],
['colpire', 'verb', 'a'],
['colpo', 'noun', 'a'],
['coltellata', 'noun', 'c'],
['coltello', 'noun', 'a'],
['coltivare', 'verb', 'b'],
['coltivazione', 'noun', 'c'],
['colto', 'adjective', 'b'],
['colto', 'noun', 'b'],
['colui', 'pronoun', 'b'],
['coma', 'noun', 'b'],
['comandamento', 'noun', 'b'],
['comandante', 'pres_part', 'b'],
['comandante', 'adjective', 'b'],
['comandante', 'noun', 'b'],
['comandare', 'verb', 'b'],
['comando', 'noun', 'b'],
['combaciare', 'verb', 'c'],
['combattente', 'pres_part', 'c'],
['combattente', 'adjective', 'c'],
['combattente', 'noun', 'c'],
['combattere', 'verb', 'a'],
['combattimento', 'noun', 'b'],
['combinare', 'verb', 'b'],
['combinazione', 'noun', 'b'],
['come', 'adverb', 'a'],
['come', 'conjunction', 'a'],
['cometa', 'noun', 'c'],
['comfort', 'noun', 'c'],
['comico', 'adjective', 'b'],
['comico', 'noun', 'b'],
['cominciare', 'verb', 'a'],
['cominciare', 'noun', 'a'],
['comitato', 'noun', 'b'],
['comma', 'noun', 'b'],
['commedia', 'noun', 'b'],
['commentare', 'verb', 'a'],
['commento', 'noun', 'a'],
['commerciale', 'adjective', 'a'],
['commerciale', 'noun', 'a'],
['commerciante', 'pres_part', 'b'],
['commerciante', 'adjective', 'b'],
['commerciante', 'noun', 'b'],
['commercio', 'noun', 'b'],
['commettere', 'verb', 'a'],
['commissariato', 'noun', 'b'],
['commissario', 'noun', 'a'],
['commissione', 'noun', 'a'],
['community', 'noun', 'b'],
['commuovere', 'verb', 'b'],
['comodino', 'noun', 'c'],
['comodità', 'noun', 'c'],
['comodo', 'adjective', 'a'],
['comodo', 'noun', 'a'],
['compagnia', 'noun', 'a'],
['compagno', 'noun', 'a'],
['compagno', 'adjective', 'a'],
['comparire', 'verb', 'a'],
['comparsa', 'noun', 'b'],
['compassione', 'noun', 'c'],
['compasso', 'noun', 'c'],
['compatibile', 'adjective', 'b'],
['compatriota', 'noun', 'c'],
['compatto', 'adjective', 'b'],
['compatto', 'noun', 'b'],
['compensare', 'verb', 'b'],
['compenso', 'noun', 'b'],
['competente', 'adjective', 'b'],
['competente', 'noun', 'b'],
['competenza', 'noun', 'b'],
['competere', 'verb', 'b'],
['competizione', 'noun', 'b'],
['compiangere', 'verb', 'c'],
['compiere', 'verb', 'a'],
['compilare', 'verb', 'b'],
['compito', 'noun', 'a'],
['compleanno', 'noun', 'b'],
['complessivo', 'adjective', 'b'],
['complesso', 'noun', 'b'],
['complesso', 'adjective', 'a'],
['completamente', 'adverb', 'a'],
['completare', 'verb', 'b'],
['completo', 'adjective', 'a'],
['completo', 'noun', 'a'],
['complicare', 'verb', 'b'],
['complicato', 'past_part', 'b'],
['complicato', 'adjective', 'b'],
['complice', 'noun', 'b'],
['complice', 'adjective', 'b'],
['complimento', 'noun', 'b'],
['complotto', 'noun', 'c'],
['componente', 'pres_part', 'b'],
['componente', 'adjective', 'b'],
['componente', 'noun', 'b'],
['comporre', 'verb', 'a'],
['comportamento', 'noun', 'a'],
['comportare', 'verb', 'a'],
['composizione', 'noun', 'b'],
['composto', 'past_part', 'b'],
['composto', 'adjective', 'b'],
['composto', 'noun', 'b'],
['comprare', 'verb', 'a'],
['comprendere', 'verb', 'a'],
['comprensibile', 'adjective', 'b'],
['comprensione', 'noun', 'b'],
['comprensivo', 'adjective', 'c'],
['compreso', 'past_part', 'a'],
['compreso', 'adjective', 'a'],
['compromesso', 'noun', 'b'],
['compromettere', 'verb', 'b'],
['computer', 'noun', 'a'],
['comunale', 'adjective', 'b'],
['comunale', 'noun', 'b'],
['comune', 'adjective', 'a'],
['comune', 'noun', 'a'],
['comune', 'noun', 'a'],
['comunicare', 'verb', 'a'],
['comunicazione', 'noun', 'a'],
['comunione', 'noun', 'b'],
['comunismo', 'noun', 'b'],
['comunista', 'adjective', 'a'],
['comunista', 'noun', 'a'],
['comunità', 'noun', 'a'],
['comunque', 'adverb', 'a'],
['comunque', 'conjunction', 'a'],
['con', 'preposition', 'a'],
['conca', 'noun', 'c'],
['concedere', 'verb', 'b'],
['concentrare', 'verb', 'a'],
['concentrazione', 'noun', 'b'],
['concepire', 'noun', 'b'],
['concerto', 'noun', 'a'],
['concessione', 'noun', 'b'],
['concesso', 'past_part', 'b'],
['concesso', 'adjective', 'b'],
['concetto', 'past_part', 'a'],
['concetto', 'adjective', 'a'],
['concetto', 'noun', 'a'],
['concezione', 'noun', 'b'],
['conchiglia', 'noun', 'c'],
['concime', 'noun', 'c'],
['concludere', 'verb', 'a'],
['conclusione', 'noun', 'a'],
['concordare', 'verb', 'b'],
['concorrente', 'pres_part', 'b'],
['concorrente', 'adjective', 'b'],
['concorrente', 'noun', 'b'],
['concorrenza', 'noun', 'b'],
['concorrere', 'verb', 'b'],
['concorso', 'noun', 'b'],
['concreto', 'adjective', 'a'],
['concreto', 'noun', 'a'],
['condanna', 'noun', 'b'],
['condannare', 'verb', 'a'],
['condimento', 'noun', 'c'],
['condividere', 'verb', 'a'],
['condizionare', 'verb', 'b'],
['condizione', 'noun', 'a'],
['condoglianza', 'noun', 'c'],
['condominio', 'noun', 'b'],
['condotta', 'noun', 'b'],
['condurre', 'verb', 'a'],
['conduttore', 'adjective', 'b'],
['conduttore', 'noun', 'b'],
['conduttura', 'noun', 'c'],
['conferenza', 'noun', 'b'],
['conferire', 'verb', 'b'],
['conferma', 'noun', 'b'],
['confermare', 'verb', 'a'],
['confessare', 'verb', 'b'],
['confessione', 'noun', 'b'],
['confessore', 'noun', 'c'],
['confetto', 'noun', 'c'],
['confetto', 'adjective', 'c'],
['confettura', 'noun', 'c'],
['confezione', 'noun', 'b'],
['conficcare', 'verb', 'c'],
['confidare', 'verb', 'b'],
['confidenza', 'noun', 'b'],
['confine', 'noun', 'a'],
['conflitto', 'noun', 'b'],
['confondere', 'verb', 'a'],
['confortare', 'verb', 'c'],
['confrontare', 'verb', 'b'],
['confronto', 'noun', 'a'],
['confusione', 'noun', 'b'],
['confuso', 'past_part', 'b'],
['confuso', 'adjective', 'b'],
['congedo', 'noun', 'c'],
['congelare', 'verb', 'b'],
['congelatore', 'noun', 'c'],
['congestione', 'noun', 'c'],
['congiura', 'noun', 'c'],
['congresso', 'noun', 'b'],
['coniglio', 'noun', 'b'],
['coniugato', 'past_part', 'c'],
['coniugato', 'adjective', 'c'],
['coniugato', 'noun', 'c'],
['coniuge', 'noun', 'b'],
['connessione', 'noun', 'b'],
['connettere', 'verb', 'b'],
['cono', 'noun', 'b'],
['conoscenza', 'noun', 'a'],
['conoscere', 'verb', 'a'],
['conosciuto', 'past_part', 'b'],
['conosciuto', 'adjective', 'b'],
['conosciuto', 'noun', 'b'],
['conquista', 'noun', 'b'],
['conquistare', 'verb', 'a'],
['consapevole', 'adjective', 'b'],
['consapevolezza', 'noun', 'b'],
['consegna', 'noun', 'b'],
['consegnare', 'verb', 'a'],
['conseguente', 'pres_part', 'b'],
['conseguente', 'adjective', 'b'],
['conseguente', 'noun', 'b'],
['conseguenza', 'noun', 'a'],
['conseguire', 'verb', 'b'],
['consenso', 'noun', 'b'],
['consentire', 'verb', 'a'],
['conservare', 'verb', 'a'],
['conservazione', 'noun', 'b'],
['considerare', 'verb', 'a'],
['considerazione', 'noun', 'a'],
['consigliare', 'verb', 'a'],
['consigliere', 'noun', 'b'],
['consiglio', 'noun', 'a'],
['consistente', 'pres_part', 'b'],
['consistente', 'adjective', 'b'],
['consistenza', 'noun', 'b'],
['consistere', 'verb', 'b'],
['consolare', 'verb', 'b'],
['consonante', 'noun', 'c'],
['consorzio', 'noun', 'b'],
['constatare', 'verb', 'b'],
['consueto', 'adjective', 'b'],
['consueto', 'noun', 'b'],
['consulente', 'adjective', 'b'],
['consulente', 'noun', 'b'],
['consulenza', 'noun', 'b'],
['consultare', 'verb', 'b'],
['consumare', 'verb', 'a'],
['consumatore', 'noun', 'b'],
['consumatore', 'adjective', 'b'],
['consumazione', 'noun', 'c'],
['consumo', 'noun', 'b'],
['contachilometri', 'noun', 'c'],
['contadino', 'noun', 'b'],
['contadino', 'adjective', 'b'],
['contagiare', 'verb', 'c'],
['contagio', 'noun', 'c'],
['contagioso', 'adjective', 'c'],
['contagocce', 'noun', 'c'],
['contaminare', 'verb', 'b'],
['contante', 'pres_part', 'b'],
['contante', 'adjective', 'b'],
['contante', 'noun', 'b'],
['contare', 'verb', 'a'],
['contatore', 'noun', 'c'],
['contattare', 'verb', 'b'],
['contatto', 'noun', 'a'],
['conte', 'noun', 'b'],
['contemplare', 'verb', 'b'],
['contemporaneamente', 'adverb', 'b'],
['contemporaneo', 'adjective', 'a'],
['contemporaneo', 'noun', 'a'],
['contenere', 'verb', 'a'],
['contenitore', 'adjective', 'b'],
['contenitore', 'noun', 'b'],
['contentare', 'verb', 'b'],
['contentezza', 'noun', 'c'],
['contento', 'adjective', 'a'],
['contenuto', 'past_part', 'a'],
['contenuto', 'adjective', 'a'],
['contenuto', 'noun', 'a'],
['contestare', 'verb', 'b'],
['contestazione', 'noun', 'b'],
['contesto', 'noun', 'a'],
['continente', 'noun', 'b'],
['continuamente', 'adverb', 'b'],
['continuare', 'verb', 'a'],
['continuazione', 'noun', 'b'],
['continuità', 'noun', 'b'],
['continuo', 'adjective', 'a'],
['continuo', 'noun', 'a'],
['continuo', 'adverb', 'a'],
['conto', 'noun', 'a'],
['contorno', 'noun', 'b'],
['contrabbandiere', 'noun', 'c'],
['contrabbando', 'noun', 'c'],
['contraccambiare', 'verb', 'c'],
['contraddizione', 'noun', 'b'],
['contrario', 'adjective', 'a'],
['contrario', 'noun', 'a'],
['contrarre', 'verb', 'b'],
['contrastare', 'verb', 'b'],
['contrasto', 'noun', 'b'],
['contratto', 'noun', 'a'],
['contribuire', 'verb', 'b'],
['contributo', 'noun', 'b'],
['contro', 'preposition', 'a'],
['contro', 'adverb', 'a'],
['contro', 'noun', 'a'],
['controllare', 'verb', 'a'],
['controllo', 'noun', 'a'],
['controllore', 'noun', 'c'],
['convegno', 'noun', 'b'],
['conveniente', 'pres_part', 'b'],
['conveniente', 'adjective', 'b'],
['convenire', 'verb', 'b'],
['convenzione', 'noun', 'b'],
['conversazione', 'noun', 'a'],
['conversione', 'noun', 'b'],
['convertire', 'verb', 'b'],
['convincente', 'pres_part', 'b'],
['convincente', 'adjective', 'b'],
['convincere', 'verb', 'a'],
['convinto', 'past_part', 'b'],
['convinto', 'adjective', 'b'],
['convinzione', 'noun', 'b'],
['convivenza', 'noun', 'b'],
['convivere', 'verb', 'b'],
['convocare', 'verb', 'b'],
['convulsione', 'noun', 'c'],
['coordinamento', 'noun', 'b'],
['coordinare', 'verb', 'b'],
['coperchio', 'noun', 'c'],
['coperta', 'noun', 'b'],
['copertina', 'noun', 'b'],
['coperto', 'past_part', 'b'],
['coperto', 'adjective', 'b'],
['coperto', 'noun', 'b'],
['copertura', 'noun', 'b'],
['copia', 'noun', 'a'],
['copiare', 'verb', 'b'],
['copione', 'noun', 'b'],
['coppa', 'noun', 'b'],
['coppia', 'noun', 'a'],
['copricostume', 'noun', 'c'],
['copriletto', 'noun', 'c'],
['coprire', 'verb', 'a'],
['copyright', 'noun', 'b'],
['coraggio', 'noun', 'a'],
['coraggio', 'exclamation', 'a'],
['coraggioso', 'adjective', 'b'],
['corallo', 'noun', 'c'],
['corallo', 'adjective', 'c'],
['corazza', 'noun', 'c'],
['corazzata', 'noun', 'c'],
['corazziere', 'noun', 'c'],
['corda', 'noun', 'a'],
['coriandolo', 'noun', 'c'],
['coricare', 'verb', 'c'],
['cornacchia', 'noun', 'c'],
['cornetto', 'noun', 'c'],
['cornice', 'noun', 'b'],
['corno', 'noun', 'b'],
['cornuto', 'adjective', 'c'],
['cornuto', 'noun', 'c'],
['coro', 'noun', 'b'],
['corona', 'noun', 'b'],
['corpo', 'noun', 'a'],
['corporatura', 'noun', 'c'],
['correggere', 'verb', 'a'],
['corrente', 'pres_part', 'a'],
['corrente', 'adjective', 'a'],
['corrente', 'noun', 'a'],
['corrente', 'adverb', 'a'],
['correre', 'verb', 'a'],
['correttamente', 'adverb', 'b'],
['corretto', 'past_part', 'b'],
['corretto', 'adjective', 'b'],
['correzione', 'noun', 'c'],
['corridoio', 'noun', 'b'],
['corridore', 'adjective', 'c'],
['corridore', 'noun', 'c'],
['corriera', 'noun', 'c'],
['corriere', 'noun', 'a'],
['corrispondente', 'pres_part', 'b'],
['corrispondente', 'adjective', 'b'],
['corrispondente', 'noun', 'b'],
['corrispondenza', 'noun', 'b'],
['corrispondere', 'verb', 'a'],
['corruzione', 'noun', 'b'],
['corsa', 'noun', 'a'],
['corsia', 'noun', 'c'],
['corso', 'noun', 'a'],
['corte', 'noun', 'a'],
['corteccia', 'noun', 'c'],
['corteggiare', 'verb', 'c'],
['cortesia', 'noun', 'b'],
['cortile', 'noun', 'b'],
['corto', 'adjective', 'a'],
['corvo', 'noun', 'c'],
['cosa', 'noun', 'a'],
['coscia', 'noun', 'b'],
['cosciente', 'adjective', 'c'],
['coscienza', 'noun', 'a'],
['così', 'adverb', 'a'],
['cosiddetto', 'adjective', 'a'],
['costa', 'noun', 'a'],
['costante', 'adjective', 'b'],
['costante', 'noun', 'b'],
['costantemente', 'adverb', 'b'],
['costare', 'verb', 'a'],
['costellazione', 'noun', 'b'],
['costituire', 'verb', 'a'],
['costituzionale', 'adjective', 'b'],
['costituzione', 'noun', 'b'],
['costo', 'noun', 'a'],
['costoso', 'adjective', 'b'],
['costringere', 'verb', 'a'],
['costruire', 'verb', 'a'],
['costruttivo', 'adjective', 'b'],
['costruzione', 'noun', 'a'],
['costume', 'noun', 'a'],
['cotoletta', 'noun', 'c'],
['cotone', 'noun', 'b'],
['cottura', 'noun', 'c'],
['covare', 'verb', 'c'],
['covo', 'noun', 'c'],
['cozza', 'noun', 'c'],
['cracker', 'noun', 'c'],
['cranio', 'noun', 'b'],
['cravatta', 'noun', 'b'],
['creare', 'verb', 'a'],
['creatività', 'noun', 'b'],
['creativo', 'adjective', 'b'],
['creativo', 'noun', 'b'],
['creatura', 'noun', 'b'],
['creazione', 'noun', 'b'],
['credente', 'pres_part', 'b'],
['credente', 'adjective', 'b'],
['credente', 'noun', 'b'],
['credenza', 'noun', 'c'],
['credere', 'verb', 'a'],
['credere', 'noun', 'a'],
['credibile', 'adjective', 'b'],
['credito', 'noun', 'a'],
['creditore', 'noun', 'b'],
['credo', 'noun', 'c'],
['crema', 'noun', 'b'],
['crema', 'adjective', 'b'],
['crepaccio', 'noun', 'c'],
['crêpe', 'noun', 'c'],
['crescente', 'pres_part', 'b'],
['crescente', 'adjective', 'b'],
['crescente', 'noun', 'b'],
['crescere', 'verb', 'a'],
['crescita', 'noun', 'a'],
['cretino', 'adjective', 'b'],
['cretino', 'noun', 'b'],
['criceto', 'noun', 'c'],
['criminale', 'adjective', 'b'],
['criminale', 'noun', 'b'],
['crimine', 'noun', 'b'],
['criniera', 'noun', 'c'],
['crisantemo', 'noun', 'c'],
['crisi', 'noun', 'a'],
['cristallo', 'noun', 'b'],
['cristianesimo', 'noun', 'b'],
['cristiano', 'adjective', 'a'],
['cristiano', 'noun', 'a'],
['criterio', 'noun', 'b'],
['critica', 'noun', 'a'],
['criticare', 'verb', 'b'],
['critico', 'adjective', 'a'],
['critico', 'noun', 'a'],
['croato', 'adjective', 'c'],
['croato', 'noun', 'c'],
['croce', 'noun', 'b'],
['crocifiggere', 'verb', 'c'],
['crocifisso', 'past_part', 'c'],
['crocifisso', 'adjective', 'c'],
['crocifisso', 'noun', 'c'],
['crollare', 'verb', 'b'],
['cronaca', 'noun', 'b'],
['cronico', 'adjective', 'b'],
['cronico', 'noun', 'b'],
['cronista', 'noun', 'c'],
['crostaceo', 'noun', 'c'],
['crostino', 'noun', 'c'],
['crudele', 'adjective', 'b'],
['crudele', 'noun', 'b'],
['crudo', 'adjective', 'b'],
['crudo', 'noun', 'b'],
['cu', 'noun', 'c'],
['cubo', 'noun', 'b'],
['cubo', 'adjective', 'b'],
['cucchiaio', 'noun', 'b'],
['cuccia', 'noun', 'c'],
['cucciolo', 'noun', 'b'],
['cucina', 'noun', 'a'],
['cucinare', 'verb', 'a'],
['cucire', 'verb', 'b'],
['cucito', 'past_part', 'c'],
['cucito', 'adjective', 'c'],
['cucito', 'noun', 'c'],
['cucitura', 'noun', 'c'],
['cuffia', 'noun', 'b'],
['cugino', 'noun', 'b'],
['cui', 'pronoun', 'a'],
['cullare', 'verb', 'c'],
['culo', 'noun', 'a'],
['culto', 'noun', 'b'],
['cultura', 'noun', 'a'],
['culturale', 'adjective', 'a'],
['cumulo', 'noun', 'c'],
['cuocere', 'verb', 'b'],
['cuoco', 'noun', 'b'],
['cuore', 'noun', 'a'],
['cupo', 'adjective', 'b'],
['cupo', 'noun', 'b'],
['cura', 'noun', 'a'],
['curare', 'verb', 'a'],
['curiosare', 'verb', 'b'],
['curiosità', 'noun', 'b'],
['curioso', 'adjective', 'a'],
['curioso', 'noun', 'a'],
['curriculum', 'noun', 'b'],
['curva', 'noun', 'b'],
['curvo', 'adjective', 'b'],
['curvo', 'noun', 'b'],
['cuscino', 'noun', 'b'],
['custode', 'noun', 'b'],
['custode', 'adjective', 'b'],
['custodia', 'noun', 'b'],
['custodire', 'verb', 'b'],
['da', 'preposition', 'a'],
['dado', 'noun', 'c'],
['danese', 'adjective', 'c'],
['danese', 'noun', 'c'],
['dannato', 'past_part', 'b'],
['dannato', 'adjective', 'b'],
['dannato', 'noun', 'b'],
['danneggiare', 'verb', 'b'],
['danno', 'noun', 'a'],
['dannoso', 'adjective', 'c'],
['danza', 'noun', 'b'],
['dappertutto', 'adverb', 'b'],
['dare', 'verb', 'a'],
['dare', 'noun', 'a'],
['data', 'noun', 'a'],
['dato', 'past_part', 'a'],
['dato', 'adjective', 'a'],
['dato', 'noun', 'a'],
['dattero', 'noun', 'c'],
['davanti', 'adverb', 'a'],
['davanti', 'adjective', 'a'],
['davanti', 'noun', 'a'],
['davanzale', 'noun', 'c'],
['davvero', 'adverb', 'a'],
['dea', 'noun', 'b'],
['debito', 'noun', 'a'],
['debole', 'adjective', 'a'],
['debole', 'noun', 'a'],
['debolezza', 'noun', 'b'],
['decennio', 'noun', 'b'],
['decidere', 'verb', 'a'],
['decina', 'noun', 'a'],
['decisamente', 'adverb', 'b'],
['decisione', 'noun', 'a'],
['decisivo', 'adjective', 'b'],
['deciso', 'past_part', 'b'],
['deciso', 'adjective', 'b'],
['decorare', 'verb', 'b'],
['decorato', 'past_part', 'c'],
['decorato', 'adjective', 'c'],
['decorato', 'noun', 'c'],
['decorazione', 'noun', 'b'],
['decoroso', 'adjective', 'c'],
['decreto', 'noun', 'b'],
['dedica', 'noun', 'c'],
['dedicare', 'verb', 'a'],
['dedurre', 'verb', 'b'],
['deficiente', 'adjective', 'b'],
['deficiente', 'noun', 'b'],
['definire', 'verb', 'a'],
['definitivamente', 'adverb', 'b'],
['definitivo', 'adjective', 'a'],
['definitivo', 'noun', 'a'],
['definizione', 'noun', 'a'],
['deformare', 'verb', 'c'],
['deforme', 'adjective', 'c'],
['deforme', 'noun', 'c'],
['defunto', 'past_part', 'b'],
['defunto', 'adjective', 'b'],
['defunto', 'noun', 'b'],
['degno', 'adjective', 'b'],
['degradare', 'verb', 'b'],
['delegare', 'verb', 'b'],
['delegato', 'past_part', 'b'],
['delegato', 'adjective', 'b'],
['delegato', 'noun', 'b'],
['delegazione', 'noun', 'c'],
['delfino', 'noun', 'c'],
['delicatezza', 'noun', 'c'],
['delicato', 'adjective', 'b'],
['delicato', 'noun', 'b'],
['delinquente', 'pres_part', 'c'],
['delinquente', 'adjective', 'c'],
['delinquente', 'noun', 'c'],
['delirare', 'verb', 'c'],
['delirio', 'noun', 'b'],
['delitto', 'noun', 'b'],
['delizia', 'noun', 'c'],
['delizioso', 'adjective', 'b'],
['deludere', 'verb', 'b'],
['delusione', 'noun', 'b'],
['deluso', 'past_part', 'b'],
['deluso', 'adjective', 'b'],
['deluso', 'noun', 'b'],
['democratico', 'adjective', 'b'],
['democratico', 'noun', 'b'],
['democrazia', 'noun', 'a'],
['democristiano', 'adjective', 'c'],
['democristiano', 'noun', 'c'],
['demoralizzare', 'verb', 'c'],
['denaro', 'noun', 'a'],
['denominare', 'verb', 'b'],
['denso', 'adjective', 'b'],
['dente', 'noun', 'a'],
['dentiera', 'noun', 'c'],
['dentifricio', 'noun', 'c'],
['dentista', 'noun', 'b'],
['dentro', 'adverb', 'a'],
['dentro', 'preposition', 'a'],
['dentro', 'noun', 'a'],
['denuncia', 'noun', 'b'],
['denunciare', 'verb', 'a'],
['deodorante', 'pres_part', 'c'],
['deodorante', 'adjective', 'c'],
['deodorante', 'noun', 'c'],
['depilazione', 'noun', 'c'],
['deporre', 'verb', 'b'],
['depositare', 'verb', 'b'],
['deposito', 'noun', 'b'],
['deposizione', 'noun', 'b'],
['depressione', 'noun', 'b'],
['deprimere', 'verb', 'b'],
['depuratore', 'adjective', 'c'],
['depuratore', 'noun', 'c'],
['deputato', 'past_part', 'b'],
['deputato', 'adjective', 'b'],
['deputato', 'noun', 'b'],
['derivare', 'verb', 'a'],
['derubare', 'verb', 'c'],
['descrivere', 'verb', 'a'],
['descrizione', 'noun', 'a'],
['deserto', 'noun', 'b'],
['deserto', 'adjective', 'b'],
['desiderare', 'verb', 'a'],
['desiderio', 'noun', 'a'],
['design', 'noun', 'b'],
['dessert', 'noun', 'c'],
['destinare', 'verb', 'a'],
['destinazione', 'noun', 'b'],
['destino', 'noun', 'a'],
['destra', 'noun', 'a'],
['destro', 'adjective', 'a'],
['destro', 'noun', 'a'],
['detective', 'noun', 'b'],
['detenere', 'verb', 'b'],
['detenuto', 'past_part', 'c'],
['detenuto', 'adjective', 'c'],
['detenuto', 'noun', 'c'],
['determinare', 'verb', 'a'],
['determinato', 'past_part', 'a'],
['determinato', 'adjective', 'a'],
['determinazione', 'noun', 'b'],
['detersivo', 'adjective', 'c'],
['detersivo', 'noun', 'c'],
['dettagliato', 'past_part', 'b'],
['dettagliato', 'adjective', 'b'],
['dettaglio', 'noun', 'a'],
['dettare', 'verb', 'b'],
['dettato', 'past_part', 'c'],
['dettato', 'adjective', 'c'],
['dettato', 'noun', 'c'],
['devastare', 'verb', 'b'],
['deviare', 'verb', 'c'],
['deviazione', 'noun', 'c'],
['di', 'preposition', 'a'],
['di', 'noun', 'c'],
['diagnosi', 'noun', 'b'],
['dialetto', 'noun', 'a'],
['dialogare', 'verb', 'b'],
['dialogo', 'noun', 'a'],
['diamante', 'noun', 'a'],
['diametro', 'noun', 'b'],
['diario', 'noun', 'b'],
['diario', 'adjective', 'b'],
['diavolo', 'noun', 'a'],
['dibattito', 'noun', 'b'],
['dicembre', 'noun', 'a'],
['dichiarare', 'verb', 'a'],
['dichiarazione', 'noun', 'a'],
['diciotto', 'adjective', 'b'],
['diciotto', 'noun', 'b'],
['dieci', 'adjective', 'a'],
['dieci', 'noun', 'a'],
['diecimila', 'adjective', 'b'],
['diecimila', 'noun', 'b'],
['dieta', 'noun', 'b'],
['dietetico', 'adjective', 'c'],
['dietro', 'preposition', 'a'],
['dietro', 'adverb', 'a'],
['dietro', 'adjective', 'a'],
['dietro', 'noun', 'a'],
['difendere', 'verb', 'a'],
['difensore', 'adjective', 'b'],
['difensore', 'noun', 'b'],
['difesa', 'noun', 'a'],
['difetto', 'noun', 'b'],
['differente', 'pres_part', 'a'],
['differente', 'adjective', 'a'],
['differenza', 'noun', 'a'],
['difficile', 'adjective', 'a'],
['difficile', 'noun', 'a'],
['difficilmente', 'adverb', 'b'],
['difficoltà', 'noun', 'a'],
['diffidente', 'adjective', 'c'],
['diffidente', 'noun', 'c'],
['diffidenza', 'noun', 'c'],
['diffondere', 'verb', 'a'],
['diffusione', 'noun', 'b'],
['diffuso', 'past_part', 'b'],
['diffuso', 'adjective', 'b'],
['diga', 'noun', 'c'],
['digestione', 'noun', 'c'],
['digestivo', 'adjective', 'c'],
['digestivo', 'noun', 'c'],
['digitale', 'adjective', 'b'],
['digitale', 'noun', 'b'],
['digiunare', 'verb', 'c'],
['dignità', 'noun', 'b'],
['diluvio', 'noun', 'c'],
['dimagrante', 'pres_part', 'c'],
['dimagrante', 'adjective', 'c'],
['dimensione', 'noun', 'a'],
['dimenticare', 'verb', 'a'],
['dimettere', 'verb', 'b'],
['dimezzare', 'verb', 'c'],
['diminuire', 'verb', 'b'],
['dimostrare', 'verb', 'a'],
['dimostrazione', 'noun', 'b'],
['dinamica', 'noun', 'b'],
['dinamico', 'adjective', 'b'],
['dinosauro', 'noun', 'c'],
['dintorno', 'adverb', 'b'],
['dintorno', 'noun', 'b'],
['dio', 'noun', 'a'],
['dipartimento', 'noun', 'b'],
['dipendente', 'pres_part', 'a'],
['dipendente', 'adjective', 'a'],
['dipendente', 'noun', 'a'],
['dipendenza', 'noun', 'b'],
['dipendere', 'verb', 'a'],
['dipingere', 'verb', 'b'],
['dipinto', 'past_part', 'b'],
['dipinto', 'adjective', 'b'],
['dipinto', 'noun', 'b'],
['diploma', 'noun', 'b'],
['diplomatico', 'adjective', 'b'],
['diplomatico', 'noun', 'b'],
['dire', 'verb', 'a'],
['dire', 'noun', 'a'],
['diretta', 'noun', 'b'],
['direttamente', 'adverb', 'a'],
['diretto', 'past_part', 'a'],
['diretto', 'adjective', 'a'],
['diretto', 'noun', 'a'],
['direttore', 'noun', 'a'],
['direttore', 'adjective', 'a'],
['direttrice', 'noun', 'c'],
['direzione', 'noun', 'a'],
['dirigente', 'adjective', 'b'],
['dirigente', 'noun', 'b'],
['dirigere', 'verb', 'a'],
['diritto', 'noun', 'a'],
['disagio', 'noun', 'b'],
['disastro', 'noun', 'b'],
['disattento', 'adjective', 'c'],
['discarica', 'noun', 'b'],
['discendere', 'verb', 'b'],
['discepolo', 'noun', 'b'],
['discesa', 'noun', 'b'],
['disciplina', 'noun', 'b'],
['disco', 'noun', 'a'],
['discordia', 'noun', 'c'],
['discorso', 'noun', 'a'],
['discoteca', 'noun', 'b'],
['discreto', 'adjective', 'b'],
['discreto', 'noun', 'b'],
['discussione', 'noun', 'a'],
['discusso', 'past_part', 'b'],
['discusso', 'adjective', 'b'],
['discutere', 'verb', 'a'],
['disegnare', 'verb', 'a'],
['disegno', 'noun', 'a'],
['diseredare', 'verb', 'c'],
['disgrazia', 'noun', 'b'],
['disinfettante', 'pres_part', 'c'],
['disinfettante', 'adjective', 'c'],
['disinfettare', 'verb', 'c'],
['disinteresse', 'noun', 'c'],
['disoccupazione', 'noun', 'b'],
['disonesto', 'adjective', 'c'],
['disonesto', 'noun', 'c'],
['disordinato', 'past_part', 'c'],
['disordinato', 'adjective', 'c'],
['disordine', 'noun', 'b'],
['dispari', 'adjective', 'c'],
['dispensa', 'noun', 'c'],
['disperare', 'verb', 'b'],
['disperato', 'past_part', 'b'],
['disperato', 'adjective', 'b'],
['disperazione', 'noun', 'b'],
['disperdere', 'verb', 'b'],
['dispetto', 'noun', 'b'],
['dispettoso', 'adjective', 'c'],
['dispiacere', 'verb', 'a'],
['disponibile', 'adjective', 'a'],
['disponibile', 'noun', 'a'],
['disponibilità', 'noun', 'b'],
['disporre', 'verb', 'a'],
['dispositivo', 'adjective', 'b'],
['dispositivo', 'noun', 'b'],
['disposizione', 'noun', 'a'],
['disprezzo', 'noun', 'b'],
['dissenso', 'noun', 'c'],
['distacco', 'noun', 'b'],
['distante', 'pres_part', 'b'],
['distante', 'adjective', 'b'],
['distante', 'adverb', 'b'],
['distanza', 'noun', 'a'],
['distendere', 'verb', 'b'],
['disteso', 'past_part', 'c'],
['disteso', 'adjective', 'c'],
['disteso', 'noun', 'c'],
['distinguere', 'verb', 'a'],
['distintivo', 'adjective', 'c'],
['distintivo', 'noun', 'c'],
['distinto', 'past_part', 'b'],
['distinto', 'adjective', 'b'],
['distinto', 'noun', 'b'],
['distinzione', 'noun', 'b'],
['distrarre', 'verb', 'b'],
['distratto', 'past_part', 'c'],
['distratto', 'adjective', 'c'],
['distrazione', 'noun', 'c'],
['distretto', 'noun', 'b'],
['distribuire', 'verb', 'a'],
['distributore', 'adjective', 'b'],
['distributore', 'noun', 'b'],
['distribuzione', 'noun', 'b'],
['distruggere', 'verb', 'a'],
['distrutto', 'past_part', 'c'],
['distrutto', 'adjective', 'c'],
['distruzione', 'noun', 'b'],
['disturbare', 'verb', 'b'],
['disturbo', 'noun', 'b'],
['disubbidiente', 'pres_part', 'c'],
['disubbidiente', 'adjective', 'c'],
['disubbidienza', 'noun', 'c'],
['disubbidire', 'verb', 'c'],
['dito', 'noun', 'a'],
['ditta', 'noun', 'b'],
['dittatura', 'noun', 'b'],
['divano', 'noun', 'a'],
['divano-letto', 'noun', 'c'],
['divenire', 'verb', 'a'],
['divenire', 'noun', 'a'],
['diventare', 'verb', 'a'],
['diversamente', 'adverb', 'b'],
['diversità', 'noun', 'b'],
['diverso', 'adjective', 'a'],
['diverso', 'adjective', 'a'],
['diverso', 'pronoun', 'a'],
['divertente', 'pres_part', 'a'],
['divertente', 'adjective', 'a'],
['divertimento', 'noun', 'b'],
['divertire', 'verb', 'a'],
['divertito', 'past_part', 'b'],
['divertito', 'adjective', 'b'],
['dividere', 'verb', 'a'],
['divieto', 'noun', 'b'],
['divinità', 'noun', 'b'],
['divino', 'adjective', 'b'],
['divino', 'noun', 'b'],
['divisa', 'noun', 'b'],
['divisione', 'noun', 'b'],
['divorare', 'verb', 'b'],
['divorziare', 'verb', 'c'],
['divorzio', 'noun', 'b'],
['dizionario', 'noun', 'b'],
['do', 'noun', 'c'],
['doccia', 'noun', 'b'],
['docciaschiuma', 'noun', 'c'],
['docente', 'pres_part', 'b'],
['docente', 'adjective', 'b'],
['docente', 'noun', 'b'],
['docile', 'adjective', 'c'],
['documentare', 'verb', 'b'],
['documentario', 'adjective', 'b'],
['documentario', 'noun', 'b'],
['documentazione', 'noun', 'b'],
['documento', 'noun', 'a'],
['dodici', 'adjective', 'a'],
['dodici', 'noun', 'a'],
['dogana', 'noun', 'c'],
['dolce', 'adjective', 'a'],
['dolce', 'noun', 'a'],
['dolce', 'adverb', 'a'],
['dolcezza', 'noun', 'b'],
['dolcificante', 'pres_part', 'c'],
['dolcificante', 'adjective', 'c'],
['dolcificante', 'noun', 'c'],
['dolciume', 'noun', 'c'],
['dolere', 'verb', 'c'],
['dolersi', 'verb', 'c'],
['dollaro', 'noun', 'a'],
['dolore', 'noun', 'a'],
['doloroso', 'adjective', 'b'],
['domanda', 'noun', 'a'],
['domandare', 'verb', 'a'],
['domani', 'adverb', 'a'],
['domani', 'noun', 'a'],
['domenica', 'noun', 'a'],
['domestica', 'noun', 'c'],
['domestico', 'adjective', 'b'],
['domestico', 'noun', 'b'],
['dominante', 'pres_part', 'b'],
['dominante', 'adjective', 'b'],
['dominante', 'noun', 'b'],
['dominare', 'verb', 'b'],
['dominio', 'noun', 'b'],
['don', 'noun', 'a'],
['donare', 'verb', 'b'],
['dondolare', 'verb', 'c'],
['donna', 'noun', 'a'],
['dono', 'noun', 'b'],
['dopo', 'adverb', 'a'],
['dopo', 'preposition', 'a'],
['dopo', 'conjunction', 'a'],
['dopo', 'adjective', 'a'],
['dopo', 'noun', 'a'],
['dopobarba', 'noun', 'c'],
['doppio', 'adjective', 'a'],
['doppio', 'noun', 'a'],
['doppio', 'adverb', 'a'],
['doppione', 'noun', 'c'],
['dorato', 'past_part', 'b'],
['dorato', 'adjective', 'b'],
['dorato', 'noun', 'b'],
['dormiglione', 'adjective', 'c'],
['dormiglione', 'noun', 'c'],
['dormire', 'verb', 'a'],
['dorso', 'noun', 'b'],
['dose', 'noun', 'b'],
['dotare', 'verb', 'b'],
['dotato', 'past_part', 'b'],
['dotato', 'adjective', 'b'],
['dote', 'noun', 'b'],
['dottore', 'noun', 'a'],
['dottoressa', 'noun', 'b'],
['dottrina', 'noun', 'b'],
['dove', 'adverb', 'a'],
['dove', 'conjunction', 'a'],
['dove', 'noun', 'a'],
['dovere', 'verb', 'a'],
['dovere', 'noun', 'a'],
['dovuto', 'past_part', 'b'],
['dovuto', 'adjective', 'b'],
['dovuto', 'noun', 'b'],
['dozzina', 'noun', 'b'],
['drago', 'noun', 'b'],
['dramma', 'noun', 'b'],
['drammatico', 'adjective', 'b'],
['dritto', 'adjective', 'b'],
['dritto', 'adverb', 'b'],
['dritto', 'noun', 'b'],
['drizzare', 'verb', 'c'],
['droga', 'noun', 'a'],
['drogare', 'verb', 'b'],
['drogato', 'past_part', 'c'],
['drogato', 'adjective', 'c'],
['drogato', 'noun', 'c'],
['dubbio', 'noun', 'a'],
['dubbio', 'adjective', 'b'],
['dubitare', 'verb', 'b'],
['dublinese', 'adjective', 'c'],
['dublinese', 'noun', 'c'],
['due', 'adjective', 'a'],
['due', 'noun', 'a'],
['duecento', 'adjective', 'b'],
['duecento', 'noun', 'b'],
['duello', 'noun', 'b'],
['duemila', 'adjective', 'b'],
['duemila', 'noun', 'b'],
['dunque', 'conjunction', 'a'],
['dunque', 'noun', 'a'],
['duomo', 'noun', 'c'],
['durante', 'pres_part', 'a'],
['durante', 'preposition', 'a'],
['durante', 'noun', 'a'],
['durare', 'verb', 'a'],
['durata', 'noun', 'a'],
['duro', 'adjective', 'a'],
['duro', 'noun', 'a'],
['duro', 'adverb', 'a'],
['e', 'noun', 'c'],
['e', 'conjunction', 'a'],
['ebbene', 'conjunction', 'b'],
['ebraico', 'adjective', 'b'],
['ebraico', 'noun', 'b'],
['ebreo', 'adjective', 'a'],
['ebreo', 'noun', 'a'],
['eccellente', 'pres_part', 'b'],
['eccellente', 'adjective', 'b'],
['eccellenza', 'noun', 'b'],
['eccessivo', 'adjective', 'b'],
['eccesso', 'noun', 'b'],
['eccetera', 'adverb', 'b'],
['eccezionale', 'adjective', 'b'],
['eccezione', 'noun', 'b'],
['eccitare', 'verb', 'b'],
['ecco', 'adverb', 'a'],
['eco', 'noun', 'b'],
['ecologico', 'adjective', 'b'],
['economia', 'noun', 'a'],
['economico', 'adjective', 'a'],
['economico', 'noun', 'a'],
['economista', 'noun', 'b'],
['edicola', 'noun', 'a'],
['edificio', 'noun', 'a'],
['editore', 'noun', 'a'],
['editore', 'adjective', 'a'],
['editoriale', 'adjective', 'b'],
['editoriale', 'noun', 'b'],
['edizione', 'noun', 'a'],
['educare', 'verb', 'b'],
['educativo', 'adjective', 'b'],
['educato', 'past_part', 'c'],
['educato', 'adjective', 'c'],
['educazione', 'noun', 'a'],
['effe', 'noun', 'c'],
['effettivamente', 'adverb', 'a'],
['effettivo', 'adjective', 'b'],
['effettivo', 'noun', 'b'],
['effetto', 'noun', 'a'],
['effettuare', 'verb', 'a'],
['efficace', 'adjective', 'b'],
['efficacia', 'noun', 'b'],
['efficiente', 'adjective', 'b'],
['efficienza', 'noun', 'b'],
['egiziano', 'adjective', 'c'],
['egiziano', 'noun', 'c'],
['egli', 'pronoun', 'a'],
['elaborare', 'verb', 'b'],
['elaborazione', 'noun', 'b'],
['elastico', 'adjective', 'b'],
['elastico', 'noun', 'b'],
['elegante', 'adjective', 'a'],
['eleganza', 'noun', 'b'],
['eleggere', 'verb', 'b'],
['elementare', 'adjective', 'a'],
['elemento', 'noun', 'a'],
['elemosina', 'noun', 'c'],
['elencare', 'verb', 'b'],
['elenco', 'noun', 'a'],
['elettorale', 'adjective', 'b'],
['elettore', 'noun', 'b'],
['elettricista', 'noun', 'c'],
['elettricità', 'noun', 'c'],
['elettrico', 'adjective', 'a'],
['elettrico', 'noun', 'a'],
['elettrodomestico', 'noun', 'c'],
['elettromagnetico', 'adjective', 'b'],
['elettrone', 'noun', 'b'],
['elettronico', 'adjective', 'a'],
['elevare', 'verb', 'b'],
['elevato', 'past_part', 'b'],
['elevato', 'adjective', 'b'],
['elezione', 'noun', 'b'],
['elica', 'noun', 'c'],
['elicottero', 'noun', 'c'],
['eliminare', 'verb', 'a'],
['eliminazione', 'noun', 'b'],
['elle', 'noun', 'c'],
['elmo', 'noun', 'c'],
['e-mail', 'noun', 'a'],
['emanare', 'verb', 'b'],
['emergenza', 'noun', 'b'],
['emergere', 'verb', 'a'],
['emettere', 'verb', 'b'],
['emigrazione', 'noun', 'c'],
['emiliano', 'adjective', 'c'],
['emiliano', 'noun', 'c'],
['emissione', 'noun', 'b'],
['emme', 'noun', 'c'],
['emmenthal', 'noun', 'c'],
['emo', 'noun', 'b'],
['emotivo', 'adjective', 'b'],
['emotivo', 'noun', 'b'],
['emozionante', 'pres_part', 'c'],
['emozionante', 'adjective', 'c'],
['emozionare', 'verb', 'b'],
['emozionato', 'past_part', 'c'],
['emozionato', 'adjective', 'c'],
['emozione', 'noun', 'a'],
['enciclopedia', 'noun', 'c'],
['energetico', 'adjective', 'b'],
['energetico', 'noun', 'b'],
['energia', 'noun', 'a'],
['enne', 'noun', 'c'],
['ennesimo', 'adjective', 'b'],
['enorme', 'adjective', 'a'],
['ente', 'noun', 'a'],
['entità', 'noun', 'b'],
['entrambi', 'pronoun', 'a'],
['entrambi', 'adjective', 'a'],
['entrare', 'verb', 'a'],
['entrare', 'noun', 'a'],
['entrata', 'noun', 'a'],
['entro', 'preposition', 'a'],
['entro', 'adverb', 'a'],
['entusiasmo', 'noun', 'b'],
['entusiasta', 'adjective', 'b'],
['entusiasta', 'noun', 'b'],
['epifania', 'noun', 'c'],
['episodio', 'noun', 'a'],
['epoca', 'noun', 'a'],
['eppure', 'conjunction', 'a'],
['equazione', 'noun', 'b'],
['equilibrio', 'noun', 'a'],
['equino', 'adjective', 'c'],
['equino', 'noun', 'c'],
['equipaggio', 'noun', 'c'],
['equivalere', 'verb', 'b'],
['equivoco', 'adjective', 'b'],
['equivoco', 'noun', 'b'],
['era', 'noun', 'a'],
['erba', 'noun', 'b'],
['erede', 'noun', 'b'],
['eredità', 'noun', 'b'],
['ereditare', 'verb', 'b'],
['ergastolo', 'noun', 'c'],
['ergere', 'verb', 'b'],
['ernia', 'noun', 'c'],
['eroe', 'noun', 'a'],
['eroina', 'noun', 'c'],
['erotico', 'adjective', 'b'],
['erotico', 'noun', 'b'],
['errare', 'verb', 'b'],
['erre', 'noun', 'c'],
['errore', 'noun', 'a'],
['esagerare', 'verb', 'b'],
['esagerato', 'past_part', 'b'],
['esagerato', 'adjective', 'b'],
['esagerato', 'noun', 'b'],
['esagerazione', 'noun', 'c'],
['esagono', 'noun', 'c'],
['esagono', 'adjective', 'c'],
['esaltare', 'verb', 'b'],
['esaltazione', 'noun', 'c'],
['esame', 'noun', 'a'],
['esaminare', 'verb', 'b'],
['esattamente', 'adverb', 'a'],
['esatto', 'adjective', 'a'],
['esatto', 'adverb', 'a'],
['esaurire', 'verb', 'b'],
['esca', 'noun', 'c'],
['eschimese', 'adjective', 'c'],
['eschimese', 'noun', 'c'],
['esclamare', 'verb', 'b'],
['esclamazione', 'noun', 'c'],
['escludere', 'verb', 'a'],
['esclusione', 'noun', 'b'],
['esclusivamente', 'adverb', 'b'],
['esclusivo', 'adjective', 'b'],
['escluso', 'past_part', 'b'],
['escluso', 'adjective', 'b'],
['escluso', 'noun', 'b'],
['esecutivo', 'adjective', 'b'],
['esecutivo', 'noun', 'b'],
['esecuzione', 'noun', 'b'],
['eseguire', 'verb', 'a'],
['esempio', 'noun', 'a'],
['esemplare', 'noun', 'b'],
['esemplare', 'adjective', 'b'],
['esercitare', 'verb', 'b'],
['esercito', 'noun', 'a'],
['esercizio', 'noun', 'a'],
['esibire', 'verb', 'b'],
['esigenza', 'noun', 'a'],
['esigere', 'verb', 'b'],
['esilio', 'noun', 'c'],
['esistente', 'pres_part', 'b'],
['esistente', 'adjective', 'b'],
['esistente', 'noun', 'b'],
['esistenza', 'noun', 'a'],
['esistere', 'verb', 'a'],
['esitare', 'verb', 'b'],
['esito', 'noun', 'b'],
['esordio', 'noun', 'b'],
['espansione', 'noun', 'b'],
['espellere', 'verb', 'b'],
['esperienza', 'noun', 'a'],
['esperimento', 'noun', 'b'],
['esperto', 'past_part', 'a'],
['esperto', 'adjective', 'a'],
['esperto', 'noun', 'a'],
['esplicito', 'adjective', 'b'],
['esplodere', 'verb', 'b'],
['esplorare', 'verb', 'b'],
['esplosione', 'noun', 'b'],
['esplosivo', 'adjective', 'b'],
['esplosivo', 'noun', 'b'],
['esponente', 'pres_part', 'b'],
['esponente', 'noun', 'b'],
['esporre', 'verb', 'a'],
['esposizione', 'noun', 'b'],
['espressione', 'noun', 'a'],
['espresso', 'past_part', 'c'],
['espresso', 'adjective', 'c'],
['espresso', 'noun', 'c'],
['esprimere', 'verb', 'a'],
['essa', 'pronoun', 'a'],
['esse', 'noun', 'c'],
['esse', 'pronoun', 'b'],
['essenza', 'noun', 'b'],
['essenziale', 'adjective', 'b'],
['essenziale', 'noun', 'b'],
['essenzialmente', 'adverb', 'b'],
['essere', 'verb', 'a'],
['essere', 'noun', 'a'],
['essi', 'pronoun', 'a'],
['esso', 'pronoun', 'a'],
['est', 'noun', 'b'],
['est', 'adjective', 'b'],
['estate', 'noun', 'a'],
['estendere', 'verb', 'b'],
['estensione', 'noun', 'b'],
['esterno', 'adjective', 'a'],
['esterno', 'noun', 'a'],
['estero', 'adjective', 'a'],
['estero', 'noun', 'a'],
['estetico', 'adjective', 'b'],
['estivo', 'adjective', 'b'],
['estone', 'adjective', 'c'],
['estone', 'noun', 'c'],
['estraneo', 'adjective', 'b'],
['estraneo', 'noun', 'b'],
['estrarre', 'verb', 'b'],
['estratto', 'past_part', 'b'],
['estratto', 'adjective', 'b'],
['estratto', 'noun', 'b'],
['estrazione', 'noun', 'b'],
['estremamente', 'adverb', 'b'],
['estremità', 'noun', 'b'],
['estremo', 'adjective', 'a'],
['estremo', 'noun', 'a'],
['età', 'noun', 'a'],
['eterno', 'adjective', 'b'],
['eterno', 'noun', 'b'],
['etica', 'noun', 'b'],
['etichetta', 'noun', 'b'],
['etico', 'adjective', 'b'],
['ettaro', 'noun', 'c'],
['etto', 'noun', 'c'],
['euro', 'noun', 'a'],
['europeo', 'adjective', 'a'],
['europeo', 'noun', 'a'],
['evadere', 'verb', 'c'],
['evaporare', 'verb', 'c'],
['evasione', 'noun', 'b'],
['evento', 'noun', 'a'],
['eventuale', 'adjective', 'a'],
['eventualmente', 'adverb', 'b'],
['evidente', 'adjective', 'a'],
['evidentemente', 'adverb', 'a'],
['evidenza', 'noun', 'b'],
['evidenziare', 'verb', 'b'],
['evidenziatore', 'adjective', 'c'],
['evidenziatore', 'noun', 'c'],
['evitare', 'verb', 'a'],
['evocare', 'verb', 'b'],
['evoluzione', 'noun', 'b'],
['ex', 'adjective', 'a'],
['ex', 'noun', 'a'],
['ex', 'preposition', 'a'],
['extra', 'adjective', 'b'],
['extra', 'noun', 'b'],
['fa', 'adverb', 'a'],
['fabbrica', 'noun', 'a'],
['fabbricare', 'verb', 'b'],
['fabbro', 'noun', 'c'],
['faccenda', 'noun', 'b'],
['faccia', 'noun', 'a'],
['facciata', 'noun', 'b'],
['facile', 'adjective', 'a'],
['facile', 'adverb', 'a'],
['facilità', 'noun', 'b'],
['facilitare', 'verb', 'b'],
['facilitazione', 'noun', 'c'],
['facilmente', 'adverb', 'a'],
['facoltà', 'noun', 'b'],
['fagiano', 'noun', 'c'],
['falco', 'noun', 'c'],
['falegname', 'noun', 'c'],
['fallimento', 'noun', 'b'],
['fallire', 'verb', 'b'],
['fallito', 'past_part', 'b'],
['fallito', 'adjective', 'b'],
['fallito', 'noun', 'b'],
['falso', 'adjective', 'a'],
['falso', 'adverb', 'a'],
['falso', 'noun', 'a'],
['fama', 'noun', 'b'],
['fame', 'noun', 'a'],
['famiglia', 'noun', 'a'],
['familiare', 'adjective', 'a'],
['familiare', 'noun', 'a'],
['famoso', 'adjective', 'a'],
['fan', 'noun', 'b'],
['fanale', 'noun', 'c'],
['fanciulla', 'noun', 'b'],
['fanciullo', 'adjective', 'c'],
['fanciullo', 'noun', 'c'],
['fango', 'noun', 'b'],
['fangoso', 'adjective', 'c'],
['fantascienza', 'noun', 'b'],
['fantasia', 'noun', 'a'],
['fantasma', 'noun', 'b'],
['fantastico', 'adjective', 'a'],
['fantastico', 'noun', 'a'],
['fanteria', 'noun', 'c'],
['fantino', 'noun', 'c'],
['fantoccio', 'noun', 'c'],
['fare', 'verb', 'a'],
['fare', 'noun', 'a'],
['farfalla', 'noun', 'b'],
['farina', 'noun', 'b'],
['farmacia', 'noun', 'b'],
['farmaco', 'noun', 'b'],
['faro', 'noun', 'c'],
['fascia', 'noun', 'a'],
['fasciatoio', 'noun', 'c'],
['fascicolo', 'noun', 'b'],
['fascino', 'noun', 'b'],
['fascio', 'noun', 'b'],
['fascismo', 'noun', 'b'],
['fascista', 'adjective', 'b'],
['fascista', 'noun', 'b'],
['fase', 'noun', 'a'],
['fastidio', 'noun', 'a'],
['fastidioso', 'adjective', 'b'],
['fata', 'noun', 'b'],
['fatica', 'noun', 'a'],
['faticare', 'verb', 'b'],
['faticoso', 'adjective', 'b'],
['fatto', 'noun', 'a'],
['fattore', 'noun', 'a'],
['fattoria', 'noun', 'b'],
['fattura', 'noun', 'b'],
['fatturato', 'past_part', 'b'],
['fatturato', 'adjective', 'b'],
['fatturato', 'noun', 'b'],
['fauna', 'noun', 'c'],
['fava', 'noun', 'c'],
['favola', 'noun', 'b'],
['favoloso', 'adjective', 'b'],
['favore', 'noun', 'a'],
['favorevole', 'adjective', 'b'],
['favorire', 'verb', 'b'],
['fax', 'noun', 'b'],
['fazzoletto', 'noun', 'b'],
['febbraio', 'noun', 'a'],
['febbre', 'noun', 'b'],
['fecondare', 'verb', 'c'],
['fede', 'noun', 'a'],
['fedele', 'adjective', 'b'],
['fedele', 'noun', 'b'],
['fedeltà', 'noun', 'b'],
['federa', 'noun', 'c'],
['federale', 'adjective', 'b'],
['federale', 'noun', 'b'],
['fegato', 'noun', 'b'],
['felice', 'adjective', 'a'],
['felicità', 'noun', 'b'],
['felino', 'noun', 'c'],
['felino', 'adjective', 'c'],
['felpa', 'noun', 'c'],
['femmina', 'noun', 'a'],
['femminile', 'adjective', 'a'],
['femminile', 'noun', 'a'],
['fenomeno', 'noun', 'a'],
['feria', 'noun', 'b'],
['feriale', 'adjective', 'c'],
['ferie', 'noun', 'c'],
['ferire', 'verb', 'b'],
['ferita', 'noun', 'a'],
['ferito', 'past_part', 'b'],
['ferito', 'adjective', 'b'],
['ferito', 'noun', 'b'],
['fermaglio', 'noun', 'c'],
['fermare', 'verb', 'a'],
['fermo', 'adjective', 'a'],
['feroce', 'adjective', 'b'],
['ferragosto', 'noun', 'c'],
['ferramenta', 'noun', 'c'],
['ferro', 'noun', 'a'],
['ferrovia', 'noun', 'b'],
['ferroviario', 'adjective', 'b'],
['ferroviere', 'noun', 'c'],
['fertilizzante', 'pres_part', 'c'],
['fertilizzante', 'adjective', 'c'],
['fertilizzante', 'noun', 'c'],
['fessura', 'noun', 'c'],
['festa', 'noun', 'a'],
['festeggiare', 'verb', 'a'],
['festival', 'noun', 'b'],
['festivo', 'adjective', 'c'],
['fetta', 'noun', 'b'],
['fiaba', 'noun', 'b'],
['fiala', 'noun', 'c'],
['fiamma', 'noun', 'b'],
['fiammifero', 'noun', 'c'],
['fiammifero', 'adjective', 'c'],
['fianco', 'noun', 'a'],
['fiatare', 'verb', 'c'],
['fiato', 'noun', 'b'],
['fibbia', 'noun', 'c'],
['fibra', 'noun', 'b'],
['ficcare', 'verb', 'b'],
['fiction', 'noun', 'b'],
['fidanzamento', 'noun', 'c'],
['fidanzarsi', 'verb', 'b'],
['fidanzata', 'noun', 'b'],
['fidanzato', 'past_part', 'b'],
['fidanzato', 'adjective', 'b'],
['fidanzato', 'noun', 'b'],
['fidarsi', 'verb', 'a'],
['fiducia', 'noun', 'a'],
['fiducioso', 'adjective', 'c'],
['fieno', 'noun', 'c'],
['fiera', 'noun', 'b'],
['fiero', 'adjective', 'b'],
['figlia', 'noun', 'a'],
['figliastro', 'noun', 'c'],
['figlio', 'noun', 'a'],
['figura', 'noun', 'a'],
['figurare', 'verb', 'a'],
['figurina', 'noun', 'c'],
['fila', 'noun', 'a'],
['filante', 'pres_part', 'c'],
['filante', 'adjective', 'c'],
['filante', 'noun', 'c'],
['filare', 'verb', 'b'],
['filastrocca', 'noun', 'c'],
['file', 'noun', 'a'],
['filetto', 'noun', 'c'],
['film', 'noun', 'a'],
['filmato', 'past_part', 'b'],
['filmato', 'adjective', 'b'],
['filmato', 'noun', 'b'],
['filo', 'noun', 'a'],
['filosofia', 'noun', 'a'],
['filosofico', 'adjective', 'b'],
['filosofo', 'noun', 'b'],
['filtrare', 'verb', 'b'],
['filtro', 'noun', 'b'],
['finale', 'adjective', 'a'],
['finale', 'noun', 'a'],
['finalità', 'noun', 'b'],
['finalmente', 'adverb', 'a'],
['finanza', 'noun', 'b'],
['finanziamento', 'noun', 'b'],
['finanziare', 'verb', 'b'],
['finanziario', 'adjective', 'a'],
['finanziatore', 'adjective', 'c'],
['finanziatore', 'noun', 'c'],
['finché', 'conjunction', 'a'],
['fine', 'noun', 'a'],
['fine', 'adjective', 'b'],
['finestra', 'noun', 'a'],
['finestrino', 'noun', 'b'],
['fingere', 'verb', 'a'],
['finimondo', 'noun', 'c'],
['finire', 'verb', 'a'],
['finire', 'noun', 'a'],
['finito', 'past_part', 'b'],
['finito', 'adjective', 'b'],
['finlandese', 'adjective', 'c'],
['finlandese', 'noun', 'c'],
['fino', 'preposition', 'a'],
['fino', 'adverb', 'a'],
['finocchio', 'noun', 'c'],
['finora', 'adverb', 'b'],
['finta', 'noun', 'b'],
['finto', 'past_part', 'a'],
['finto', 'adjective', 'a'],
['fiocco', 'noun', 'c'],
['fionda', 'noun', 'c'],
['fioraio', 'noun', 'c'],
['fiore', 'noun', 'a'],
['fiorentino', 'adjective', 'b'],
['fiorentino', 'noun', 'b'],
['fiorito', 'past_part', 'c'],
['fiorito', 'adjective', 'c'],
['firma', 'noun', 'a'],
['firmare', 'verb', 'a'],
['fiscale', 'adjective', 'b'],
['fiscale', 'noun', 'b'],
['fisicamente', 'adverb', 'b'],
['fisico', 'adjective', 'a'],
['fisico', 'noun', 'a'],
['fissare', 'verb', 'a'],
['fisso', 'adjective', 'a'],
['fisso', 'adverb', 'a'],
['fisso', 'noun', 'a'],
['fitto', 'past_part', 'b'],
['fitto', 'adjective', 'b'],
['fitto', 'adverb', 'b'],
['fitto', 'noun', 'b'],
['fiume', 'noun', 'a'],
['fiuto', 'noun', 'c'],
['flash', 'noun', 'b'],
['flauto', 'noun', 'c'],
['flessibile', 'adjective', 'b'],
['flessibile', 'noun', 'b'],
['flora', 'noun', 'c'],
['fluido', 'adjective', 'b'],
['fluido', 'noun', 'b'],
['fluoro', 'noun', 'c'],
['flusso', 'noun', 'b'],
['foca', 'noun', 'c'],
['focaccia', 'noun', 'c'],
['fodera', 'noun', 'c'],
['foderare', 'verb', 'c'],
['foglia', 'noun', 'b'],
['foglio', 'noun', 'a'],
['fogna', 'noun', 'c'],
['folla', 'noun', 'b'],
['folle', 'adjective', 'b'],
['folle', 'noun', 'b'],
['follia', 'noun', 'b'],
['fondamentale', 'adjective', 'a'],
['fondamentale', 'noun', 'a'],
['fondamentalmente', 'adverb', 'b'],
['fondamento', 'noun', 'b'],
['fondare', 'verb', 'a'],
['fondatore', 'noun', 'b'],
['fondazione', 'noun', 'b'],
['fondere', 'verb', 'b'],
['fondo', 'adjective', 'loc-comando'],
['fondo', 'noun', 'loc-comando'],
['fondo', 'adverb', 'loc-comando'],
['fontana', 'noun', 'b'],
['fontanella', 'noun', 'c'],
['fonte', 'noun', 'a'],
['forare', 'verb', 'b'],
['forbice', 'noun', 'c'],
['forchetta', 'noun', 'c'],
['forcina', 'noun', 'c'],
['foresta', 'noun', 'b'],
['forestale', 'adjective', 'c'],
['forestale', 'noun', 'c'],
['forfora', 'noun', 'c'],
['forma', 'noun', 'a'],
['formaggino', 'noun', 'c'],
['formaggio', 'noun', 'b'],
['formale', 'adjective', 'b'],
['formare', 'verb', 'a'],
['formato', 'past_part', 'b'],
['formato', 'adjective', 'b'],
['formato', 'noun', 'b'],
['formazione', 'noun', 'a'],
['formula', 'noun', 'a'],
['formulare', 'verb', 'b'],
['fornace', 'noun', 'c'],
['fornaio', 'noun', 'c'],
['fornello', 'noun', 'b'],
['fornire', 'verb', 'a'],
['fornitore', 'adjective', 'b'],
['fornitore', 'noun', 'b'],
['forno', 'noun', 'b'],
['foro', 'noun', 'b'],
['forse', 'adverb', 'a'],
['forse', 'noun', 'a'],
['forte', 'adjective', 'a'],
['forte', 'adverb', 'a'],
['forte', 'noun', 'a'],
['fortemente', 'adverb', 'b'],
['fortuna', 'noun', 'a'],
['fortunatamente', 'adverb', 'b'],
['fortunato', 'adjective', 'b'],
['forum', 'noun', 'b'],
['forza', 'noun', 'a'],
['forzare', 'verb', 'b'],
['fosforescente', 'adjective', 'c'],
['fossa', 'noun', 'b'],
['fossetta', 'noun', 'c'],
['fosso', 'noun', 'c'],
['foto', 'noun', 'a'],
['fotografare', 'verb', 'b'],
['fotografia', 'noun', 'a'],
['fotografico', 'adjective', 'b'],
['fotografo', 'noun', 'b'],
['fottere', 'verb', 'b'],
['foulard', 'noun', 'c'],
['fra', 'preposition', 'a'],
['fracasso', 'noun', 'c'],
['fragile', 'adjective', 'b'],
['frammento', 'noun', 'b'],
['francamente', 'adverb', 'b'],
['francese', 'adjective', 'a'],
['francese', 'noun', 'a'],
['francobollo', 'noun', 'c'],
['frangia', 'noun', 'c'],
['frase', 'noun', 'a'],
['fratello', 'noun', 'a'],
['frazione', 'noun', 'b'],
['freccia', 'noun', 'b'],
['freddezza', 'noun', 'c'],
['freddo', 'adjective', 'a'],
['freddo', 'noun', 'a'],
['fregare', 'verb', 'a'],
['frenare', 'verb', 'b'],
['frenetico', 'adjective', 'b'],
['freno', 'noun', 'b'],
['frequentare', 'verb', 'a'],
['frequente', 'adjective', 'b'],
['frequenza', 'noun', 'b'],
['fresco', 'adjective', 'a'],
['fresco', 'noun', 'a'],
['fretta', 'noun', 'a'],
['frigo', 'noun', 'b'],
['frigorifero', 'adjective', 'b'],
['frigorifero', 'noun', 'b'],
['fringuello', 'noun', 'c'],
['frittata', 'noun', 'c'],
['fritto', 'past_part', 'c'],
['fritto', 'adjective', 'c'],
['fritto', 'noun', 'c'],
['friulano', 'adjective', 'c'],
['friulano', 'noun', 'c'],
['fronte', 'noun', 'a'],
['frontiera', 'noun', 'b'],
['frugare', 'verb', 'b'],
['frumento', 'noun', 'c'],
['fruscio', 'noun', 'c'],
['frusta', 'noun', 'c'],
['frutta', 'noun', 'b'],
['fruttivendolo', 'noun', 'c'],
['frutto', 'noun', 'a'],
['fucile', 'noun', 'b'],
['fuga', 'noun', 'a'],
['fuggire', 'verb', 'a'],
['fulmine', 'noun', 'b'],
['fumare', 'verb', 'a'],
['fumetto', 'noun', 'b'],
['fumo', 'noun', 'a'],
['fumo', 'adjective', 'a'],
['fune', 'noun', 'c'],
['funerale', 'noun', 'b'],
['funerale', 'adjective', 'b'],
['fungo', 'noun', 'b'],
['funzionale', 'adjective', 'b'],
['funzionale', 'noun', 'b'],
['funzionamento', 'noun', 'b'],
['funzionare', 'verb', 'a'],
['funzionario', 'noun', 'b'],
['funzione', 'noun', 'a'],
['fuoco', 'noun', 'loc-comando'],
['fuori', 'adverb', 'a'],
['fuori', 'preposition', 'a'],
['fuori', 'noun', 'a'],
['fuori', 'adjective', 'a'],
['furbo', 'adjective', 'b'],
['furbo', 'noun', 'b'],
['furfante', 'noun', 'c'],
['furgone', 'noun', 'b'],
['furia', 'noun', 'b'],
['furioso', 'adjective', 'b'],
['furto', 'noun', 'b'],
['fusione', 'noun', 'b'],
['fuso', 'past_part', 'b'],
['fuso', 'adjective', 'b'],
['fuso', 'noun', 'b'],
['futuro', 'adjective', 'a'],
['futuro', 'noun', 'a'],
['gabbia', 'noun', 'b'],
['galassia', 'noun', 'b'],
['galeotto', 'noun', 'c'],
['galera', 'noun', 'b'],
['galleggiare', 'verb', 'c'],
['galleria', 'noun', 'b'],
['gallese', 'adjective', 'c'],
['gallese', 'noun', 'c'],
['galletta', 'noun', 'c'],
['gallina', 'noun', 'b'],
['gallo', 'noun', 'c'],
['gamba', 'noun', 'a'],
['gambero', 'noun', 'c'],
['gambo', 'noun', 'c'],
['ganascia', 'noun', 'c'],
['gancio', 'noun', 'c'],
['gara', 'noun', 'a'],
['garage', 'noun', 'b'],
['garantire', 'verb', 'a'],
['garanzia', 'noun', 'b'],
['garbo', 'noun', 'c'],
['gargarismo', 'noun', 'c'],
['garofano', 'noun', 'c'],
['garza', 'noun', 'c'],
['gas', 'noun', 'a'],
['gasolio', 'noun', 'c'],
['gassosa', 'noun', 'c'],
['gastronomia', 'noun', 'c'],
['gatto', 'noun', 'a'],
['gavetta', 'noun', 'c'],
['gay', 'adjective', 'b'],
['gay', 'noun', 'b'],
['gazza', 'noun', 'c'],
['gelateria', 'noun', 'c'],
['gelatina', 'noun', 'c'],
['gelato', 'past_part', 'b'],
['gelato', 'adjective', 'b'],
['gelato', 'noun', 'b'],
['gelido', 'adjective', 'b'],
['gelo', 'noun', 'c'],
['gelosia', 'noun', 'b'],
['geloso', 'adjective', 'b'],
['gelsomino', 'noun', 'c'],
['gemello', 'adjective', 'b'],
['gemello', 'noun', 'b'],
['gemma', 'noun', 'c'],
['gene', 'noun', 'b'],
['generale', 'adjective', 'a'],
['generale', 'noun', 'a'],
['generalmente', 'adverb', 'b'],
['generare', 'verb', 'a'],
['generazione', 'noun', 'a'],
['genere', 'noun', 'a'],
['generico', 'adjective', 'b'],
['generico', 'noun', 'b'],
['generosità', 'noun', 'c'],
['generoso', 'adjective', 'b'],
['genetico', 'adjective', 'b'],
['gengiva', 'noun', 'c'],
['geniale', 'adjective', 'b'],
['genio', 'noun', 'b'],
['genitore', 'noun', 'a'],
['gennaio', 'noun', 'a'],
['genovese', 'adjective', 'c'],
['genovese', 'noun', 'c'],
['gente', 'noun', 'a'],
['gentile', 'adjective', 'a'],
['gentile', 'noun', 'a'],
['genuino', 'adjective', 'c'],
['geografico', 'adjective', 'b'],
['geografo', 'noun', 'c'],
['geometra', 'noun', 'c'],
['geometria', 'noun', 'c'],
['geometrico', 'adjective', 'c'],
['gesso', 'noun', 'b'],
['gestione', 'noun', 'a'],
['gestire', 'verb', 'a'],
['gesto', 'noun', 'a'],
['gestore', 'noun', 'b'],
['gettare', 'verb', 'a'],
['gettone', 'noun', 'c'],
['ghiaccio', 'noun', 'b'],
['ghiacciolo', 'noun', 'c'],
['ghianda', 'noun', 'c'],
['ghiro', 'noun', 'c'],
['gi', 'noun', 'c'],
['già', 'adverb', 'a'],
['giacca', 'noun', 'a'],
['giacere', 'verb', 'b'],
['giaguaro', 'noun', 'c'],
['giallo', 'adjective', 'a'],
['giallo', 'noun', 'a'],
['giapponese', 'adjective', 'a'],
['giapponese', 'noun', 'a'],
['giardinaggio', 'noun', 'c'],
['giardiniera', 'noun', 'c'],
['giardino', 'noun', 'a'],
['gigante', 'noun', 'b'],
['gigante', 'adjective', 'b'],
['gigantesco', 'adjective', 'b'],
['giglio', 'noun', 'b'],
['ginnastica', 'noun', 'b'],
['ginocchio', 'noun', 'a'],
['giocare', 'verb', 'a'],
['giocatore', 'noun', 'a'],
['giocattolo', 'noun', 'b'],
['gioco', 'noun', 'a'],
['gioia', 'noun', 'a'],
['gioiello', 'noun', 'b'],
['gioioso', 'adjective', 'c'],
['giordano', 'adjective', 'c'],
['giordano', 'noun', 'c'],
['giornale', 'noun', 'a'],
['giornale', 'adjective', 'a'],
['giornalino', 'noun', 'c'],
['giornalista', 'noun', 'a'],
['giornata', 'noun', 'a'],
['giorno', 'noun', 'a'],
['giostra', 'noun', 'c'],
['giovane', 'adjective', 'a'],
['giovane', 'noun', 'a'],
['giovanile', 'adjective', 'b'],
['giovedì', 'noun', 'b'],
['gioventù', 'noun', 'b'],
['giovinezza', 'noun', 'b'],
['giraffa', 'noun', 'c'],
['girare', 'verb', 'a'],
['giravite', 'noun', 'c'],
['giretto', 'noun', 'c'],
['giro', 'noun', 'a'],
['gironzolare', 'verb', 'c'],
['girotondo', 'noun', 'c'],
['gita', 'noun', 'b'],
['giù', 'adverb', 'a'],
['giù', 'adjective', 'a'],
['giubba', 'noun', 'c'],
['giubbotto', 'noun', 'c'],
['giudicare', 'verb', 'a'],
['giudice', 'noun', 'a'],
['giudiziario', 'adjective', 'b'],
['giudizio', 'noun', 'a'],
['giugno', 'noun', 'a'],
['giungere', 'verb', 'a'],
['giungla', 'noun', 'c'],
['giuramento', 'noun', 'b'],
['giurare', 'verb', 'a'],
['giuria', 'noun', 'c'],
['giuridico', 'adjective', 'b'],
['giustamente', 'adverb', 'b'],
['giustificare', 'verb', 'b'],
['giustizia', 'noun', 'a'],
['giusto', 'adjective', 'a'],
['giusto', 'noun', 'a'],
['giusto', 'adverb', 'a'],
['gli', 'pronoun', 'a'],
['glicine', 'noun', 'c'],
['global', 'adjective', 'b'],
['global', 'noun', 'b'],
['globale', 'adjective', 'b'],
['gloria', 'noun', 'b'],
['gnocco', 'noun', 'c'],
['gnomo', 'noun', 'c'],
['goal', 'noun', 'b'],
['gobbo', 'adjective', 'c'],
['gobbo', 'noun', 'c'],
['goccia', 'noun', 'b'],
['godere', 'verb', 'a'],
['gola', 'noun', 'b'],
['goloso', 'adjective', 'c'],
['gomito', 'noun', 'b'],
['gomitolo', 'noun', 'c'],
['gomma', 'noun', 'b'],
['gonfiare', 'verb', 'b'],
['gonfio', 'adjective', 'b'],
['gonfio', 'noun', 'b'],
['gonna', 'noun', 'b'],
['gorgonzola', 'noun', 'c'],
['gorilla', 'noun', 'c'],
['gossip', 'noun', 'b'],
['governare', 'verb', 'b'],
['governatore', 'noun', 'b'],
['governo', 'noun', 'a'],
['gradino', 'noun', 'b'],
['gradire', 'verb', 'b'],
['grado', 'noun', 'a'],
['graffiare', 'verb', 'c'],
['graffio', 'noun', 'c'],
['grafico', 'adjective', 'b'],
['grafico', 'noun', 'b'],
['grammatica', 'noun', 'b'],
['grammo', 'noun', 'b'],
['grana', 'noun', 'c'],
['granaio', 'noun', 'c'],
['granchio', 'noun', 'c'],
['grande', 'adjective', 'a'],
['grande', 'noun', 'a'],
['grandezza', 'noun', 'b'],
['grandine', 'noun', 'c'],
['grandioso', 'adjective', 'b'],
['grano', 'noun', 'b'],
['granturco', 'noun', 'c'],
['grappa', 'noun', 'c'],
['grasso', 'adjective', 'a'],
['grasso', 'noun', 'a'],
['gratis', 'adverb', 'b'],
['gratis', 'adjective', 'b'],
['grattare', 'verb', 'b'],
['grattugiato', 'past_part', 'c'],
['grattugiato', 'adjective', 'c'],
['gratuito', 'adjective', 'b'],
['grave', 'adjective', 'a'],
['grave', 'noun', 'a'],
['grave', 'adverb', 'a'],
['gravidanza', 'noun', 'b'],
['gravità', 'noun', 'b'],
['grazie', 'exclamation', 'a'],
['grazie', 'noun', 'a'],
['grazioso', 'adjective', 'c'],
['greco', 'adjective', 'a'],
['greco', 'noun', 'a'],
['grembiule', 'noun', 'c'],
['gridare', 'verb', 'a'],
['grido', 'noun', 'b'],
['grigio', 'adjective', 'a'],
['grigio', 'noun', 'a'],
['griglia', 'noun', 'c'],
['grinza', 'noun', 'c'],
['grissino', 'noun', 'c'],
['grossista', 'noun', 'c'],
['grosso', 'adjective', 'a'],
['grosso', 'noun', 'a'],
['grotta', 'noun', 'b'],
['gru', 'noun', 'c'],
['gruppo', 'noun', 'a'],
['guadagnare', 'verb', 'a'],
['guadagno', 'noun', 'b'],
['guaio', 'noun', 'b'],
['guaire', 'verb', 'c'],
['guancia', 'noun', 'b'],
['guanciale', 'noun', 'c'],
['guanciale', 'adjective', 'c'],
['guanto', 'noun', 'b'],
['guardare', 'verb', 'a'],
['guardaroba', 'noun', 'c'],
['guardia', 'noun', 'a'],
['guarire', 'verb', 'b'],
['guarnizione', 'noun', 'c'],
['guasto', 'noun', 'c'],
['guerra', 'noun', 'a'],
['guerriero', 'noun', 'b'],
['guerriero', 'adjective', 'b'],
['gufo', 'noun', 'c'],
['guida', 'noun', 'a'],
['guidare', 'verb', 'a'],
['guidatore', 'noun', 'c'],
['guinzaglio', 'noun', 'c'],
['gustare', 'verb', 'b'],
['gusto', 'noun', 'a'],
['gustoso', 'adjective', 'c'],
['hamburger', 'noun', 'c'],
['hobby', 'noun', 'b'],
['home', 'noun', 'b'],
['hotel', 'noun', 'b'],
['hyperlink', 'noun', 'b'],
['i', 'noun', 'c'],
['i', 'determiner', 'b'],
['icona', 'noun', 'b'],
['ics', 'noun', 'c'],
['idea', 'noun', 'a'],
['ideale', 'adjective', 'a'],
['ideale', 'noun', 'a'],
['ideare', 'verb', 'b'],
['identico', 'adjective', 'b'],
['identico', 'noun', 'b'],
['identificare', 'verb', 'a'],
['identificazione', 'noun', 'b'],
['identità', 'noun', 'a'],
['ideologia', 'noun', 'b'],
['ideologico', 'adjective', 'b'],
['idiota', 'adjective', 'a'],
['idiota', 'noun', 'a'],
['idraulico', 'adjective', 'b'],
['idraulico', 'noun', 'b'],
['idrico', 'adjective', 'b'],
['idrogeno', 'noun', 'b'],
['ieri', 'adverb', 'a'],
['ieri', 'noun', 'a'],
['igiene', 'noun', 'c'],
['ignorante', 'pres_part', 'b'],
['ignorante', 'adjective', 'b'],
['ignorante', 'noun', 'b'],
['ignoranza', 'noun', 'b'],
['ignorare', 'verb', 'a'],
['ignoto', 'adjective', 'b'],
['ignoto', 'noun', 'b'],
['il', 'determiner', 'a'],
['il', 'pronoun', 'a'],
['illecito', 'adjective', 'b'],
['illecito', 'noun', 'b'],
['illegale', 'adjective', 'b'],
['illegale', 'noun', 'b'],
['illegittimo', 'adjective', 'c'],
['illegittimo', 'noun', 'c'],
['illudere', 'verb', 'b'],
['illuminare', 'verb', 'b'],
['illuminato', 'past_part', 'b'],
['illuminato', 'adjective', 'b'],
['illuminato', 'noun', 'b'],
['illusione', 'noun', 'b'],
['illustrare', 'verb', 'b'],
['illustre', 'adjective', 'b'],
['imballare', 'verb', 'c'],
['imbarazzante', 'pres_part', 'b'],
['imbarazzante', 'adjective', 'b'],
['imbarazzato', 'past_part', 'b'],
['imbarazzato', 'adjective', 'b'],
['imbarazzo', 'noun', 'b'],
['imbattersi', 'verb', 'b'],
['imbecille', 'adjective', 'b'],
['imbecille', 'noun', 'b'],
['imbiancare', 'verb', 'c'],
['imbianchino', 'noun', 'c'],
['imbottigliare', 'verb', 'c'],
['imbrogliare', 'verb', 'c'],
['imbroglio', 'noun', 'c'],
['imbuto', 'noun', 'c'],
['imitare', 'verb', 'b'],
['immaginare', 'verb', 'a'],
['immaginare', 'noun', 'a'],
['immaginario', 'adjective', 'b'],
['immaginario', 'noun', 'b'],
['immaginazione', 'noun', 'b'],
['immagine', 'noun', 'a'],
['immaturo', 'adjective', 'c'],
['immediatamente', 'adverb', 'a'],
['immediato', 'adjective', 'b'],
['immediato', 'noun', 'b'],
['immenso', 'adjective', 'b'],
['immenso', 'noun', 'b'],
['immergere', 'verb', 'b'],
['immigrato', 'past_part', 'b'],
['immigrato', 'adjective', 'b'],
['immigrato', 'noun', 'b'],
['immobile', 'adjective', 'a'],
['immobile', 'noun', 'a'],
['immobiliare', 'adjective', 'b'],
['immobiliare', 'noun', 'b'],
['immondizia', 'noun', 'c'],
['impallidire', 'verb', 'c'],
['imparare', 'verb', 'a'],
['impastare', 'verb', 'c'],
['impatto', 'noun', 'b'],
['impaziente', 'adjective', 'c'],
['impaziente', 'noun', 'c'],
['impazzire', 'verb', 'b'],
['impedire', 'verb', 'a'],
['impegnare', 'verb', 'a'],
['impegnativo', 'adjective', 'b'],
['impegnato', 'past_part', 'c'],
['impegnato', 'adjective', 'c'],
['impegno', 'noun', 'a'],
['imperare', 'verb', 'b'],
['imperatore', 'noun', 'b'],
['imperiale', 'adjective', 'b'],
['imperiale', 'noun', 'b'],
['impermeabile', 'adjective', 'c'],
['impermeabile', 'noun', 'c'],
['impero', 'noun', 'b'],
['impero', 'adjective', 'b'],
['impianto', 'noun', 'a'],
['impiegare', 'verb', 'a'],
['impiegato', 'past_part', 'b'],
['impiegato', 'adjective', 'b'],
['impiegato', 'noun', 'b'],
['impiego', 'noun', 'b'],
['implicare', 'verb', 'b'],
['imporre', 'verb', 'a'],
['importante', 'pres_part', 'a'],
['importante', 'adjective', 'a'],
['importante', 'noun', 'a'],
['importanza', 'noun', 'a'],
['importare', 'verb', 'a'],
['importo', 'noun', 'b'],
['impossibile', 'adjective', 'a'],
['impossibile', 'noun', 'a'],
['impostare', 'verb', 'b'],
['impostazione', 'noun', 'b'],
['impreciso', 'adjective', 'c'],
['imprenditore', 'noun', 'b'],
['impresa', 'noun', 'a'],
['impressionante', 'pres_part', 'b'],
['impressionante', 'adjective', 'b'],
['impressionare', 'verb', 'b'],
['impressione', 'noun', 'a'],
['imprevisto', 'adjective', 'b'],
['imprevisto', 'noun', 'b'],
['imprigionare', 'verb', 'c'],
['improbabile', 'adjective', 'b'],
['impronta', 'noun', 'b'],
['improvvisamente', 'adverb', 'b'],
['improvvisare', 'verb', 'b'],
['improvviso', 'adjective', 'a'],
['improvviso', 'noun', 'a'],
['imprudente', 'adjective', 'c'],
['imprudente', 'noun', 'c'],
['impulsivo', 'adjective', 'c'],
['impulsivo', 'noun', 'c'],
['impulso', 'noun', 'b'],
['imputata', 'noun', 'b'],
['imputato', 'past_part', 'a'],
['imputato', 'adjective', 'a'],
['imputato', 'noun', 'a'],
['in', 'preposition', 'a'],
['inaspettato', 'adjective', 'b'],
['inaugurare', 'verb', 'b'],
['incamminare', 'verb', 'c'],
['incantare', 'verb', 'c'],
['incapace', 'adjective', 'b'],
['incapace', 'noun', 'b'],
['incapacità', 'noun', 'b'],
['incaricare', 'verb', 'b'],
['incarico', 'noun', 'b'],
['incartare', 'verb', 'c'],
['incassare', 'verb', 'b'],
['incasso', 'noun', 'c'],
['incastrare', 'verb', 'b'],
['incatenare', 'verb', 'c'],
['incazzarsi', 'verb', 'b'],
['incendio', 'noun', 'b'],
['incertezza', 'noun', 'b'],
['incerto', 'adjective', 'b'],
['incerto', 'noun', 'b'],
['inchiesta', 'noun', 'b'],
['inchiodare', 'verb', 'c'],
['incidente', 'noun', 'a'],
['incidere', 'verb', 'b'],
['incinta', 'adjective', 'b'],
['incitare', 'verb', 'c'],
['incivile', 'adjective', 'c'],
['incivile', 'noun', 'c'],
['includere', 'verb', 'b'],
['incluso', 'past_part', 'b'],
['incluso', 'adjective', 'b'],
['incluso', 'noun', 'b'],
['incollare', 'verb', 'b'],
['incominciare', 'verb', 'b'],
['incompleto', 'adjective', 'c'],
['incomprensibile', 'adjective', 'b'],
['inconsolabile', 'adjective', 'c'],
['incontentabile', 'adjective', 'c'],
['incontrare', 'verb', 'a'],
['incontro', 'noun', 'a'],
['incontro', 'adverb', 'b'],
['incoraggiare', 'verb', 'b'],
['incoronare', 'verb', 'c'],
['incorreggibile', 'adjective', 'c'],
['incredibile', 'adjective', 'a'],
['incremento', 'noun', 'b'],
['incrinare', 'verb', 'c'],
['incrociare', 'verb', 'b'],
['incrocio', 'noun', 'c'],
['incubo', 'noun', 'b'],
['incurabile', 'adjective', 'c'],
['incurabile', 'noun', 'c'],
['incuriosire', 'verb', 'b'],
['indagare', 'verb', 'b'],
['indagine', 'noun', 'a'],
['indescrivibile', 'adjective', 'c'],
['indiano', 'adjective', 'b'],
['indiano', 'noun', 'b'],
['indicare', 'verb', 'a'],
['indicazione', 'noun', 'a'],
['indice', 'noun', 'a'],
['indice', 'adjective', 'a'],
['indietreggiare', 'verb', 'c'],
['indietro', 'adverb', 'a'],
['indietro', 'adjective', 'a'],
['indietro', 'loc-comando', 'a'],
['indifeso', 'adjective', 'c'],
['indifferente', 'adjective', 'b'],
['indifferente', 'noun', 'b'],
['indifferenza', 'noun', 'b'],
['indigestione', 'noun', 'c'],
['indimenticabile', 'adjective', 'c'],
['indipendente', 'adjective', 'b'],
['indipendente', 'noun', 'b'],
['indipendentemente', 'adverb', 'b'],
['indipendenza', 'noun', 'b'],
['indiretto', 'adjective', 'b'],
['indirizzare', 'verb', 'b'],
['indirizzo', 'noun', 'a'],
['indisciplinato', 'adjective', 'c'],
['indispensabile', 'adjective', 'b'],
['indispensabile', 'noun', 'b'],
['individuale', 'adjective', 'b'],
['individuare', 'verb', 'a'],
['individuo', 'noun', 'a'],
['individuo', 'adjective', 'a'],
['indizio', 'noun', 'b'],
['indossare', 'verb', 'a'],
['indovinare', 'verb', 'b'],
['indovinello', 'noun', 'c'],
['indubbiamente', 'adverb', 'b'],
['indumento', 'noun', 'c'],
['indurre', 'verb', 'b'],
['industria', 'noun', 'a'],
['industriale', 'adjective', 'a'],
['industriale', 'noun', 'a'],
['inedito', 'adjective', 'b'],
['inefficace', 'adjective', 'c'],
['inerte', 'adjective', 'c'],
['inesistente', 'adjective', 'b'],
['inesperienza', 'noun', 'c'],
['inesperto', 'adjective', 'c'],
['inevitabile', 'adjective', 'b'],
['inevitabile', 'noun', 'b'],
['inevitabilmente', 'adverb', 'b'],
['infame', 'adjective', 'c'],
['infame', 'noun', 'c'],
['infantile', 'adjective', 'b'],
['infanzia', 'noun', 'b'],
['infarto', 'noun', 'b'],
['infatti', 'conjunction', 'a'],
['infatti', 'adverb', 'a'],
['infedele', 'adjective', 'c'],
['infedele', 'noun', 'c'],
['infelice', 'adjective', 'b'],
['infelice', 'noun', 'b'],
['inferiore', 'adjective', 'a'],
['infermiera', 'noun', 'b'],
['infermiere', 'noun', 'c'],
['inferno', 'noun', 'b'],
['inferno', 'adjective', 'b'],
['infezione', 'noun', 'b'],
['infilare', 'verb', 'a'],
['infine', 'adverb', 'a'],
['infinito', 'adjective', 'a'],
['infinito', 'noun', 'a'],
['influenza', 'noun', 'b'],
['influenzare', 'verb', 'b'],
['informare', 'verb', 'a'],
['informatica', 'noun', 'b'],
['informatico', 'adjective', 'b'],
['informatico', 'noun', 'b'],
['informativo', 'adjective', 'b'],
['informazione', 'noun', 'a'],
['infradito', 'adjective', 'c'],
['infradito', 'noun', 'c'],
['infrastruttura', 'noun', 'b'],
['infuriare', 'verb', 'b'],
['infuso', 'past_part', 'c'],
['infuso', 'adjective', 'c'],
['infuso', 'noun', 'c'],
['ingannare', 'verb', 'b'],
['inganno', 'noun', 'b'],
['ingegnere', 'noun', 'b'],
['ingegneria', 'noun', 'b'],
['ingelosire', 'verb', 'c'],
['ingenuo', 'adjective', 'b'],
['ingenuo', 'noun', 'b'],
['ingessare', 'verb', 'c'],
['ingiusto', 'adjective', 'b'],
['ingiusto', 'noun', 'b'],
['inglese', 'adjective', 'a'],
['inglese', 'noun', 'a'],
['ingoiare', 'verb', 'b'],
['ingorgo', 'noun', 'c'],
['ingrandire', 'verb', 'c'],
['ingrassare', 'verb', 'b'],
['ingrediente', 'noun', 'b'],
['ingresso', 'noun', 'a'],
['iniezione', 'noun', 'c'],
['iniziale', 'adjective', 'a'],
['iniziale', 'noun', 'a'],
['inizialmente', 'adverb', 'b'],
['iniziare', 'verb', 'a'],
['iniziativa', 'noun', 'a'],
['inizio', 'noun', 'a'],
['innamorarsi', 'verb', 'a'],
['innamorato', 'past_part', 'b'],
['innamorato', 'adjective', 'b'],
['innamorato', 'noun', 'b'],
['innanzitutto', 'adverb', 'b'],
['innervosire', 'verb', 'c'],
['innocente', 'adjective', 'b'],
['innocente', 'noun', 'b'],
['innocuo', 'adjective', 'b'],
['innovativo', 'adjective', 'b'],
['innovazione', 'noun', 'b'],
['inoltre', 'adverb', 'a'],
['inquadrare', 'verb', 'b'],
['inquietante', 'pres_part', 'b'],
['inquietante', 'adjective', 'b'],
['inquinamento', 'noun', 'b'],
['inquinare', 'verb', 'c'],
['inquinato', 'past_part', 'c'],
['inquinato', 'adjective', 'c'],
['insalata', 'noun', 'b'],
['insegna', 'noun', 'b'],
['insegnamento', 'noun', 'b'],
['insegnante', 'pres_part', 'a'],
['insegnante', 'adjective', 'a'],
['insegnante', 'noun', 'a'],
['insegnare', 'verb', 'a'],
['inseguire', 'verb', 'b'],
['inseparabile', 'adjective', 'c'],
['inseparabile', 'noun', 'c'],
['inserimento', 'noun', 'b'],
['inserire', 'verb', 'a'],
['insetticida', 'adjective', 'c'],
['insetto', 'noun', 'b'],
['insieme', 'adverb', 'a'],
['insieme', 'noun', 'a'],
['insinuare', 'verb', 'b'],
['insistere', 'verb', 'a'],
['insoddisfatto', 'adjective', 'c'],
['insolito', 'adjective', 'b'],
['insolito', 'noun', 'b'],
['insomma', 'adverb', 'a'],
['insopportabile', 'adjective', 'b'],
['insospettire', 'verb', 'c'],
['installare', 'verb', 'b'],
['insuccesso', 'noun', 'c'],
['insultare', 'verb', 'b'],
['insulto', 'noun', 'b'],
['intanto', 'adverb', 'a'],
['intasare', 'verb', 'c'],
['intatto', 'adjective', 'b'],
['integrale', 'adjective', 'b'],
['integrale', 'noun', 'b'],
['integrare', 'verb', 'b'],
['integrazione', 'noun', 'b'],
['intellettuale', 'adjective', 'b'],
['intellettuale', 'noun', 'b'],
['intelligente', 'adjective', 'a'],
['intelligenza', 'noun', 'b'],
['intendere', 'verb', 'a'],
['intensità', 'noun', 'b'],
['intenso', 'adjective', 'a'],
['intento', 'noun', 'b'],
['intenzione', 'noun', 'a'],
['interagire', 'verb', 'b'],
['interamente', 'adverb', 'b'],
['interazione', 'noun', 'b'],
['intercettare', 'verb', 'b'],
['intercettazione', 'noun', 'b'],
['interessante', 'pres_part', 'a'],
['interessante', 'adjective', 'a'],
['interessare', 'verb', 'a'],
['interessato', 'past_part', 'b'],
['interessato', 'adjective', 'b'],
['interessato', 'noun', 'b'],
['interesse', 'noun', 'a'],
['interiore', 'adjective', 'b'],
['interiore', 'noun', 'b'],
['interlocutore', 'noun', 'b'],
['internazionale', 'adjective', 'a'],
['internazionale', 'noun', 'a'],
['internet', 'noun', 'a'],
['interno', 'adjective', 'a'],
['interno', 'noun', 'a'],
['intero', 'adjective', 'a'],
['intero', 'noun', 'a'],
['interpretare', 'verb', 'a'],
['interpretazione', 'noun', 'b'],
['interprete', 'noun', 'b'],
['interrogare', 'verb', 'b'],
['interrogativo', 'adjective', 'b'],
['interrogativo', 'noun', 'b'],
['interrogatorio', 'adjective', 'b'],
['interrogatorio', 'noun', 'b'],
['interrogazione', 'noun', 'c'],
['interrompere', 'verb', 'a'],
['interruttore', 'noun', 'c'],
['interruzione', 'noun', 'b'],
['intervallo', 'noun', 'b'],
['intervenire', 'verb', 'a'],
['intervento', 'noun', 'a'],
['intervista', 'noun', 'a'],
['intesa', 'noun', 'b'],
['intestare', 'verb', 'b'],
['intestino', 'noun', 'c'],
['intimidire', 'verb', 'c'],
['intimità', 'noun', 'b'],
['intimo', 'adjective', 'b'],
['intimo', 'noun', 'b'],
['intitolare', 'verb', 'b'],
['intonaco', 'noun', 'c'],
['intorno', 'adverb', 'a'],
['intorno', 'preposition', 'a'],
['intorno', 'adjective', 'a'],
['intorno', 'noun', 'a'],
['intraprendere', 'verb', 'b'],
['intravedere', 'verb', 'b'],
['intrecciare', 'verb', 'b'],
['introdurre', 'verb', 'a'],
['introduzione', 'noun', 'b'],
['intuire', 'verb', 'b'],
['intuizione', 'noun', 'b'],
['inutile', 'adjective', 'a'],
['invadente', 'pres_part', 'c'],
['invadente', 'adjective', 'c'],
['invadente', 'noun', 'c'],
['invadere', 'verb', 'b'],
['invasione', 'noun', 'b'],
['invecchiare', 'verb', 'b'],
['invece', 'adverb', 'a'],
['inventare', 'verb', 'a'],
['invenzione', 'noun', 'b'],
['invernale', 'adjective', 'b'],
['invernale', 'noun', 'b'],
['inverno', 'noun', 'a'],
['investimento', 'noun', 'b'],
['investire', 'verb', 'a'],
['inviare', 'verb', 'a'],
['inviato', 'past_part', 'b'],
['inviato', 'adjective', 'b'],
['inviato', 'noun', 'b'],
['invidiare', 'verb', 'b'],
['invidioso', 'adjective', 'c'],
['invidioso', 'noun', 'c'],
['invincibile', 'adjective', 'c'],
['invisibile', 'adjective', 'b'],
['invisibile', 'noun', 'b'],
['invitare', 'verb', 'a'],
['invitato', 'past_part', 'b'],
['invitato', 'adjective', 'b'],
['invitato', 'noun', 'b'],
['invito', 'noun', 'b'],
['invocare', 'verb', 'b'],
['inzuppare', 'verb', 'c'],
['io', 'pronoun', 'a'],
['ionico', 'adjective', 'c'],
['ipotesi', 'noun', 'a'],
['ipotizzare', 'verb', 'b'],
['ippopotamo', 'noun', 'c'],
['ipsilon', 'noun', 'c'],
['ira', 'noun', 'b'],
['irlandese', 'adjective', 'b'],
['irlandese', 'noun', 'b'],
['ironia', 'noun', 'b'],
['ironico', 'adjective', 'b'],
['irriconoscibile', 'adjective', 'c'],
['irritare', 'verb', 'b'],
['iscritto', 'past_part', 'b'],
['iscritto', 'adjective', 'b'],
['iscritto', 'noun', 'b'],
['iscrivere', 'verb', 'a'],
['iscrizione', 'noun', 'b'],
['islamico', 'adjective', 'b'],
['islamico', 'noun', 'b'],
['islandese', 'adjective', 'c'],
['islandese', 'noun', 'c'],
['isola', 'noun', 'a'],
['isolare', 'verb', 'b'],
['isolato', 'past_part', 'b'],
['isolato', 'adjective', 'b'],
['isolato', 'noun', 'b'],
['ispettore', 'noun', 'b'],
['ispirare', 'verb', 'a'],
['ispirazione', 'noun', 'b'],
['israeliano', 'adjective', 'c'],
['israeliano', 'noun', 'c'],
['istante', 'noun', 'a'],
['istanza', 'noun', 'b'],
['istintivo', 'adjective', 'c'],
['istinto', 'noun', 'b'],
['istituto', 'noun', 'a'],
['istituzionale', 'adjective', 'b'],
['istituzione', 'noun', 'a'],
['istruttivo', 'adjective', 'c'],
['istruttore', 'noun', 'c'],
['istruzione', 'noun', 'a'],
['italiano', 'adjective', 'a'],
['italiano', 'noun', 'a'],
['iugoslavo', 'adjective', 'c'],
['iugoslavo', 'noun', 'c'],
['jeans', 'noun', 'b'],
['karatè', 'noun', 'c'],
['ketchup', 'noun', 'c'],
['killer', 'noun', 'b'],
['killer', 'adjective', 'b'],
['kit', 'noun', 'c'],
['kiwi', 'noun', 'c'],
['là', 'adverb', 'a'],
['la', 'determiner', 'a'],
['la', 'pronoun', 'a'],
['labbro', 'noun', 'a'],
['labirinto', 'noun', 'c'],
['laboratorio', 'noun', 'a'],
['laborioso', 'adjective', 'c'],
['lacca', 'noun', 'c'],
['lacca', 'adjective', 'c'],
['laccio', 'noun', 'c'],
['lacrima', 'noun', 'a'],
['laddove', 'adverb', 'b'],
['laddove', 'conjunction', 'b'],
['ladro', 'noun', 'b'],
['laggiù', 'adverb', 'b'],
['lago', 'noun', 'a'],
['laico', 'adjective', 'b'],
['laico', 'noun', 'b'],
['lama', 'noun', 'b'],
['lamentare', 'verb', 'a'],
['lamentela', 'noun', 'c'],
['lametta', 'noun', 'c'],
['lamiera', 'noun', 'c'],
['lampada', 'noun', 'b'],
['lampadario', 'noun', 'c'],
['lampo', 'noun', 'b'],
['lampo', 'adjective', 'b'],
['lampo', 'noun', 'b'],
['lana', 'noun', 'b'],
['lancetta', 'noun', 'c'],
['lanciare', 'verb', 'a'],
['lancio', 'noun', 'b'],
['lanterna', 'noun', 'c'],
['lapis', 'noun', 'c'],
['lardo', 'noun', 'c'],
['larghezza', 'noun', 'c'],
['largo', 'adjective', 'a'],
['largo', 'noun', 'a'],
['largo', 'adverb', 'a'],
['lasagna', 'noun', 'c'],
['lasciare', 'verb', 'a'],
['lassù', 'adverb', 'b'],
['lastra', 'noun', 'b'],
['laterale', 'adjective', 'b'],
['laterale', 'noun', 'b'],
['latino', 'adjective', 'b'],
['latino', 'noun', 'b'],
['lato', 'noun', 'a'],
['latta', 'noun', 'c'],
['lattante', 'pres_part', 'c'],
['lattante', 'adjective', 'c'],
['lattante', 'noun', 'c'],
['latte', 'noun', 'a'],
['latte', 'adjective', 'a'],
['latteria', 'noun', 'c'],
['lattina', 'noun', 'c'],
['lattuga', 'noun', 'c'],
['laurea', 'noun', 'b'],
['laureare', 'verb', 'b'],
['laureato', 'past_part', 'b'],
['laureato', 'adjective', 'b'],
['laureato', 'noun', 'b'],
['lava', 'noun', 'c'],
['lavabo', 'noun', 'c'],
['lavagna', 'noun', 'c'],
['lavagna', 'adjective', 'c'],
['lavanda', 'noun', 'c'],
['lavanderia', 'noun', 'c'],
['lavandino', 'noun', 'c'],
['lavapiatti', 'noun', 'c'],
['lavare', 'verb', 'a'],
['lavastoviglie', 'noun', 'c'],
['lavatrice', 'noun', 'b'],
['lavello', 'noun', 'c'],
['lavorare', 'verb', 'a'],
['lavorativo', 'adjective', 'b'],
['lavoratore', 'adjective', 'a'],
['lavoratore', 'noun', 'a'],
['lavorazione', 'noun', 'b'],
['lavoro', 'noun', 'a'],
['laziale', 'adjective', 'c'],
['laziale', 'noun', 'c'],
['le', 'determiner', 'a'],
['le', 'pronoun', 'a'],
['le', 'pronoun', 'a'],
['leader', 'noun', 'b'],
['lealtà', 'noun', 'c'],
['lebbra', 'noun', 'c'],
['leccare', 'verb', 'b'],
['leccio', 'noun', 'c'],
['lecito', 'adjective', 'b'],
['lecito', 'noun', 'b'],
['lega', 'noun', 'b'],
['legale', 'adjective', 'a'],
['legale', 'noun', 'a'],
['legame', 'noun', 'b'],
['legare', 'verb', 'a'],
['legato', 'past_part', 'a'],
['legato', 'adjective', 'a'],
['legato', 'noun', 'a'],
['legge', 'noun', 'a'],
['leggenda', 'noun', 'b'],
['leggere', 'verb', 'a'],
['leggermente', 'adverb', 'b'],
['leggero', 'adjective', 'a'],
['leggero', 'adverb', 'a'],
['leggero', 'noun', 'a'],
['legislativo', 'adjective', 'b'],
['legittimo', 'adjective', 'b'],
['legna', 'noun', 'c'],
['legno', 'noun', 'a'],
['legume', 'noun', 'c'],
['lei', 'pronoun', 'a'],
['lentamente', 'adverb', 'a'],
['lente', 'noun', 'c'],
['lenticchia', 'noun', 'c'],
['lentiggine', 'noun', 'c'],
['lento', 'adjective', 'a'],
['lento', 'noun', 'a'],
['lento', 'adverb', 'a'],
['lenza', 'noun', 'c'],
['lenzuolo', 'noun', 'b'],
['leone', 'noun', 'b'],
['leonessa', 'noun', 'c'],
['leopardo', 'noun', 'c'],
['lepre', 'noun', 'c'],
['lesione', 'noun', 'b'],
['lessare', 'verb', 'c'],
['lessema', 'noun', 'b'],
['lettera', 'noun', 'a'],
['letterale', 'adjective', 'c'],
['letteralmente', 'adverb', 'b'],
['letterario', 'adjective', 'b'],
['letteratura', 'noun', 'a'],
['letto', 'noun', 'a'],
['lettone', 'noun', 'c'],
['lettore', 'noun', 'a'],
['lettura', 'noun', 'a'],
['leva', 'noun', 'b'],
['levare', 'verb', 'a'],
['levare', 'noun', 'a'],
['lezione', 'noun', 'a'],
['lì', 'adverb', 'a'],
['li', 'pronoun', 'a'],
['libanese', 'adjective', 'b'],
['libanese', 'noun', 'b'],
['liberale', 'adjective', 'b'],
['liberale', 'noun', 'b'],
['liberamente', 'adverb', 'b'],
['liberare', 'verb', 'a'],
['liberazione', 'noun', 'b'],
['libero', 'adjective', 'a'],
['libero', 'noun', 'a'],
['libertà', 'noun', 'a'],
['libico', 'adjective', 'c'],
['libico', 'noun', 'c'],
['libraio', 'noun', 'c'],
['libreria', 'noun', 'b'],
['libretto', 'noun', 'b'],
['libro', 'noun', 'a'],
['licenza', 'noun', 'b'],
['licenziamento', 'noun', 'c'],
['licenziare', 'verb', 'b'],
['liceo', 'noun', 'b'],
['lido', 'noun', 'c'],
['lieto', 'adjective', 'b'],
['lieve', 'adjective', 'b'],
['lievito', 'noun', 'c'],
['ligure', 'adjective', 'c'],
['ligure', 'noun', 'c'],
['lima', 'noun', 'c'],
['limare', 'verb', 'c'],
['limitare', 'verb', 'a'],
['limitato', 'past_part', 'b'],
['limitato', 'adjective', 'b'],
['limite', 'noun', 'a'],
['limite', 'adjective', 'a'],
['limonata', 'noun', 'c'],
['limone', 'noun', 'b'],
['limone', 'adjective', 'b'],
['linea', 'noun', 'a'],
['lineare', 'adjective', 'b'],
['lineare', 'noun', 'b'],
['linfa', 'noun', 'b'],
['lingerie', 'noun', 'c'],
['lingua', 'noun', 'a'],
['linguaggio', 'noun', 'a'],
['linguistica', 'noun', 'b'],
['linguistico', 'adjective', 'b'],
['linguistico', 'noun', 'b'],
['link', 'noun', 'b'],
['liquido', 'adjective', 'a'],
['liquido', 'noun', 'a'],
['liquore', 'noun', 'c'],
['lira', 'noun', 'a'],
['lirico', 'adjective', 'b'],
['lisbonese', 'adjective', 'c'],
['lisbonese', 'noun', 'c'],
['liscio', 'adjective', 'b'],
['liscio', 'noun', 'b'],
['lista', 'noun', 'a'],
['lite', 'noun', 'b'],
['litigare', 'verb', 'a'],
['litigio', 'noun', 'b'],
['litro', 'noun', 'b'],
['lituano', 'adjective', 'c'],
['lituano', 'noun', 'c'],
['live', 'adjective', 'b'],
['livello', 'noun', 'a'],
['lo', 'determiner', 'a'],
['lo', 'pronoun', 'a'],
['locale', 'adjective', 'a'],
['locale', 'noun', 'a'],
['locale', 'noun', 'a'],
['località', 'noun', 'b'],
['locanda', 'noun', 'c'],
['locazione', 'noun', 'b'],
['locomotiva', 'noun', 'c'],
['logica', 'noun', 'b'],
['logico', 'adjective', 'b'],
['logico', 'noun', 'b'],
['logoro', 'past_part', 'c'],
['logoro', 'adjective', 'c'],
['lombardo', 'adjective', 'b'],
['lombardo', 'noun', 'b'],
['londinese', 'adjective', 'c'],
['londinese', 'noun', 'c'],
['lontananza', 'noun', 'b'],
['lontano', 'adjective', 'a'],
['lontano', 'adverb', 'a'],
['lontano', 'noun', 'a'],
['lonza', 'noun', 'c'],
['look', 'noun', 'b'],
['loro', 'pronoun', 'a'],
['loro', 'adjective', 'a'],
['lotta', 'noun', 'a'],
['lottare', 'verb', 'b'],
['lozione', 'noun', 'c'],
['lucano', 'adjective', 'c'],
['lucano', 'noun', 'c'],
['luccicare', 'verb', 'c'],
['lucciola', 'noun', 'c'],
['luce', 'noun', 'a'],
['lucente', 'pres_part', 'c'],
['lucente', 'adjective', 'c'],
['lucente', 'noun', 'c'],
['lucertola', 'noun', 'c'],
['lucidare', 'verb', 'c'],
['lucido', 'adjective', 'b'],
['lucido', 'noun', 'b'],
['luglio', 'noun', 'a'],
['lui', 'pronoun', 'a'],
['lumaca', 'noun', 'c'],
['luminoso', 'adjective', 'b'],
['luna', 'noun', 'a'],
['lunedì', 'noun', 'a'],
['lunghezza', 'noun', 'b'],
['lungo', 'adjective', 'a'],
['lungo', 'preposition', 'a'],
['lungo', 'noun', 'a'],
['luogo', 'noun', 'a'],
['lupo', 'noun', 'a'],
['lussemburghese', 'adjective', 'c'],
['lussemburghese', 'noun', 'c'],
['lusso', 'noun', 'b'],
['lutto', 'noun', 'b'],
['ma', 'conjunction', 'a'],
['ma', 'noun', 'a'],
['maccherone', 'noun', 'c'],
['macchia', 'noun', 'a'],
['macchina', 'noun', 'a'],
['macchinista', 'noun', 'c'],
['macedone', 'adjective', 'c'],
['macedone', 'noun', 'c'],
['macedonia', 'noun', 'c'],
['maceria', 'noun', 'b'],
['macinare', 'verb', 'c'],
['madonna', 'noun', 'b'],
['madonna', 'exclamation', 'b'],
['madre', 'noun', 'a'],
['madrileno', 'adjective', 'c'],
['madrileno', 'noun', 'c'],
['madrileno', 'adjective', 'c'],
['madrileno', 'noun', 'c'],
['madrina', 'noun', 'c'],
['maestra', 'noun', 'b'],
['maestranza', 'noun', 'c'],
['maestro', 'noun', 'a'],
['maestro', 'adjective', 'a'],
['mafia', 'noun', 'b'],
['mafioso', 'adjective', 'b'],
['mafioso', 'noun', 'b'],
['magari', 'exclamation', 'a'],
['magari', 'conjunction', 'a'],
['magari', 'adverb', 'a'],
['magazzino', 'noun', 'b'],
['maggio', 'noun', 'a'],
['maggioranza', 'noun', 'a'],
['maggiorenne', 'adjective', 'c'],
['maggiorenne', 'noun', 'c'],
['maggiormente', 'adverb', 'b'],
['magia', 'noun', 'b'],
['magico', 'adjective', 'a'],
['magistrato', 'noun', 'b'],
['magistratura', 'noun', 'b'],
['maglia', 'noun', 'a'],
['maglietta', 'noun', 'b'],
['magnetico', 'adjective', 'b'],
['magnifico', 'adjective', 'b'],
['mago', 'noun', 'b'],
['mago', 'adjective', 'b'],
['magro', 'adjective', 'b'],
['magro', 'noun', 'b'],
['mah', 'exclamation', 'b'],
['mai', 'adverb', 'a'],
['maiale', 'noun', 'b'],
['maionese', 'noun', 'c'],
['mais', 'noun', 'c'],
['maiuscola', 'noun', 'c'],
['malato', 'adjective', 'a'],
['malato', 'noun', 'a'],
['malattia', 'noun', 'a'],
['malaugurio', 'noun', 'c'],
['malavita', 'noun', 'c'],
['male', 'adverb', 'a'],
['male', 'exclamation', 'a'],
['male', 'noun', 'a'],
['maledetto', 'past_part', 'b'],
['maledetto', 'adjective', 'b'],
['maledetto', 'noun', 'b'],
['maledizione', 'noun', 'b'],
['maledizione', 'exclamation', 'b'],
['maleducato', 'adjective', 'c'],
['maleducato', 'noun', 'c'],
['maleducazione', 'noun', 'c'],
['malgrado', 'noun', 'b'],
['malgrado', 'adverb', 'b'],
['malgrado', 'conjunction', 'b'],
['malgrado', 'preposition', 'b'],
['malinconia', 'noun', 'b'],
['malinteso', 'adjective', 'c'],
['malinteso', 'noun', 'c'],
['malizia', 'noun', 'c'],
['maltempo', 'noun', 'c'],
['maltese', 'adjective', 'c'],
['maltese', 'noun', 'c'],
['maltrattamento', 'noun', 'c'],
['maltrattare', 'verb', 'c'],
['malva', 'noun', 'c'],
['malvagio', 'adjective', 'b'],
['malvagio', 'noun', 'b'],
['mamma', 'noun', 'a'],
['mammella', 'noun', 'c'],
['mammifero', 'noun', 'c'],
['manager', 'noun', 'b'],
['mancanza', 'noun', 'a'],
['mancare', 'verb', 'a'],
['mancato', 'past_part', 'b'],
['mancato', 'adjective', 'b'],
['mancino', 'adjective', 'c'],
['mancino', 'noun', 'c'],
['manco', 'adjective', 'b'],
['manco', 'adverb', 'b'],
['mandare', 'verb', 'a'],
['mandarino', 'noun', 'c'],
['mandarino', 'adjective', 'c'],
['mandato', 'past_part', 'b'],
['mandato', 'adjective', 'b'],
['mandato', 'noun', 'b'],
['mandorla', 'noun', 'c'],
['mandorlo', 'noun', 'c'],
['manganello', 'noun', 'c'],
['mangiare', 'verb', 'a'],
['mangime', 'noun', 'c'],
['mania', 'noun', 'b'],
['maniaco', 'adjective', 'c'],
['maniaco', 'noun', 'c'],
['manica', 'noun', 'b'],
['manico', 'noun', 'b'],
['maniera', 'noun', 'a'],
['manifestare', 'verb', 'a'],
['manifestazione', 'noun', 'a'],
['manifesto', 'noun', 'b'],
['mano', 'noun', 'a'],
['manodopera', 'noun', 'c'],
['manoscritto', 'adjective', 'b'],
['manoscritto', 'noun', 'b'],
['manovale', 'noun', 'c'],
['manovra', 'noun', 'b'],
['mantello', 'noun', 'b'],
['mantenere', 'verb', 'a'],
['manuale', 'adjective', 'b'],
['manuale', 'noun', 'b'],
['manuale', 'noun', 'b'],
['manutenzione', 'noun', 'b'],
['manzo', 'noun', 'c'],
['mappa', 'noun', 'b'],
['marca', 'noun', 'b'],
['marcare', 'verb', 'b'],
['marchigiano', 'adjective', 'c'],
['marchigiano', 'noun', 'c'],
['marchio', 'noun', 'b'],
['marcia', 'noun', 'b'],
['marciapiede', 'noun', 'b'],
['marcio', 'adjective', 'b'],
['marcio', 'noun', 'b'],
['marcire', 'verb', 'c'],
['marco', 'noun', 'a'],
['mare', 'noun', 'a'],
['marea', 'noun', 'b'],
['maresciallo', 'noun', 'b'],
['margherita', 'noun', 'c'],
['marginale', 'adjective', 'b'],
['marginale', 'noun', 'b'],
['margine', 'noun', 'b'],
['marinaio', 'noun', 'b'],
['marino', 'adjective', 'b'],
['marino', 'noun', 'b'],
['marionetta', 'noun', 'c'],
['marito', 'noun', 'a'],
['marketing', 'noun', 'b'],
['marmellata', 'noun', 'c'],
['marmo', 'noun', 'b'],
['marocchino', 'adjective', 'c'],
['marocchino', 'noun', 'c'],
['marrone', 'noun', 'b'],
['marrone', 'adjective', 'b'],
['martedì', 'noun', 'b'],
['marzo', 'noun', 'a'],
['mascarpone', 'noun', 'c'],
['maschera', 'noun', 'b'],
['mascherare', 'verb', 'b'],
['mascherato', 'past_part', 'c'],
['mascherato', 'adjective', 'c'],
['maschile', 'adjective', 'a'],
['maschile', 'noun', 'a'],
['maschio', 'noun', 'a'],
['maschio', 'adjective', 'a'],
['massa', 'noun', 'a'],
['massa', 'adverb', 'a'],
['massacrare', 'verb', 'b'],
['massacro', 'noun', 'c'],
['massaggio', 'noun', 'c'],
['massaia', 'noun', 'c'],
['massiccio', 'adjective', 'b'],
['massiccio', 'noun', 'b'],
['massimo', 'adjective', 'a'],
['massimo', 'noun', 'a'],
['massimo', 'adverb', 'a'],
['master', 'noun', 'b'],
['masticare', 'verb', 'b'],
['masturbare', 'verb', 'b'],
['matematica', 'noun', 'b'],
['matematico', 'adjective', 'b'],
['matematico', 'noun', 'b'],
['materasso', 'noun', 'b'],
['materia', 'noun', 'a'],
['materiale', 'adjective', 'a'],
['materiale', 'noun', 'a'],
['maternità', 'noun', 'b'],
['materno', 'adjective', 'b'],
['matita', 'noun', 'b'],
['matricola', 'noun', 'b'],
['matrimoniale', 'adjective', 'b'],
['matrimoniale', 'noun', 'b'],
['matrimonio', 'noun', 'a'],
['mattina', 'noun', 'a'],
['mattinata', 'noun', 'b'],
['mattino', 'noun', 'a'],
['matto', 'adjective', 'a'],
['matto', 'noun', 'a'],
['mattone', 'noun', 'b'],
['mattone', 'adjective', 'b'],
['mattone', 'noun', 'b'],
['maturare', 'verb', 'b'],
['maturità', 'noun', 'b'],
['maturo', 'adjective', 'b'],
['mazzo', 'noun', 'b'],
['me', 'pronoun', 'a'],
['meccanico', 'adjective', 'a'],
['meccanico', 'noun', 'a'],
['meccanismo', 'noun', 'a'],
['medaglia', 'noun', 'b'],
['medesimo', 'adjective', 'b'],
['medesimo', 'pronoun', 'b'],
['media', 'noun', 'a'],
['media', 'noun', 'b'],
['mediante', 'preposition', 'b'],
['medicare', 'verb', 'c'],
['medicina', 'noun', 'a'],
['medico', 'noun', 'a'],
['medico', 'adjective', 'b'],
['medievale', 'adjective', 'b'],
['medio', 'adjective', 'a'],
['medio', 'noun', 'a'],
['medioevo', 'noun', 'b'],
['meditare', 'verb', 'b'],
['mediterraneo', 'adjective', 'b'],
['mediterraneo', 'noun', 'b'],
['meglio', 'adverb', 'a'],
['meglio', 'adjective', 'a'],
['meglio', 'noun', 'a'],
['mela', 'noun', 'b'],
['melagrana', 'noun', 'c'],
['melanzana', 'noun', 'c'],
['melo', 'noun', 'c'],
['melograno', 'noun', 'c'],
['melone', 'noun', 'c'],
['membrana', 'noun', 'b'],
['membro', 'noun', 'a'],
['memoria', 'noun', 'a'],
['menare', 'verb', 'b'],
['mendicante', 'pres_part', 'c'],
['mendicante', 'adjective', 'c'],
['mendicante', 'noun', 'c'],
['meno', 'adverb', 'a'],
['meno', 'adjective', 'a'],
['meno', 'preposition', 'a'],
['meno', 'noun', 'a'],
['mensa', 'noun', 'b'],
['mensile', 'adjective', 'b'],
['mensile', 'noun', 'b'],
['mensola', 'noun', 'c'],
['menta', 'noun', 'c'],
['mentale', 'adjective', 'a'],
['mentalità', 'noun', 'b'],
['mente', 'noun', 'a'],
['mentire', 'verb', 'a'],
['mento', 'noun', 'b'],
['mentre', 'conjunction', 'a'],
['menu', 'noun', 'b'],
['menzogna', 'noun', 'b'],
['meraviglia', 'noun', 'b'],
['meravigliare', 'verb', 'b'],
['meraviglioso', 'adjective', 'a'],
['meraviglioso', 'noun', 'a'],
['mercante', 'noun', 'b'],
['mercato', 'noun', 'a'],
['merce', 'noun', 'b'],
['merceria', 'noun', 'c'],
['mercoledì', 'noun', 'b'],
['merda', 'noun', 'a'],
['merenda', 'noun', 'c'],
['merendina', 'noun', 'c'],
['meridiano', 'adjective', 'c'],
['meridiano', 'noun', 'c'],
['meridionale', 'adjective', 'a'],
['meridionale', 'noun', 'a'],
['meridione', 'noun', 'c'],
['meritare', 'verb', 'a'],
['merito', 'noun', 'a'],
['merlo', 'noun', 'c'],
['merluzzo', 'noun', 'c'],
['mero', 'adjective', 'b'],
['mescolare', 'verb', 'b'],
['mese', 'noun', 'a'],
['messa', 'noun', 'b'],
['messa', 'noun', 'b'],
['messaggio', 'noun', 'a'],
['messe', 'noun', 'c'],
['messicano', 'adjective', 'c'],
['messicano', 'noun', 'c'],
['mestiere', 'noun', 'a'],
['mestolo', 'noun', 'c'],
['mestruazione', 'noun', 'c'],
['metà', 'noun', 'a'],
['meta', 'noun', 'b'],
['metafora', 'noun', 'b'],
['metallico', 'adjective', 'b'],
['metallo', 'noun', 'b'],
['metalmeccanico', 'adjective', 'c'],
['metalmeccanico', 'noun', 'c'],
['meteo', 'adjective', 'b'],
['meteo', 'noun', 'b'],
['metodo', 'noun', 'a'],
['metro', 'noun', 'a'],
['metropolitano', 'adjective', 'b'],
['metropolitano', 'noun', 'b'],
['mettere', 'verb', 'a'],
['mezzanotte', 'noun', 'b'],
['mezzo', 'adjective', 'a'],
['mezzo', 'noun', 'a'],
['mezzo', 'adverb', 'a'],
['mezzogiorno', 'noun', 'b'],
['mi', 'pronoun', 'a'],
['miagolare', 'verb', 'c'],
['mica', 'noun', 'a'],
['mica', 'adverb', 'a'],
['micio', 'noun', 'c'],
['microfono', 'noun', 'b'],
['miele', 'noun', 'b'],
['miele', 'adjective', 'b'],
['mietere', 'verb', 'c'],
['migliaio', 'noun', 'c'],
['migliaio', 'noun', 'a'],
['miglioramento', 'noun', 'b'],
['migliorare', 'verb', 'a'],
['migliore', 'adjective', 'a'],
['migliore', 'noun', 'a'],
['migliore', 'adverb', 'a'],
['mignolo', 'noun', 'c'],
['mila', 'adjective', 'a'],
['milanese', 'adjective', 'b'],
['milanese', 'noun', 'b'],
['miliardo', 'noun', 'a'],
['milione', 'noun', 'a'],
['militare', 'adjective', 'a'],
['militare', 'noun', 'a'],
['mille', 'adjective', 'a'],
['mille', 'noun', 'a'],
['millennio', 'noun', 'b'],
['millimetro', 'noun', 'b'],
['mimosa', 'noun', 'c'],
['minaccia', 'noun', 'b'],
['minacciare', 'verb', 'a'],
['minchia', 'noun', 'b'],
['minestra', 'noun', 'c'],
['minestrone', 'noun', 'c'],
['mini', 'adjective', 'c'],
['miniera', 'noun', 'b'],
['minigonna', 'noun', 'c'],
['minimo', 'adjective', 'a'],
['minimo', 'noun', 'a'],
['ministero', 'noun', 'a'],
['ministro', 'noun', 'a'],
['minoranza', 'noun', 'b'],
['minore', 'adjective', 'a'],
['minore', 'noun', 'a'],
['minuscolo', 'adjective', 'b'],
['minuto', 'noun', 'a'],
['mio', 'adjective', 'a'],
['mio', 'pronoun', 'a'],
['miracolo', 'noun', 'a'],
['mirare', 'verb', 'b'],
['mischiare', 'verb', 'b'],
['miscuglio', 'noun', 'c'],
['miseria', 'noun', 'b'],
['misero', 'adjective', 'b'],
['missile', 'adjective', 'c'],
['missile', 'noun', 'c'],
['missione', 'noun', 'a'],
['mister', 'noun', 'c'],
['misterioso', 'adjective', 'b'],
['mistero', 'noun', 'a'],
['misto', 'adjective', 'b'],
['misto', 'noun', 'b'],
['misura', 'noun', 'a'],
['misurare', 'verb', 'b'],
['misurazione', 'noun', 'c'],
['mitico', 'adjective', 'b'],
['mito', 'noun', 'b'],
['mitragliatrice', 'noun', 'c'],
['mobile', 'adjective', 'a'],
['mobile', 'noun', 'a'],
['mobilio', 'noun', 'c'],
['mocassino', 'noun', 'c'],
['moda', 'noun', 'a'],
['modalità', 'noun', 'b'],
['modella', 'noun', 'b'],
['modellare', 'verb', 'c'],
['modello', 'noun', 'a'],
['moderato', 'past_part', 'b'],
['moderato', 'adjective', 'b'],
['moderato', 'adverb', 'b'],
['moderato', 'noun', 'b'],
['moderatore', 'adjective', 'b'],
['moderatore', 'noun', 'b'],
['modernità', 'noun', 'b'],
['moderno', 'adjective', 'a'],
['moderno', 'noun', 'a'],
['modestia', 'noun', 'c'],
['modesto', 'adjective', 'b'],
['modifica', 'noun', 'b'],
['modificare', 'verb', 'a'],
['modificazione', 'noun', 'b'],
['modo', 'noun', 'a'],
['modulo', 'noun', 'b'],
['moglie', 'noun', 'a'],
['molecola', 'noun', 'b'],
['molisano', 'adjective', 'c'],
['molisano', 'noun', 'c'],
['molla', 'noun', 'c'],
['mollare', 'verb', 'b'],
['mollusco', 'noun', 'c'],
['molo', 'noun', 'c'],
['moltiplicare', 'verb', 'b'],
['molto', 'adjective', 'a'],
['molto', 'pronoun', 'a'],
['molto', 'adverb', 'a'],
['molto', 'noun', 'a'],
['momento', 'noun', 'a'],
['monaca', 'noun', 'c'],
['monaco', 'noun', 'c'],
['monarchica', 'noun', 'c'],
['mondiale', 'adjective', 'a'],
['mondiale', 'noun', 'a'],
['mondo', 'noun', 'a'],
['monello', 'noun', 'c'],
['moneta', 'noun', 'a'],
['monetario', 'adjective', 'b'],
['monitor', 'noun', 'b'],
['monologo', 'noun', 'b'],
['montaggio', 'noun', 'b'],
['montagna', 'noun', 'a'],
['montare', 'verb', 'b'],
['monte', 'noun', 'a'],
['montenegrino', 'adjective', 'c'],
['montenegrino', 'noun', 'c'],
['monumento', 'noun', 'b'],
['mora', 'noun', 'b'],
['morale', 'adjective', 'a'],
['morale', 'noun', 'a'],
['morbido', 'adjective', 'b'],
['morbido', 'noun', 'b'],
['mordere', 'verb', 'b'],
['morire', 'verb', 'a'],
['moro', 'adjective', 'b'],
['moro', 'noun', 'b'],
['morsicare', 'verb', 'c'],
['morso', 'noun', 'c'],
['mortadella', 'noun', 'c'],
['mortale', 'adjective', 'b'],
['mortale', 'noun', 'b'],
['morte', 'noun', 'a'],
['morto', 'past_part', 'a'],
['morto', 'adjective', 'a'],
['morto', 'noun', 'a'],
['mosca', 'noun', 'b'],
['moscovita', 'adjective', 'c'],
['moscovita', 'noun', 'c'],
['mossa', 'noun', 'b'],
['mostarda', 'noun', 'c'],
['mostra', 'noun', 'a'],
['mostrare', 'verb', 'a'],
['mostro', 'noun', 'b'],
['motel', 'noun', 'c'],
['motivare', 'verb', 'b'],
['motivazione', 'noun', 'b'],
['motivo', 'noun', 'a'],
['moto', 'noun', 'a'],
['moto', 'noun', 'b'],
['motociclismo', 'noun', 'c'],
['motociclista', 'adjective', 'c'],
['motociclista', 'noun', 'c'],
['motore', 'adjective', 'a'],
['motore', 'noun', 'a'],
['motorino', 'noun', 'b'],
['motoscafo', 'noun', 'c'],
['mousse', 'noun', 'c'],
['movimento', 'noun', 'a'],
['mozzarella', 'noun', 'c'],
['mucca', 'noun', 'b'],
['mucchio', 'noun', 'b'],
['muggire', 'verb', 'c'],
['muggito', 'past_part', 'c'],
['muggito', 'noun', 'c'],
['mugnaio', 'noun', 'c'],
['mugolare', 'verb', 'c'],
['mulino', 'noun', 'c'],
['multa', 'noun', 'b'],
['multare', 'verb', 'c'],
['multinazionale', 'adjective', 'b'],
['multinazionale', 'noun', 'b'],
['multiplo', 'adjective', 'b'],
['multiplo', 'noun', 'b'],
['multipresa', 'noun', 'c'],
['mummia', 'noun', 'c'],
['mungere', 'verb', 'c'],
['municipio', 'noun', 'c'],
['muovere', 'verb', 'a'],
['murare', 'verb', 'c'],
['muratore', 'noun', 'c'],
['muro', 'noun', 'a'],
['muschio', 'noun', 'c'],
['muschio', 'adjective', 'c'],
['muscolare', 'adjective', 'b'],
['muscolare', 'noun', 'b'],
['muscolo', 'noun', 'a'],
['museo', 'noun', 'a'],
['musica', 'noun', 'a'],
['musicale', 'adjective', 'a'],
['musicista', 'noun', 'b'],
['muso', 'noun', 'b'],
['musulmano', 'adjective', 'b'],
['musulmano', 'noun', 'b'],
['muta', 'noun', 'c'],
['mutamento', 'noun', 'b'],
['mutanda', 'noun', 'b'],
['mutandina', 'noun', 'c'],
['mutare', 'verb', 'b'],
['mutazione', 'noun', 'b'],
['mutilato', 'past_part', 'c'],
['mutilato', 'adjective', 'c'],
['mutilato', 'noun', 'c'],
['muto', 'adjective', 'b'],
['muto', 'noun', 'b'],
['mutuo', 'noun', 'b'],
['nanna', 'noun', 'c'],
['nano', 'adjective', 'b'],
['nano', 'noun', 'b'],
['napoletano', 'adjective', 'b'],
['napoletano', 'noun', 'b'],
['narrare', 'verb', 'b'],
['narrativo', 'adjective', 'b'],
['narratore', 'noun', 'b'],
['narrazione', 'noun', 'b'],
['nasale', 'adjective', 'b'],
['nasale', 'noun', 'b'],
['nascere', 'verb', 'a'],
['nascere', 'noun', 'a'],
['nascita', 'noun', 'a'],
['nascondere', 'verb', 'a'],
['nascondiglio', 'noun', 'c'],
['nascondino', 'noun', 'c'],
['nascosto', 'past_part', 'a'],
['nascosto', 'adjective', 'a'],
['nascosto', 'noun', 'a'],
['naso', 'noun', 'a'],
['nastro', 'noun', 'a'],
['natale', 'adjective', 'a'],
['natale', 'noun', 'a'],
['natalizio', 'adjective', 'b'],
['natalizio', 'noun', 'b'],
['nato', 'past_part', 'b'],
['nato', 'adjective', 'b'],
['nato', 'noun', 'b'],
['natura', 'noun', 'a'],
['naturale', 'adjective', 'a'],
['naturale', 'noun', 'a'],
['naturalmente', 'adverb', 'a'],
['naufragio', 'noun', 'c'],
['navale', 'adjective', 'c'],
['nave', 'noun', 'a'],
['navicella', 'noun', 'c'],
['navigare', 'verb', 'b'],
['navigazione', 'noun', 'b'],
['nazionale', 'adjective', 'a'],
['nazionale', 'noun', 'a'],
['nazionalità', 'noun', 'c'],
['nazione', 'noun', 'a'],
['nazista', 'adjective', 'b'],
['nazista', 'noun', 'b'],
['ndrangheta', 'noun', 'c'],
['né', 'conjunction', 'a'],
['ne', 'pronoun', 'a'],
['ne', 'adverb', 'a'],
['neanche', 'adverb', 'a'],
['nebbia', 'noun', 'b'],
['necessariamente', 'adverb', 'b'],
['necessario', 'adjective', 'a'],
['necessario', 'noun', 'a'],
['necessità', 'noun', 'a'],
['necessitare', 'verb', 'b'],
['negare', 'verb', 'a'],
['negativo', 'adjective', 'a'],
['negativo', 'noun', 'a'],
['negativo', 'adverb', 'a'],
['negazione', 'noun', 'c'],
['negoziante', 'pres_part', 'c'],
['negoziante', 'noun', 'c'],
['negozio', 'noun', 'a'],
['negro', 'adjective', 'b'],
['negro', 'noun', 'b'],
['nemico', 'adjective', 'a'],
['nemico', 'noun', 'a'],
['nemmeno', 'adverb', 'a'],
['neo', 'noun', 'c'],
['neonato', 'noun', 'b'],
['neonato', 'adjective', 'b'],
['neppure', 'adverb', 'a'],
['nero', 'adjective', 'a'],
['nero', 'noun', 'a'],
['nervo', 'noun', 'b'],
['nervosismo', 'noun', 'c'],
['nervoso', 'adjective', 'a'],
['nervoso', 'noun', 'a'],
['nessuno', 'adjective', 'a'],
['nessuno', 'pronoun', 'a'],
['nettare', 'noun', 'c'],
['netto', 'adjective', 'b'],
['netto', 'noun', 'b'],
['netto', 'adverb', 'b'],
['network', 'noun', 'b'],
['neutro', 'adjective', 'b'],
['neutro', 'noun', 'b'],
['neve', 'noun', 'a'],
['nevicare', 'verb', 'c'],
['news', 'noun', 'b'],
['newyorkese', 'adjective', 'c'],
['newyorkese', 'noun', 'c'],
['nido', 'noun', 'b'],
['niente', 'pronoun', 'a'],
['niente', 'adjective', 'a'],
['niente', 'adverb', 'a'],
['nipote', 'noun', 'a'],
['no', 'adverb', 'a'],
['no', 'noun', 'a'],
['no', 'adjective', 'a'],
['nobile', 'adjective', 'b'],
['nobile', 'noun', 'b'],
['nocciola', 'noun', 'c'],
['nocciola', 'adjective', 'c'],
['nocciolina', 'noun', 'c'],
['nocivo', 'adjective', 'c'],
['nodo', 'noun', 'b'],
['noi', 'pronoun', 'a'],
['noia', 'noun', 'b'],
['noioso', 'adjective', 'b'],
['noleggiare', 'verb', 'c'],
['nome', 'noun', 'a'],
['nomina', 'noun', 'b'],
['nominare', 'verb', 'a'],
['non', 'adverb', 'a'],
['nonché', 'conjunction', 'b'],
['nonna', 'noun', 'a'],
['nonno', 'noun', 'a'],
['nono', 'adjective', 'b'],
['nono', 'noun', 'b'],
['nonostante', 'preposition', 'a'],
['nonostante', 'conjunction', 'a'],
['nord', 'noun', 'a'],
['nord', 'adjective', 'a'],
['nordamericano', 'adjective', 'c'],
['nordamericano', 'noun', 'c'],
['norma', 'noun', 'a'],
['normale', 'adjective', 'a'],
['normale', 'noun', 'a'],
['normalità', 'noun', 'b'],
['normalmente', 'adverb', 'b'],
['normativa', 'noun', 'b'],
['norvegese', 'adjective', 'c'],
['norvegese', 'noun', 'c'],
['nostalgia', 'noun', 'b'],
['nostro', 'adjective', 'a'],
['nostro', 'pronoun', 'a'],
['nota', 'noun', 'a'],
['notaio', 'noun', 'b'],
['notare', 'verb', 'a'],
['notevole', 'adjective', 'b'],
['notizia', 'noun', 'a'],
['noto', 'adjective', 'a'],
['noto', 'noun', 'a'],
['notte', 'noun', 'a'],
['notturno', 'adjective', 'b'],
['notturno', 'noun', 'b'],
['novanta', 'adjective', 'b'],
['novanta', 'noun', 'b'],
['nove', 'adjective', 'a'],
['nove', 'noun', 'a'],
['novella', 'noun', 'c'],
['novembre', 'noun', 'a'],
['novità', 'noun', 'a'],
['nozione', 'noun', 'b'],
['nozze', 'noun', 'b'],
['nube', 'noun', 'b'],
['nucleare', 'adjective', 'a'],
['nucleare', 'noun', 'a'],
['nucleo', 'noun', 'b'],
['nudo', 'adjective', 'a'],
['nudo', 'noun', 'a'],
['nulla', 'pronoun', 'a'],
['nulla', 'adverb', 'a'],
['numerare', 'verb', 'b'],
['numerazione', 'noun', 'c'],
['numero', 'noun', 'a'],
['numeroso', 'adjective', 'a'],
['nuora', 'noun', 'c'],
['nuotare', 'verb', 'b'],
['nuoto', 'noun', 'b'],
['nuovamente', 'adverb', 'b'],
['nuovo', 'adjective', 'a'],
['nuovo', 'noun', 'a'],
['nutrire', 'verb', 'b'],
['nuvola', 'noun', 'b'],
['nuvoloso', 'adjective', 'c'],
['nylon', 'noun', 'c'],
['o', 'noun', 'c'],
['o', 'conjunction', 'a'],
['obbedire', 'verb', 'b'],
['obbiettivo', 'adjective', 'c'],
['obbiettivo', 'noun', 'c'],
['obbligare', 'verb', 'a'],
['obbligatorio', 'adjective', 'b'],
['obbligazione', 'noun', 'b'],
['obbligo', 'noun', 'b'],
['obiettivo', 'adjective', 'a'],
['obiettivo', 'noun', 'a'],
['obiezione', 'noun', 'b'],
['oblò', 'noun', 'c'],
['occasione', 'noun', 'a'],
['occhiaia', 'noun', 'c'],
['occhiale', 'noun', 'a'],
['occhiale', 'adjective', 'a'],
['occhiata', 'noun', 'b'],
['occhiello', 'noun', 'c'],
['occhio', 'noun', 'a'],
['occidentale', 'adjective', 'a'],
['occidentale', 'noun', 'a'],
['occidente', 'noun', 'b'],
['occidente', 'adjective', 'b'],
['occorrere', 'verb', 'a'],
['occupare', 'verb', 'a'],
['occupato', 'past_part', 'c'],
['occupato', 'adjective', 'c'],
['occupato', 'noun', 'c'],
['occupazione', 'noun', 'b'],
['oceano', 'noun', 'b'],
['oculista', 'noun', 'c'],
['oddio', 'exclamation', 'b'],
['odiare', 'verb', 'a'],
['odio', 'noun', 'b'],
['odorare', 'verb', 'c'],
['odore', 'noun', 'a'],
['offendere', 'verb', 'b'],
['offerta', 'noun', 'a'],
['offesa', 'noun', 'b'],
['offeso', 'past_part', 'c'],
['offeso', 'adjective', 'c'],
['offeso', 'noun', 'c'],
['officina', 'noun', 'b'],
['offline', 'adjective', 'b'],
['offline', 'noun', 'b'],
['offrire', 'verb', 'a'],
['oggettivo', 'adjective', 'b'],
['oggetto', 'noun', 'a'],
['oggi', 'adverb', 'a'],
['oggi', 'noun', 'a'],
['ogni', 'adjective', 'a'],
['ognuno', 'pronoun', 'a'],
['ognuno', 'adjective', 'a'],
['ok', 'adverb', 'a'],
['ok', 'noun', 'a'],
['ok', 'adjective', 'a'],
['okay', 'adverb', 'a'],
['okay', 'noun', 'a'],
['okay', 'adjective', 'a'],
['olandese', 'adjective', 'b'],
['olandese', 'noun', 'b'],
['oliare', 'verb', 'c'],
['oliera', 'noun', 'c'],
['olimpico', 'adjective', 'b'],
['olio', 'noun', 'a'],
['oliva', 'noun', 'b'],
['oliva', 'adjective', 'b'],
['oltre', 'adverb', 'a'],
['oltre', 'preposition', 'a'],
['oltrepassare', 'verb', 'c'],
['oltretutto', 'adverb', 'b'],
['omaggio', 'noun', 'b'],
['ombelico', 'noun', 'c'],
['ombra', 'noun', 'a'],
['ombrellone', 'noun', 'c'],
['omicidio', 'noun', 'a'],
['omogeneizzato', 'past_part', 'c'],
['omogeneizzato', 'adjective', 'c'],
['omogeneizzato', 'noun', 'c'],
['omonimo', 'adjective', 'b'],
['omonimo', 'noun', 'b'],
['onda', 'noun', 'a'],
['ondata', 'noun', 'b'],
['ondeggiare', 'verb', 'c'],
['onere', 'noun', 'b'],
['onestamente', 'adverb', 'b'],
['onesto', 'adjective', 'b'],
['onesto', 'noun', 'b'],
['onesto', 'adverb', 'b'],
['online', 'adjective', 'b'],
['online', 'noun', 'b'],
['onorare', 'verb', 'b'],
['onore', 'noun', 'a'],
['opera', 'noun', 'a'],
['operaio', 'noun', 'a'],
['operaio', 'adjective', 'a'],
['operare', 'verb', 'a'],
['operativo', 'adjective', 'b'],
['operativo', 'noun', 'b'],
['operatore', 'adjective', 'b'],
['operatore', 'noun', 'b'],
['operazione', 'noun', 'a'],
['opinione', 'noun', 'a'],
['opporre', 'verb', 'a'],
['opportunità', 'noun', 'b'],
['opportuno', 'adjective', 'b'],
['opposizione', 'noun', 'b'],
['opposto', 'past_part', 'a'],
['opposto', 'adjective', 'a'],
['opposto', 'noun', 'a'],
['oppressivo', 'adjective', 'c'],
['oppresso', 'past_part', 'c'],
['oppresso', 'adjective', 'c'],
['oppresso', 'noun', 'c'],
['oppressore', 'adjective', 'c'],
['oppressore', 'noun', 'c'],
['oppure', 'conjunction', 'a'],
['opzione', 'noun', 'b'],
['ora', 'noun', 'a'],
['ora', 'adverb', 'a'],
['orale', 'adjective', 'b'],
['oramai', 'adverb', 'b'],
['orario', 'adjective', 'a'],
['orario', 'noun', 'a'],
['orbita', 'noun', 'b'],
['orchestra', 'noun', 'b'],
['orco', 'noun', 'b'],
['ordinamento', 'noun', 'b'],
['ordinanza', 'noun', 'b'],
['ordinare', 'verb', 'a'],
['ordinario', 'adjective', 'b'],
['ordinario', 'noun', 'b'],
['ordine', 'noun', 'a'],
['orecchino', 'noun', 'c'],
['orecchio', 'noun', 'a'],
['orefice', 'noun', 'c'],
['organico', 'adjective', 'b'],
['organico', 'noun', 'b'],
['organismo', 'noun', 'a'],
['organizzare', 'verb', 'a'],
['organizzato', 'past_part', 'b'],
['organizzato', 'adjective', 'b'],
['organizzato', 'noun', 'b'],
['organizzazione', 'noun', 'a'],
['organo', 'noun', 'a'],
['orgasmo', 'noun', 'b'],
['orgoglio', 'noun', 'b'],
['orgoglioso', 'adjective', 'b'],
['orientale', 'adjective', 'b'],
['orientale', 'noun', 'b'],
['orientamento', 'noun', 'b'],
['orientare', 'verb', 'b'],
['oriente', 'adjective', 'b'],
['oriente', 'noun', 'b'],
['origano', 'noun', 'c'],
['originale', 'adjective', 'a'],
['originale', 'noun', 'a'],
['originario', 'adjective', 'b'],
['origine', 'noun', 'a'],
['orizzontale', 'adjective', 'b'],
['orizzontale', 'noun', 'b'],
['orizzonte', 'noun', 'b'],
['orlo', 'noun', 'b'],
['orma', 'noun', 'c'],
['ormai', 'adverb', 'a'],
['ormone', 'noun', 'b'],
['oro', 'noun', 'a'],
['orologiaio', 'noun', 'c'],
['orologio', 'noun', 'a'],
['oroscopo', 'noun', 'b'],
['orribile', 'adjective', 'b'],
['orrore', 'noun', 'b'],
['orso', 'noun', 'b'],
['ortaggio', 'noun', 'c'],
['ortensia', 'noun', 'c'],
['ortica', 'noun', 'c'],
['orto', 'noun', 'b'],
['ortolano', 'noun', 'c'],
['ortolano', 'adjective', 'c'],
['orzo', 'noun', 'c'],
['osare', 'verb', 'b'],
['osceno', 'adjective', 'c'],
['oscillare', 'verb', 'b'],
['oscurare', 'verb', 'b'],
['oscuro', 'adjective', 'b'],
['oscuro', 'noun', 'b'],
['oscuro', 'adverb', 'b'],
['ospedale', 'noun', 'a'],
['ospitalità', 'noun', 'c'],
['ospitare', 'verb', 'a'],
['ospite', 'adjective', 'a'],
['ospite', 'noun', 'a'],
['ospizio', 'noun', 'c'],
['osservare', 'verb', 'a'],
['osservazione', 'noun', 'b'],
['ossessione', 'noun', 'b'],
['ossia', 'conjunction', 'b'],
['ossigeno', 'noun', 'b'],
['osso', 'noun', 'a'],
['ostacolare', 'verb', 'b'],
['ostacolo', 'noun', 'b'],
['ostaggio', 'noun', 'c'],
['oste', 'noun', 'c'],
['ostile', 'adjective', 'b'],
['ostinato', 'past_part', 'c'],
['ostinato', 'adjective', 'c'],
['ostrica', 'noun', 'c'],
['ottanta', 'adjective', 'b'],
['ottanta', 'noun', 'b'],
['ottavo', 'adjective', 'b'],
['ottavo', 'noun', 'b'],
['ottenere', 'verb', 'a'],
['ottica', 'noun', 'b'],
['ottimo', 'adjective', 'a'],
['ottimo', 'noun', 'a'],
['otto', 'adjective', 'a'],
['otto', 'noun', 'a'],
['ottobre', 'noun', 'a'],
['ottone', 'noun', 'c'],
['ovale', 'adjective', 'c'],
['ovale', 'noun', 'c'],
['ovatta', 'noun', 'c'],
['ove', 'adverb', 'b'],
['ove', 'conjunction', 'b'],
['ovest', 'noun', 'b'],
['ovest', 'adjective', 'b'],
['ovile', 'noun', 'c'],
['ovino', 'adjective', 'c'],
['ovino', 'noun', 'c'],
['ovunque', 'adverb', 'a'],
['ovunque', 'conjunction', 'a'],
['ovvero', 'conjunction', 'a'],
['ovviamente', 'adverb', 'a'],
['ovviare', 'verb', 'b'],
['ovvio', 'adjective', 'b'],
['ozono', 'noun', 'c'],
['pacchetto', 'noun', 'b'],
['pacco', 'noun', 'b'],
['pace', 'noun', 'a'],
['padella', 'noun', 'c'],
['padre', 'noun', 'a'],
['padrona', 'noun', 'b'],
['padronato', 'noun', 'c'],
['padrone', 'noun', 'a'],
['padroneggiare', 'verb', 'c'],
['paesaggio', 'noun', 'b'],
['paese', 'noun', 'a'],
['paga', 'noun', 'b'],
['pagamento', 'noun', 'a'],
['pagare', 'verb', 'a'],
['pagella', 'noun', 'c'],
['pagina', 'noun', 'a'],
['paglia', 'noun', 'b'],
['paglia', 'adjective', 'b'],
['pagliaio', 'noun', 'c'],
['pago', 'past_part', 'b'],
['pago', 'adjective', 'b'],
['paio', 'noun', 'a'],
['pala', 'noun', 'b'],
['palato', 'noun', 'c'],
['palazzina', 'noun', 'c'],
['palazzo', 'noun', 'a'],
['palco', 'noun', 'b'],
['palcoscenico', 'noun', 'b'],
['palermitano', 'adjective', 'c'],
['palermitano', 'noun', 'c'],
['palestinese', 'adjective', 'c'],
['palestinese', 'noun', 'c'],
['palestra', 'noun', 'b'],
['paletta', 'noun', 'c'],
['palla', 'noun', 'a'],
['pallacanestro', 'noun', 'c'],
['pallanuoto', 'noun', 'c'],
['pallavolo', 'noun', 'c'],
['pallido', 'adjective', 'b'],
['pallina', 'noun', 'b'],
['pallino', 'noun', 'c'],
['palloncino', 'noun', 'c'],
['pallone', 'noun', 'b'],
['pallottola', 'noun', 'c'],
['pallottoliere', 'noun', 'c'],
['palma', 'noun', 'c'],
['palo', 'noun', 'b'],
['palombaro', 'noun', 'c'],
['palpebra', 'noun', 'c'],
['palude', 'noun', 'c'],
['panca', 'noun', 'c'],
['pancarré', 'noun', 'c'],
['pancetta', 'noun', 'c'],
['panchina', 'noun', 'b'],
['pancia', 'noun', 'b'],
['panciotto', 'noun', 'c'],
['panda', 'noun', 'c'],
['pandoro', 'noun', 'c'],
['pane', 'noun', 'a'],
['panetteria', 'noun', 'c'],
['panettiere', 'noun', 'c'],
['panettone', 'noun', 'c'],
['panico', 'adjective', 'b'],
['panico', 'noun', 'b'],
['paniere', 'noun', 'c'],
['panino', 'noun', 'b'],
['panna', 'noun', 'b'],
['pannello', 'noun', 'b'],
['panno', 'noun', 'b'],
['pannocchia', 'noun', 'c'],
['pannolino', 'noun', 'c'],
['pannolone', 'noun', 'c'],
['panorama', 'noun', 'b'],
['pantalone', 'noun', 'a'],
['pantera', 'noun', 'c'],
['pantofola', 'noun', 'c'],
['panzerotto', 'noun', 'c'],
['papa', 'noun', 'a'],
['papà', 'noun', 'a'],
['papavero', 'noun', 'c'],
['papera', 'noun', 'c'],
['papero', 'noun', 'c'],
['pappa', 'noun', 'c'],
['pappagallo', 'noun', 'c'],
['parabola', 'noun', 'c'],
['parabrezza', 'noun', 'c'],
['paracadute', 'noun', 'c'],
['paracadutista', 'noun', 'c'],
['paradiso', 'noun', 'b'],
['paradosso', 'noun', 'b'],
['paradosso', 'adjective', 'b'],
['parafulmine', 'noun', 'c'],
['paragonare', 'verb', 'b'],
['paragone', 'noun', 'b'],
['paralisi', 'noun', 'c'],
['paralizzato', 'past_part', 'c'],
['paralizzato', 'adjective', 'c'],
['parallelepipedo', 'noun', 'c'],
['parallelo', 'adjective', 'b'],
['parallelo', 'noun', 'b'],
['paralume', 'noun', 'c'],
['parametro', 'noun', 'b'],
['paraocchi', 'noun', 'c'],
['parare', 'verb', 'b'],
['paraurti', 'noun', 'c'],
['paravento', 'noun', 'c'],
['parcheggiare', 'verb', 'b'],
['parcheggio', 'noun', 'b'],
['parco', 'noun', 'a'],
['parecchio', 'adjective', 'a'],
['parecchio', 'pronoun', 'a'],
['parecchio', 'adverb', 'a'],
['parecchio', 'adjective', 'a'],
['pareggiare', 'verb', 'c'],
['pareggio', 'noun', 'c'],
['parente', 'noun', 'a'],
['parentesi', 'noun', 'b'],
['parere', 'verb', 'a'],
['parere', 'noun', 'a'],
['parete', 'noun', 'a'],
['pari', 'adjective', 'a'],
['pari', 'adverb', 'a'],
['pari', 'noun', 'a'],
['parigino', 'adjective', 'c'],
['parigino', 'noun', 'c'],
['parità', 'noun', 'c'],
['parlamentare', 'adjective', 'b'],
['parlamentare', 'noun', 'b'],
['parlamento', 'noun', 'b'],
['parlare', 'verb', 'a'],
['parmigiano', 'adjective', 'c'],
['parmigiano', 'noun', 'c'],
['parola', 'noun', 'a'],
['parquet', 'noun', 'c'],
['parroco', 'noun', 'c'],
['parrucca', 'noun', 'c'],
['parrucchiere', 'noun', 'c'],
['parte', 'noun', 'a'],
['parte', 'adverb', 'a'],
['partecipante', 'pres_part', 'b'],
['partecipante', 'adjective', 'b'],
['partecipante', 'noun', 'b'],
['partecipare', 'verb', 'a'],
['partecipazione', 'noun', 'b'],
['parteggiare', 'verb', 'c'],
['partenza', 'noun', 'a'],
['particella', 'noun', 'b'],
['particolare', 'adjective', 'a'],
['particolare', 'noun', 'a'],
['particolarmente', 'adverb', 'a'],
['partigiano', 'noun', 'b'],
['partigiano', 'adjective', 'b'],
['partire', 'verb', 'a'],
['partita', 'noun', 'a'],
['partito', 'noun', 'a'],
['partner', 'noun', 'b'],
['parto', 'noun', 'b'],
['partorire', 'verb', 'b'],
['party', 'noun', 'b'],
['parziale', 'adjective', 'b'],
['parziale', 'noun', 'b'],
['parzialmente', 'adverb', 'b'],
['pascolare', 'verb', 'c'],
['pasqua', 'noun', 'c'],
['pasquale', 'adjective', 'b'],
['passaggio', 'noun', 'a'],
['passare', 'verb', 'a'],
['passata', 'noun', 'c'],
['passatempo', 'noun', 'c'],
['passato', 'past_part', 'a'],
['passato', 'adjective', 'a'],
['passato', 'noun', 'a'],
['passeggero', 'adjective', 'b'],
['passeggero', 'noun', 'b'],
['passeggiare', 'verb', 'b'],
['passeggiata', 'noun', 'b'],
['passeggio', 'noun', 'c'],
['passero', 'noun', 'c'],
['passione', 'noun', 'a'],
['passivo', 'adjective', 'b'],
['passivo', 'noun', 'b'],
['passo', 'noun', 'a'],
['pasta', 'noun', 'a'],
['pasticca', 'noun', 'c'],
['pasticcere', 'noun', 'c'],
['pasticceria', 'noun', 'c'],
['pasticcino', 'noun', 'c'],
['pasticcio', 'noun', 'c'],
['pastiglia', 'noun', 'c'],
['pastina', 'noun', 'c'],
['pasto', 'noun', 'b'],
['pastore', 'noun', 'b'],
['patata', 'noun', 'b'],
['patatina', 'noun', 'c'],
['paté', 'noun', 'c'],
['patente', 'noun', 'b'],
['patetico', 'adjective', 'b'],
['patetico', 'noun', 'b'],
['patologia', 'noun', 'b'],
['patria', 'noun', 'b'],
['patrimonio', 'noun', 'b'],
['pattinaggio', 'noun', 'c'],
['pattinare', 'verb', 'c'],
['pattino', 'noun', 'c'],
['patto', 'noun', 'b'],
['pattumiera', 'noun', 'c'],
['paura', 'noun', 'a'],
['pauroso', 'adjective', 'c'],
['pausa', 'noun', 'a'],
['pavimento', 'noun', 'b'],
['pavone', 'noun', 'c'],
['pavone', 'adjective', 'c'],
['paziente', 'adjective', 'a'],
['paziente', 'noun', 'a'],
['pazienza', 'noun', 'a'],
['pazza', 'noun', 'c'],
['pazzesco', 'adjective', 'b'],
['pazzo', 'adjective', 'a'],
['pazzo', 'noun', 'a'],
['peccato', 'noun', 'b'],
['peccato', 'exclamation', 'b'],
['peccatore', 'noun', 'c'],
['peccatore', 'adjective', 'c'],
['pechinese', 'adjective', 'c'],
['pechinese', 'noun', 'c'],
['pecora', 'noun', 'b'],
['pecorino', 'adjective', 'c'],
['pecorino', 'noun', 'c'],
['pedalare', 'verb', 'c'],
['pedale', 'noun', 'c'],
['pedale', 'adjective', 'c'],
['pedone', 'noun', 'c'],
['pedone', 'adjective', 'c'],
['peggio', 'adverb', 'a'],
['peggio', 'adjective', 'a'],
['peggio', 'noun', 'a'],
['peggioramento', 'noun', 'c'],
['peggiorare', 'verb', 'b'],
['peggiore', 'adjective', 'b'],
['peggiore', 'noun', 'b'],
['peggiore', 'adverb', 'b'],
['pelato', 'past_part', 'c'],
['pelato', 'adjective', 'c'],
['pelato', 'noun', 'c'],
['pelle', 'noun', 'a'],
['pellegrino', 'noun', 'c'],
['pellegrino', 'adjective', 'c'],
['pellerossa', 'adjective', 'c'],
['pellerossa', 'noun', 'c'],
['pelletteria', 'noun', 'c'],
['pellicola', 'noun', 'b'],
['pelo', 'noun', 'b'],
['peloso', 'adjective', 'c'],
['peloso', 'noun', 'c'],
['peluche', 'noun', 'c'],
['pena', 'noun', 'a'],
['penale', 'adjective', 'b'],
['penale', 'noun', 'b'],
['pendere', 'verb', 'b'],
['pendolo', 'noun', 'c'],
['pene', 'noun', 'b'],
['penetrare', 'verb', 'b'],
['penisola', 'noun', 'c'],
['penna', 'noun', 'b'],
['pennarello', 'noun', 'c'],
['pensare', 'verb', 'a'],
['pensiero', 'noun', 'a'],
['pensionato', 'past_part', 'c'],
['pensionato', 'adjective', 'c'],
['pensionato', 'noun', 'c'],
['pensione', 'noun', 'a'],
['pentagono', 'noun', 'c'],
['pentirsi', 'verb', 'b'],
['pentola', 'noun', 'b'],
['penultimo', 'adjective', 'c'],
['pepe', 'noun', 'c'],
['peperoncino', 'noun', 'c'],
['peperone', 'noun', 'c'],
['per', 'preposition', 'a'],
['pera', 'noun', 'c'],
['peraltro', 'adverb', 'b'],
['percentuale', 'adjective', 'b'],
['percentuale', 'noun', 'b'],
['percepire', 'verb', 'a'],
['percezione', 'noun', 'b'],
['perché', 'adverb', 'a'],
['perché', 'conjunction', 'a'],
['perché', 'noun', 'a'],
['perciò', 'conjunction', 'a'],
['percorrere', 'verb', 'b'],
['percorso', 'past_part', 'a'],
['percorso', 'adjective', 'a'],
['percorso', 'noun', 'a'],
['perdere', 'verb', 'a'],
['perdita', 'noun', 'a'],
['perdonare', 'verb', 'a'],
['perdono', 'noun', 'b'],
['perduto', 'past_part', 'b'],
['perduto', 'adjective', 'b'],
['perfettamente', 'adverb', 'a'],
['perfetto', 'past_part', 'a'],
['perfetto', 'adjective', 'a'],
['perfetto', 'noun', 'a'],
['perfezione', 'noun', 'b'],
['perfino', 'adverb', 'a'],
['perfino', 'preposition', 'a'],
['pergola', 'noun', 'c'],
['pergolato', 'noun', 'c'],
['pergolato', 'adjective', 'c'],
['pericolo', 'noun', 'a'],
['pericoloso', 'adjective', 'a'],
['periferia', 'noun', 'b'],
['periodico', 'adjective', 'b'],
['periodico', 'noun', 'b'],
['periodo', 'noun', 'a'],
['perito', 'noun', 'b'],
['perito', 'adjective', 'b'],
['perla', 'noun', 'b'],
['perla', 'adjective', 'b'],
['permaloso', 'adjective', 'c'],
['permaloso', 'noun', 'c'],
['permanente', 'pres_part', 'b'],
['permanente', 'adjective', 'b'],
['permanente', 'noun', 'b'],
['permesso', 'past_part', 'b'],
['permesso', 'adjective', 'b'],
['permesso', 'noun', 'b'],
['permettere', 'verb', 'a'],
['pero', 'noun', 'c'],
['però', 'conjunction', 'a'],
['perpendicolare', 'adjective', 'c'],
['perpendicolare', 'noun', 'c'],
['perplesso', 'adjective', 'b'],
['perquisizione', 'noun', 'b'],
['perseguire', 'verb', 'b'],
['persiana', 'noun', 'c'],
['persiano', 'adjective', 'b'],
['persiano', 'noun', 'b'],
['persino', 'adverb', 'a'],
['perso', 'past_part', 'b'],
['perso', 'adjective', 'b'],
['persona', 'noun', 'a'],
['personaggio', 'noun', 'a'],
['personale', 'adjective', 'a'],
['personale', 'noun', 'a'],
['personale', 'noun', 'a'],
['personalità', 'noun', 'b'],
['personalmente', 'adverb', 'a'],
['pertanto', 'conjunction', 'b'],
['perugino', 'adjective', 'c'],
['perugino', 'noun', 'c'],
['peruviano', 'adjective', 'c'],
['peruviano', 'noun', 'c'],
['pervenire', 'verb', 'b'],
['pesante', 'pres_part', 'a'],
['pesante', 'adjective', 'a'],
['pesante', 'adverb', 'a'],
['pesare', 'verb', 'b'],
['pesca', 'noun', 'c'],
['pesca', 'adjective', 'c'],
['pesca', 'noun', 'b'],
['pescare', 'verb', 'b'],
['pescatore', 'noun', 'b'],
['pescatore', 'adjective', 'b'],
['pesce', 'noun', 'a'],
['peschereccio', 'noun', 'c'],
['peschereccio', 'adjective', 'c'],
['pescheria', 'noun', 'c'],
['pesco', 'noun', 'c'],
['peso', 'noun', 'a'],
['pessimo', 'adjective', 'b'],
['pestare', 'verb', 'c'],
['peste', 'noun', 'c'],
['pesto', 'past_part', 'c'],
['pesto', 'adjective', 'c'],
['pesto', 'noun', 'c'],
['petalo', 'noun', 'c'],
['petardo', 'noun', 'c'],
['petroliera', 'noun', 'c'],
['petrolio', 'noun', 'b'],
['pettegolezzo', 'noun', 'c'],
['pettegolo', 'adjective', 'c'],
['pettegolo', 'noun', 'c'],
['pettinare', 'verb', 'c'],
['pettinatura', 'noun', 'c'],
['pettine', 'noun', 'c'],
['pettirosso', 'noun', 'c'],
['petto', 'noun', 'a'],
['pezza', 'noun', 'c'],
['pezzetto', 'noun', 'b'],
['pezzo', 'noun', 'a'],
['pezzuola', 'noun', 'c'],
['pi', 'noun', 'c'],
['piacere', 'verb', 'a'],
['piacere', 'noun', 'a'],
['piacevole', 'adjective', 'b'],
['piadina', 'noun', 'c'],
['piaga', 'noun', 'c'],
['pialla', 'noun', 'c'],
['piallare', 'verb', 'c'],
['pianeggiante', 'pres_part', 'c'],
['pianeggiante', 'adjective', 'c'],
['pianerottolo', 'noun', 'b'],
['pianeta', 'noun', 'a'],
['piangere', 'verb', 'a'],
['piangere', 'noun', 'a'],
['piano', 'noun', 'a'],
['piano', 'noun', 'a'],
['piano', 'adjective', 'a'],
['piano', 'adverb', 'a'],
['pianoforte', 'noun', 'b'],
['pianoterra', 'noun', 'c'],
['pianta', 'noun', 'a'],
['piantare', 'verb', 'b'],
['pianto', 'noun', 'b'],
['pianura', 'noun', 'b'],
['piastra', 'noun', 'c'],
['piattaforma', 'noun', 'b'],
['piatto', 'adjective', 'a'],
['piatto', 'noun', 'a'],
['piazza', 'noun', 'a'],
['piazzale', 'noun', 'b'],
['piazzare', 'verb', 'b'],
['piccante', 'adjective', 'c'],
['picchiare', 'verb', 'b'],
['piccino', 'adjective', 'c'],
['piccino', 'noun', 'c'],
['piccione', 'noun', 'c'],
['picco', 'noun', 'b'],
['piccolo', 'adjective', 'a'],
['piccolo', 'noun', 'a'],
['piccone', 'noun', 'c'],
['picnic', 'noun', 'c'],
['pidocchio', 'noun', 'c'],
['piede', 'noun', 'a'],
['piega', 'noun', 'b'],
['piegare', 'verb', 'b'],
['pieghevole', 'adjective', 'c'],
['pieghevole', 'noun', 'c'],
['piemontese', 'adjective', 'b'],
['piemontese', 'noun', 'b'],
['piena', 'noun', 'c'],
['pienamente', 'adverb', 'b'],
['pieno', 'adjective', 'a'],
['pieno', 'noun', 'a'],
['pietà', 'noun', 'b'],
['pietra', 'noun', 'a'],
['pigiama', 'noun', 'c'],
['pigione', 'noun', 'c'],
['pigliare', 'verb', 'b'],
['pigna', 'noun', 'c'],
['pigrizia', 'noun', 'c'],
['pigro', 'adjective', 'c'],
['pigro', 'noun', 'c'],
['pila', 'noun', 'b'],
['pillola', 'noun', 'b'],
['pilota', 'noun', 'b'],
['pineta', 'noun', 'c'],
['ping-pong', 'noun', 'c'],
['pinguino', 'noun', 'c'],
['pinna', 'noun', 'c'],
['pinolo', 'noun', 'c'],
['pinza', 'noun', 'c'],
['pinzetta', 'noun', 'c'],
['pioggia', 'noun', 'a'],
['piombo', 'noun', 'b'],
['piombo', 'adjective', 'b'],
['piombo', 'noun', 'b'],
['pioppo', 'noun', 'c'],
['piovere', 'verb', 'b'],
['piovoso', 'adjective', 'c'],
['piovoso', 'noun', 'c'],
['pipì', 'noun', 'c'],
['pipistrello', 'noun', 'c'],
['pirata', 'noun', 'b'],
['piscina', 'noun', 'b'],
['pisello', 'noun', 'c'],
['pisello', 'adjective', 'c'],
['pisolino', 'noun', 'c'],
['pista', 'noun', 'b'],
['pistacchio', 'noun', 'c'],
['pistacchio', 'adjective', 'c'],
['pistola', 'noun', 'a'],
['pittare', 'verb', 'c'],
['pittore', 'noun', 'b'],
['pittore', 'adjective', 'b'],
['pittura', 'noun', 'b'],
['pitturare', 'verb', 'c'],
['più', 'adverb', 'a'],
['più', 'adjective', 'a'],
['più', 'preposition', 'a'],
['più', 'noun', 'a'],
['piuma', 'noun', 'c'],
['piumino', 'noun', 'c'],
['piuttosto', 'adverb', 'a'],
['pizza', 'noun', 'b'],
['pizzeria', 'noun', 'c'],
['pizzetta', 'noun', 'c'],
['pizzicare', 'verb', 'c'],
['pizzo', 'noun', 'c'],
['plaid', 'noun', 'c'],
['plastica', 'noun', 'b'],
['plastico', 'adjective', 'b'],
['plastico', 'noun', 'b'],
['platano', 'noun', 'c'],
['platino', 'noun', 'c'],
['platino', 'adjective', 'c'],
['plurale', 'noun', 'c'],
['plurale', 'adjective', 'c'],
['pneumatico', 'noun', 'c'],
['pochino', 'noun', 'b'],
['poco', 'adjective', 'a'],
['poco', 'pronoun', 'a'],
['poco', 'adverb', 'a'],
['podere', 'noun', 'c'],
['poema', 'noun', 'b'],
['poesia', 'noun', 'a'],
['poeta', 'noun', 'a'],
['poetico', 'adjective', 'b'],
['poetico', 'noun', 'b'],
['poggiapiedi', 'noun', 'c'],
['poggiare', 'verb', 'c'],
['poi', 'adverb', 'a'],
['poiché', 'conjunction', 'a'],
['poker', 'noun', 'b'],
['polacco', 'adjective', 'b'],
['polacco', 'noun', 'b'],
['polemica', 'noun', 'b'],
['polenta', 'noun', 'c'],
['polipo', 'noun', 'c'],
['politica', 'noun', 'a'],
['politico', 'adjective', 'a'],
['politico', 'noun', 'a'],
['polizia', 'noun', 'a'],
['poliziotto', 'noun', 'a'],
['pollaio', 'noun', 'c'],
['pollame', 'noun', 'c'],
['pollice', 'noun', 'b'],
['pollo', 'noun', 'c'],
['polmone', 'noun', 'b'],
['polo', 'noun', 'b'],
['polpa', 'noun', 'c'],
['polpastrello', 'noun', 'c'],
['polpetta', 'noun', 'c'],
['polpo', 'noun', 'c'],
['polsino', 'noun', 'c'],
['polso', 'noun', 'b'],
['poltrona', 'noun', 'b'],
['polvere', 'noun', 'a'],
['polverina', 'noun', 'c'],
['polveroso', 'adjective', 'c'],
['pomata', 'noun', 'c'],
['pomello', 'noun', 'c'],
['pomeriggio', 'noun', 'a'],
['pomodoro', 'noun', 'b'],
['pompa', 'noun', 'b'],
['pompelmo', 'noun', 'c'],
['pompiere', 'noun', 'c'],
['ponte', 'noun', 'a'],
['pony', 'noun', 'c'],
['pop', 'adjective', 'b'],
['pop', 'noun', 'b'],
['popolare', 'adjective', 'a'],
['popolare', 'noun', 'a'],
['popolare', 'verb', 'b'],
['popolarità', 'noun', 'c'],
['popolazione', 'noun', 'a'],
['popolo', 'noun', 'a'],
['porcellana', 'noun', 'c'],
['porcheria', 'noun', 'c'],
['porco', 'noun', 'b'],
['porco', 'adjective', 'b'],
['porgere', 'verb', 'b'],
['porno', 'adjective', 'b'],
['porno', 'noun', 'b'],
['porre', 'verb', 'a'],
['porta', 'noun', 'a'],
['portabagagli', 'noun', 'c'],
['portabagagli', 'adjective', 'c'],
['portacenere', 'noun', 'c'],
['portachiavi', 'noun', 'c'],
['portacipria', 'noun', 'c'],
['portaerei', 'noun', 'c'],
['portafinestra', 'noun', 'c'],
['portafoglio', 'noun', 'b'],
['portafortuna', 'noun', 'c'],
['portale', 'noun', 'b'],
['portamonete', 'noun', 'c'],
['portaombrelli', 'noun', 'c'],
['portare', 'verb', 'a'],
['portata', 'noun', 'b'],
['portatore', 'adjective', 'b'],
['portatore', 'noun', 'b'],
['portiere', 'noun', 'b'],
['portineria', 'noun', 'c'],
['porto', 'noun', 'a'],
['portoghese', 'adjective', 'b'],
['portoghese', 'noun', 'b'],
['portone', 'noun', 'b'],
['porzione', 'noun', 'b'],
['posa', 'noun', 'b'],
['posacenere', 'noun', 'c'],
['posare', 'verb', 'b'],
['posata', 'noun', 'c'],
['positivo', 'adjective', 'a'],
['positivo', 'noun', 'a'],
['positivo', 'adverb', 'a'],
['posizionare', 'verb', 'b'],
['posizione', 'noun', 'a'],
['possedere', 'verb', 'a'],
['possesso', 'noun', 'b'],
['possibile', 'adjective', 'a'],
['possibile', 'noun', 'a'],
['possibilità', 'noun', 'a'],
['post', 'noun', 'b'],
['posta', 'noun', 'a'],
['postale', 'adjective', 'b'],
['postare', 'verb', 'b'],
['posteggiatore', 'noun', 'c'],
['posteriore', 'adjective', 'b'],
['posteriore', 'noun', 'b'],
['postino', 'noun', 'c'],
['postino', 'adjective', 'c'],
['posto', 'noun', 'a'],
['potare', 'verb', 'c'],
['potente', 'pres_part', 'a'],
['potente', 'adjective', 'a'],
['potente', 'noun', 'a'],
['potentino', 'adjective', 'c'],
['potentino', 'noun', 'c'],
['potenza', 'noun', 'b'],
['potenziale', 'adjective', 'b'],
['potenziale', 'noun', 'b'],
['potere', 'verb', 'a'],
['potere', 'noun', 'a'],
['povero', 'adjective', 'a'],
['povertà', 'noun', 'b'],
['pozzanghera', 'noun', 'c'],
['pozzo', 'noun', 'b'],
['praghese', 'adjective', 'c'],
['praghese', 'noun', 'c'],
['pranzo', 'noun', 'a'],
['prassi', 'noun', 'b'],
['pratica', 'noun', 'a'],
['praticamente', 'adverb', 'a'],
['praticare', 'verb', 'b'],
['pratico', 'adjective', 'a'],
['prato', 'noun', 'b'],
['precario', 'adjective', 'b'],
['precedente', 'pres_part', 'a'],
['precedente', 'adjective', 'a'],
['precedente', 'noun', 'a'],
['precedentemente', 'adverb', 'b'],
['precedenza', 'noun', 'b'],
['precedere', 'verb', 'b'],
['precipitare', 'verb', 'b'],
['precisamente', 'adverb', 'b'],
['precisare', 'verb', 'a'],
['precisione', 'noun', 'b'],
['preciso', 'adjective', 'a'],
['preciso', 'adverb', 'a'],
['preda', 'noun', 'b'],
['predisporre', 'verb', 'b'],
['preferenza', 'noun', 'b'],
['preferire', 'verb', 'a'],
['preferito', 'past_part', 'b'],
['preferito', 'adjective', 'b'],
['preferito', 'noun', 'b'],
['pregare', 'verb', 'a'],
['preghiera', 'noun', 'b'],
['pregiato', 'past_part', 'c'],
['pregiato', 'adjective', 'c'],
['pregio', 'noun', 'b'],
['pregiudizio', 'noun', 'b'],
['prego', 'exclamation', 'a'],
['prelevare', 'verb', 'b'],
['preliminare', 'adjective', 'b'],
['preliminare', 'noun', 'b'],
['prémaman', 'adjective', 'c'],
['premere', 'verb', 'b'],
['premessa', 'noun', 'b'],
['premiare', 'verb', 'b'],
['premier', 'noun', 'b'],
['premio', 'noun', 'a'],
['premio', 'adjective', 'a'],
['prendere', 'verb', 'a'],
['prenotare', 'verb', 'b'],
['prenotazione', 'noun', 'c'],
['preoccupare', 'verb', 'a'],
['preoccupato', 'past_part', 'b'],
['preoccupato', 'adjective', 'b'],
['preoccupazione', 'noun', 'b'],
['preparare', 'verb', 'a'],
['preparazione', 'noun', 'b'],
['prepotente', 'adjective', 'c'],
['prepotente', 'noun', 'c'],
['presa', 'noun', 'a'],
['prescindere', 'verb', 'b'],
['prescrivere', 'verb', 'b'],
['prescrizione', 'noun', 'b'],
['presentare', 'verb', 'a'],
['presentazione', 'noun', 'b'],
['presente', 'adjective', 'a'],
['presente', 'noun', 'a'],
['presente', 'adverb', 'a'],
['presenza', 'noun', 'a'],
['presepe', 'noun', 'b'],
['preside', 'noun', 'c'],
['presidente', 'noun', 'a'],
['presidente', 'adjective', 'a'],
['presidenza', 'noun', 'b'],
['pressione', 'noun', 'a'],
['presso', 'adverb', 'a'],
['presso', 'preposition', 'a'],
['presso', 'noun', 'a'],
['presso', 'adjective', 'a'],
['prestare', 'verb', 'a'],
['prestazione', 'noun', 'b'],
['prestigio', 'noun', 'b'],
['prestigioso', 'adjective', 'b'],
['prestito', 'noun', 'b'],
['presto', 'adverb', 'a'],
['presto', 'exclamation', 'a'],
['presto', 'adjective', 'a'],
['presumere', 'verb', 'b'],
['presunto', 'past_part', 'b'],
['presunto', 'adjective', 'b'],
['presupposto', 'past_part', 'b'],
['presupposto', 'adjective', 'b'],
['presupposto', 'noun', 'b'],
['prete', 'noun', 'a'],
['pretendere', 'verb', 'a'],
['pretesa', 'noun', 'b'],
['pretesto', 'noun', 'b'],
['prevalentemente', 'adverb', 'b'],
['prevalere', 'verb', 'b'],
['prevedere', 'verb', 'a'],
['prevedibile', 'adjective', 'b'],
['prevenire', 'verb', 'b'],
['preventivo', 'adjective', 'b'],
['preventivo', 'noun', 'b'],
['prevenzione', 'noun', 'b'],
['previdenza', 'noun', 'c'],
['previsione', 'noun', 'b'],
['previsto', 'past_part', 'a'],
['previsto', 'adjective', 'a'],
['previsto', 'noun', 'a'],
['prezioso', 'adjective', 'a'],
['prezioso', 'noun', 'a'],
['prezzemolo', 'noun', 'c'],
['prezzo', 'noun', 'a'],
['prigione', 'noun', 'b'],
['prigioniero', 'adjective', 'b'],
['prigioniero', 'noun', 'b'],
['prima', 'adverb', 'a'],
['prima', 'adjective', 'a'],
['prima', 'noun', 'a'],
['prima', 'noun', 'a'],
['primario', 'adjective', 'b'],
['primario', 'noun', 'b'],
['primavera', 'noun', 'a'],
['primizia', 'noun', 'c'],
['primo', 'adjective', 'a'],
['primo', 'noun', 'a'],
['primo', 'adverb', 'a'],
['primula', 'noun', 'c'],
['principale', 'adjective', 'a'],
['principale', 'noun', 'a'],
['principalmente', 'adverb', 'b'],
['principe', 'noun', 'a'],
['principe', 'adjective', 'a'],
['principessa', 'noun', 'b'],
['principio', 'noun', 'a'],
['priorità', 'noun', 'b'],
['privacy', 'noun', 'b'],
['privare', 'verb', 'b'],
['privato', 'adjective', 'a'],
['privato', 'noun', 'a'],
['privilegio', 'noun', 'b'],
['privo', 'adjective', 'b'],
['privo', 'preposition', 'b'],
['privo', 'noun', 'b'],
['probabile', 'adjective', 'b'],
['probabilità', 'noun', 'b'],
['probabilmente', 'adverb', 'a'],
['problema', 'noun', 'a'],
['problematico', 'adjective', 'b'],
['procedere', 'verb', 'a'],
['procedimento', 'noun', 'b'],
['procedura', 'noun', 'a'],
['processo', 'noun', 'a'],
['proclamare', 'verb', 'b'],
['procura', 'noun', 'b'],
['procurare', 'verb', 'b'],
['procuratore', 'noun', 'b'],
['prodotto', 'past_part', 'a'],
['prodotto', 'adjective', 'a'],
['prodotto', 'noun', 'a'],
['produrre', 'verb', 'a'],
['produttivo', 'adjective', 'b'],
['produttore', 'adjective', 'b'],
['produttore', 'noun', 'b'],
['produzione', 'noun', 'a'],
['prof', 'noun', 'b'],
['professionale', 'adjective', 'a'],
['professione', 'noun', 'b'],
['professionista', 'noun', 'b'],
['professore', 'noun', 'a'],
['professoressa', 'noun', 'b'],
['profeta', 'noun', 'b'],
['profilattico', 'adjective', 'c'],
['profilattico', 'noun', 'c'],
['profilo', 'noun', 'a'],
['profitto', 'noun', 'b'],
['profondamente', 'adverb', 'b'],
['profondità', 'noun', 'b'],
['profondo', 'adjective', 'a'],
['profondo', 'noun', 'a'],
['profondo', 'adverb', 'a'],
['profumare', 'verb', 'b'],
['profumato', 'past_part', 'c'],
['profumato', 'adjective', 'c'],
['profumo', 'noun', 'b'],
['progettare', 'verb', 'b'],
['progettazione', 'noun', 'b'],
['progetto', 'noun', 'a'],
['programma', 'noun', 'a'],
['programmare', 'verb', 'b'],
['programmazione', 'noun', 'b'],
['progressista', 'adjective', 'c'],
['progressista', 'noun', 'c'],
['progressivo', 'adjective', 'b'],
['progresso', 'noun', 'b'],
['proibire', 'verb', 'b'],
['proiettare', 'verb', 'b'],
['proiettile', 'noun', 'b'],
['proiezione', 'noun', 'b'],
['prolunga', 'noun', 'c'],
['promessa', 'noun', 'b'],
['promettere', 'verb', 'a'],
['promozione', 'noun', 'b'],
['promuovere', 'verb', 'b'],
['pronto', 'adjective', 'a'],
['pronuncia', 'noun', 'c'],
['pronunciare', 'verb', 'a'],
['propaganda', 'noun', 'b'],
['propagandare', 'verb', 'c'],
['proporre', 'verb', 'a'],
['proporzione', 'noun', 'b'],
['proposito', 'noun', 'a'],
['proposizione', 'noun', 'c'],
['proposta', 'noun', 'a'],
['proprietà', 'noun', 'a'],
['proprietario', 'adjective', 'a'],
['proprietario', 'noun', 'a'],
['proprio', 'adjective', 'a'],
['proprio', 'adverb', 'a'],
['proprio', 'noun', 'a'],
['prosa', 'noun', 'b'],
['prosciugare', 'verb', 'c'],
['prosciutto', 'noun', 'b'],
['prosecco', 'noun', 'c'],
['proseguire', 'verb', 'a'],
['prospettiva', 'noun', 'b'],
['prossimo', 'adjective', 'a'],
['prossimo', 'noun', 'a'],
['prostituta', 'noun', 'b'],
['protagonista', 'adjective', 'a'],
['protagonista', 'noun', 'a'],
['proteggere', 'verb', 'a'],
['proteina', 'noun', 'b'],
['protesta', 'noun', 'b'],
['protestare', 'verb', 'b'],
['protetto', 'past_part', 'b'],
['protetto', 'adjective', 'b'],
['protetto', 'noun', 'b'],
['protezione', 'noun', 'b'],
['protocollo', 'noun', 'b'],
['prova', 'noun', 'a'],
['provare', 'verb', 'a'],
['provenienza', 'noun', 'b'],
['provenire', 'verb', 'a'],
['provincia', 'noun', 'a'],
['provinciale', 'adjective', 'b'],
['provinciale', 'noun', 'b'],
['provocare', 'verb', 'a'],
['provola', 'noun', 'c'],
['provolone', 'noun', 'c'],
['provvedere', 'verb', 'b'],
['provvedimento', 'noun', 'b'],
['provvisorio', 'adjective', 'b'],
['prudere', 'verb', 'c'],
['prugna', 'noun', 'c'],
['prugna', 'adjective', 'c'],
['prurito', 'noun', 'c'],
['pseudonimo', 'noun', 'b'],
['pseudonimo', 'adjective', 'b'],
['psichiatra', 'noun', 'b'],
['psichiatria', 'noun', 'c'],
['psichico', 'adjective', 'b'],
['psicologia', 'noun', 'b'],
['psicologico', 'adjective', 'b'],
['psicologo', 'noun', 'b'],
['pub', 'noun', 'b'],
['pubblicare', 'verb', 'a'],
['pubblicazione', 'noun', 'b'],
['pubblicità', 'noun', 'a'],
['pubblicitario', 'adjective', 'b'],
['pubblicitario', 'noun', 'b'],
['pubblico', 'adjective', 'a'],
['pubblico', 'noun', 'a'],
['pugilato', 'noun', 'c'],
['pugliese', 'adjective', 'c'],
['pugliese', 'noun', 'c'],
['pugno', 'noun', 'a'],
['pulce', 'noun', 'c'],
['pulce', 'adjective', 'c'],
['pulcino', 'noun', 'c'],
['puledro', 'noun', 'c'],
['pulire', 'verb', 'a'],
['pulito', 'past_part', 'b'],
['pulito', 'adjective', 'b'],
['pulito', 'noun', 'b'],
['pulizia', 'noun', 'b'],
['pullman', 'noun', 'b'],
['pullover', 'noun', 'c'],
['pulmino', 'noun', 'c'],
['pulsante', 'pres_part', 'b'],
['pulsante', 'adjective', 'b'],
['pulsante', 'noun', 'b'],
['puma', 'noun', 'c'],
['pungere', 'verb', 'c'],
['punire', 'verb', 'b'],
['punizione', 'noun', 'b'],
['punk', 'adjective', 'c'],
['punk', 'noun', 'c'],
['punta', 'noun', 'a'],
['puntare', 'verb', 'a'],
['puntata', 'noun', 'b'],
['puntato', 'past_part', 'b'],
['puntato', 'adjective', 'b'],
['punteggio', 'noun', 'c'],
['puntiglio', 'noun', 'c'],
['puntino', 'noun', 'b'],
['punto', 'noun', 'a'],
['puntuale', 'adjective', 'b'],
['puntura', 'noun', 'c'],
['pupa', 'noun', 'b'],
['pupazzo', 'noun', 'c'],
['pupo', 'noun', 'c'],
['purché', 'conjunction', 'b'],
['pure', 'adverb', 'a'],
['pure', 'conjunction', 'a'],
['purè', 'noun', 'c'],
['purga', 'noun', 'c'],
['puro', 'adjective', 'a'],
['puro', 'noun', 'a'],
['purtroppo', 'adverb', 'a'],
['puttana', 'noun', 'b'],
['puzza', 'noun', 'b'],
['puzzare', 'verb', 'b'],
['puzzle', 'noun', 'c'],
['qua', 'adverb', 'a'],
['quaderno', 'noun', 'b'],
['quadrato', 'past_part', 'b'],
['quadrato', 'adjective', 'b'],
['quadrato', 'noun', 'b'],
['quadrifoglio', 'noun', 'c'],
['quadro', 'adjective', 'a'],
['quadro', 'noun', 'a'],
['quaglia', 'noun', 'c'],
['qualche', 'adjective', 'a'],
['qualche', 'adverb', 'a'],
['qualcosa', 'pronoun', 'a'],
['qualcuno', 'pronoun', 'a'],
['qualcuno', 'adjective', 'a'],
['qualcuno', 'noun', 'a'],
['quale', 'adjective', 'a'],
['quale', 'pronoun', 'a'],
['quale', 'adverb', 'a'],
['quale', 'noun', 'a'],
['qualificare', 'verb', 'b'],
['qualità', 'noun', 'a'],
['qualora', 'conjunction', 'b'],
['qualsiasi', 'adjective', 'a'],
['qualunque', 'adjective', 'a'],
['qualunque', 'pronoun', 'a'],
['quando', 'conjunction', 'a'],
['quando', 'adverb', 'a'],
['quando', 'noun', 'a'],
['quantità', 'noun', 'a'],
['quantitativo', 'adjective', 'b'],
['quantitativo', 'noun', 'b'],
['quanto', 'adjective', 'a'],
['quanto', 'pronoun', 'a'],
['quanto', 'adverb', 'a'],
['quanto', 'noun', 'a'],
['quaranta', 'adjective', 'a'],
['quaranta', 'noun', 'a'],
['quarta', 'noun', 'b'],
['quartiere', 'noun', 'a'],
['quarto', 'adjective', 'a'],
['quarto', 'noun', 'a'],
['quasi', 'adverb', 'a'],
['quasi', 'conjunction', 'a'],
['quattordici', 'adjective', 'b'],
['quattordici', 'noun', 'b'],
['quattro', 'adjective', 'a'],
['quattro', 'noun', 'a'],
['quello', 'adjective', 'a'],
['quello', 'pronoun', 'a'],
['quercia', 'noun', 'c'],
['questione', 'noun', 'a'],
['questo', 'adjective', 'a'],
['questo', 'pronoun', 'a'],
['questura', 'noun', 'b'],
['qui', 'adverb', 'a'],
['quindi', 'adverb', 'a'],
['quindi', 'conjunction', 'a'],
['quindici', 'adjective', 'a'],
['quindici', 'noun', 'a'],
['quinta', 'noun', 'b'],
['quinto', 'adjective', 'b'],
['quinto', 'noun', 'b'],
['quiz', 'noun', 'a'],
['quota', 'noun', 'a'],
['quotidiano', 'adjective', 'a'],
['quotidiano', 'noun', 'a'],
['rabbia', 'noun', 'a'],
['racchetta', 'noun', 'c'],
['racchiudere', 'verb', 'b'],
['raccogliere', 'verb', 'a'],
['raccolta', 'noun', 'a'],
['raccomandare', 'verb', 'b'],
['raccomandazione', 'noun', 'c'],
['raccontare', 'verb', 'a'],
['racconto', 'noun', 'a'],
['raddoppiare', 'verb', 'b'],
['raddrizzare', 'verb', 'c'],
['radere', 'verb', 'c'],
['radiazione', 'noun', 'b'],
['radicale', 'adjective', 'b'],
['radicale', 'noun', 'b'],
['radicchio', 'noun', 'c'],
['radice', 'noun', 'a'],
['radio', 'noun', 'a'],
['radio', 'adjective', 'a'],
['rado', 'adjective', 'b'],
['rado', 'adverb', 'b'],
['raffigurare', 'verb', 'b'],
['raffinato', 'past_part', 'b'],
['raffinato', 'adjective', 'b'],
['raffinato', 'noun', 'b'],
['rafforzamento', 'noun', 'c'],
['rafforzare', 'verb', 'b'],
['raffreddore', 'noun', 'c'],
['ragazza', 'noun', 'a'],
['ragazzino', 'noun', 'a'],
['ragazzo', 'noun', 'a'],
['raggio', 'noun', 'a'],
['raggiungere', 'verb', 'a'],
['ragionamento', 'noun', 'b'],
['ragionare', 'verb', 'b'],
['ragione', 'noun', 'a'],
['ragionevole', 'adjective', 'b'],
['ragioniere', 'noun', 'b'],
['ragnatela', 'noun', 'c'],
['ragno', 'noun', 'c'],
['ragù', 'noun', 'c'],
['rallegrare', 'verb', 'c'],
['rallentare', 'verb', 'b'],
['rame', 'noun', 'b'],
['rammendo', 'noun', 'c'],
['ramo', 'noun', 'b'],
['rampicante', 'pres_part', 'c'],
['rampicante', 'adjective', 'c'],
['rampicante', 'noun', 'c'],
['rana', 'noun', 'c'],
['rancio', 'noun', 'c'],
['rapa', 'noun', 'c'],
['rapidamente', 'adverb', 'b'],
['rapido', 'adjective', 'a'],
['rapido', 'noun', 'a'],
['rapimento', 'noun', 'c'],
['rapina', 'noun', 'b'],
['rapinatore', 'adjective', 'c'],
['rapinatore', 'noun', 'c'],
['rapire', 'verb', 'b'],
['rapporto', 'noun', 'a'],
['rappresentante', 'pres_part', 'b'],
['rappresentante', 'adjective', 'b'],
['rappresentante', 'noun', 'b'],
['rappresentanza', 'noun', 'b'],
['rappresentare', 'verb', 'a'],
['rappresentazione', 'noun', 'b'],
['raramente', 'adverb', 'b'],
['raro', 'adjective', 'a'],
['raro', 'noun', 'a'],
['raro', 'adverb', 'a'],
['rasare', 'verb', 'c'],
['rasoio', 'noun', 'c'],
['rassegna', 'noun', 'b'],
['rassegnare', 'verb', 'b'],
['rassegnazione', 'noun', 'c'],
['rasserenare', 'verb', 'c'],
['rassicurare', 'verb', 'b'],
['rastrello', 'noun', 'c'],
['rata', 'noun', 'c'],
['rateale', 'adjective', 'c'],
['rattristare', 'verb', 'c'],
['rauco', 'adjective', 'c'],
['ravanello', 'noun', 'c'],
['razionale', 'adjective', 'b'],
['razionale', 'noun', 'b'],
['razza', 'noun', 'b'],
['razzo', 'noun', 'c'],
['re', 'noun', 'a'],
['reagire', 'verb', 'a'],
['reale', 'adjective', 'a'],
['reale', 'noun', 'a'],
['realistico', 'adjective', 'b'],
['realizzare', 'verb', 'a'],
['realizzazione', 'noun', 'b'],
['realmente', 'adverb', 'b'],
['realtà', 'noun', 'a'],
['reato', 'noun', 'a'],
['reazione', 'noun', 'a'],
['recare', 'verb', 'a'],
['recensione', 'noun', 'b'],
['recente', 'adjective', 'a'],
['recentemente', 'adverb', 'b'],
['recintare', 'verb', 'c'],
['recinto', 'past_part', 'c'],
['recinto', 'adjective', 'c'],
['recinto', 'noun', 'c'],
['recipiente', 'adjective', 'c'],
['recipiente', 'noun', 'c'],
['reciproco', 'adjective', 'b'],
['reciproco', 'noun', 'b'],
['recita', 'noun', 'c'],
['recitare', 'verb', 'a'],
['reclame', 'noun', 'c'],
['reclame', 'adjective', 'c'],
['reclamo', 'noun', 'c'],
['recluta', 'noun', 'c'],
['record', 'noun', 'b'],
['recuperare', 'verb', 'a'],
['recupero', 'noun', 'b'],
['redazione', 'noun', 'b'],
['reddito', 'noun', 'b'],
['redigere', 'verb', 'b'],
['referendum', 'noun', 'b'],
['regalare', 'verb', 'a'],
['regale', 'adjective', 'b'],
['regalo', 'noun', 'a'],
['reggere', 'verb', 'a'],
['reggimento', 'noun', 'c'],
['reggiseno', 'noun', 'b'],
['regia', 'noun', 'b'],
['regime', 'noun', 'a'],
['regina', 'noun', 'a'],
['regionale', 'adjective', 'b'],
['regionale', 'noun', 'b'],
['regione', 'noun', 'a'],
['regista', 'noun', 'a'],
['registrare', 'verb', 'a'],
['registratore', 'adjective', 'c'],
['registratore', 'noun', 'c'],
['registrazione', 'noun', 'a'],
['registro', 'noun', 'b'],
['regnare', 'verb', 'b'],
['regno', 'noun', 'a'],
['regola', 'noun', 'a'],
['regolamento', 'noun', 'b'],
['regolare', 'adjective', 'b'],
['regolare', 'noun', 'b'],
['regolare', 'verb', 'b'],
['regolarmente', 'adverb', 'b'],
['relativamente', 'adverb', 'b'],
['relativo', 'adjective', 'a'],
['relazione', 'noun', 'a'],
['religione', 'noun', 'a'],
['religioso', 'adjective', 'a'],
['religioso', 'noun', 'a'],
['remare', 'verb', 'c'],
['remo', 'noun', 'c'],
['remoto', 'adjective', 'b'],
['rendere', 'verb', 'a'],
['rene', 'noun', 'b'],
['reparto', 'noun', 'b'],
['repertorio', 'noun', 'b'],
['replica', 'noun', 'b'],
['replicare', 'verb', 'b'],
['repressione', 'noun', 'c'],
['reprimere', 'verb', 'c'],
['repubblica', 'noun', 'a'],
['repubblicano', 'adjective', 'b'],
['repubblicano', 'noun', 'b'],
['requisito', 'noun', 'b'],
['resa', 'noun', 'b'],
['residente', 'adjective', 'b'],
['residente', 'noun', 'b'],
['residenza', 'noun', 'b'],
['residuo', 'adjective', 'b'],
['residuo', 'noun', 'b'],
['resistente', 'pres_part', 'b'],
['resistente', 'adjective', 'b'],
['resistente', 'noun', 'b'],
['resistenza', 'noun', 'b'],
['resistere', 'verb', 'a'],
['resoconto', 'noun', 'c'],
['respingere', 'verb', 'b'],
['respirare', 'verb', 'a'],
['respirazione', 'noun', 'c'],
['respiro', 'noun', 'b'],
['responsabile', 'adjective', 'a'],
['responsabile', 'noun', 'a'],
['responsabilità', 'noun', 'a'],
['restare', 'verb', 'a'],
['restituire', 'verb', 'b'],
['resto', 'noun', 'a'],
['restringere', 'verb', 'b'],
['rete', 'noun', 'a'],
['retorica', 'noun', 'b'],
['retro', 'adverb', 'b'],
['retro', 'noun', 'b'],
['retta', 'noun', 'b'],
['rettangolare', 'adjective', 'c'],
['rettile', 'noun', 'c'],
['rettile', 'adjective', 'c'],
['retto', 'adjective', 'b'],
['retto', 'noun', 'b'],
['revisione', 'noun', 'b'],
['rialzare', 'verb', 'b'],
['riaprire', 'verb', 'b'],
['riassumere', 'verb', 'b'],
['ribadire', 'verb', 'b'],
['ribattere', 'verb', 'b'],
['ribellare', 'verb', 'b'],
['ribelle', 'adjective', 'b'],
['ribelle', 'noun', 'b'],
['ricadere', 'verb', 'b'],
['ricaduta', 'noun', 'c'],
['ricalcare', 'verb', 'c'],
['ricamare', 'verb', 'c'],
['ricambiare', 'verb', 'b'],
['ricambio', 'noun', 'c'],
['ricamo', 'noun', 'c'],
['ricarica', 'noun', 'c'],
['ricavare', 'verb', 'b'],
['ricchezza', 'noun', 'b'],
['riccio', 'adjective', 'c'],
['riccio', 'noun', 'c'],
['ricciolo', 'adjective', 'c'],
['ricciolo', 'noun', 'c'],
['ricco', 'adjective', 'a'],
['ricerca', 'noun', 'a'],
['ricercare', 'verb', 'b'],
['ricercatore', 'adjective', 'b'],
['ricercatore', 'noun', 'b'],
['ricetta', 'noun', 'a'],
['ricevere', 'verb', 'a'],
['ricevimento', 'noun', 'c'],
['ricevuta', 'noun', 'b'],
['richiamare', 'verb', 'a'],
['richiamo', 'noun', 'b'],
['richiedere', 'verb', 'a'],
['richiesta', 'noun', 'a'],
['richiudere', 'verb', 'b'],
['ricominciare', 'verb', 'a'],
['ricompensa', 'noun', 'c'],
['ricompensare', 'verb', 'c'],
['riconciliarsi', 'verb', 'c'],
['riconoscere', 'verb', 'a'],
['riconoscimento', 'noun', 'b'],
['ricopiare', 'verb', 'c'],
['ricoprire', 'verb', 'b'],
['ricordare', 'verb', 'a'],
['ricordo', 'noun', 'a'],
['ricorrere', 'verb', 'b'],
['ricorso', 'noun', 'b'],
['ricostruire', 'verb', 'b'],
['ricostruzione', 'noun', 'b'],
['ricotta', 'noun', 'c'],
['ricoverare', 'verb', 'b'],
['ricovero', 'noun', 'c'],
['ricreazione', 'noun', 'c'],
['ridare', 'verb', 'b'],
['ridere', 'verb', 'a'],
['ridere', 'noun', 'a'],
['ridicolo', 'adjective', 'b'],
['ridicolo', 'noun', 'b'],
['ridotto', 'past_part', 'b'],
['ridotto', 'adjective', 'b'],
['ridotto', 'noun', 'b'],
['ridurre', 'verb', 'a'],
['riduzione', 'noun', 'b'],
['riempire', 'verb', 'a'],
['rientrare', 'verb', 'a'],
['rientro', 'noun', 'b'],
['rifare', 'verb', 'a'],
['riferimento', 'noun', 'a'],
['riferire', 'verb', 'a'],
['rifinire', 'verb', 'c'],
['rifiutare', 'verb', 'a'],
['rifiuto', 'noun', 'a'],
['riflessione', 'noun', 'a'],
['riflesso', 'noun', 'b'],
['riflettere', 'verb', 'a'],
['riflettore', 'noun', 'c'],
['riflettore', 'adjective', 'c'],
['riforma', 'noun', 'b'],
['rifornimento', 'noun', 'c'],
['rifugiare', 'verb', 'b'],
['rifugio', 'noun', 'b'],
['riga', 'noun', 'a'],
['rigattiere', 'noun', 'c'],
['rigido', 'adjective', 'b'],
['rigore', 'noun', 'b'],
['rigoroso', 'adjective', 'b'],
['rigovernare', 'verb', 'c'],
['riguardare', 'verb', 'a'],
['riguardo', 'noun', 'a'],
['rilasciare', 'verb', 'b'],
['rilassare', 'verb', 'a'],
['rilegare', 'verb', 'c'],
['rileggere', 'verb', 'b'],
['rilevante', 'pres_part', 'b'],
['rilevante', 'adjective', 'b'],
['rilevare', 'verb', 'b'],
['rilievo', 'noun', 'b'],
['rima', 'noun', 'b'],
['rimandare', 'verb', 'b'],
['rimanenza', 'noun', 'c'],
['rimanere', 'verb', 'a'],
['rimbombare', 'verb', 'c'],
['rimborsare', 'verb', 'c'],
['rimediare', 'verb', 'b'],
['rimedio', 'noun', 'b'],
['rimettere', 'verb', 'a'],
['rimodernare', 'verb', 'c'],
['rimorchio', 'noun', 'c'],
['rimpiangere', 'verb', 'b'],
['rimproverare', 'verb', 'b'],
['rimprovero', 'noun', 'c'],
['rimuovere', 'verb', 'b'],
['rinascere', 'verb', 'b'],
['rinascimento', 'noun', 'b'],
['rinascimento', 'adjective', 'b'],
['rincarare', 'verb', 'c'],
['rinchiudere', 'verb', 'b'],
['rincorsa', 'noun', 'c'],
['rinforzo', 'noun', 'c'],
['rinfresco', 'noun', 'c'],
['ringhiare', 'verb', 'c'],
['ringhiera', 'noun', 'c'],
['ringhio', 'noun', 'c'],
['ringiovanire', 'verb', 'c'],
['ringraziare', 'verb', 'a'],
['rinnegare', 'verb', 'c'],
['rinnovare', 'verb', 'b'],
['rinoceronte', 'noun', 'c'],
['rintracciare', 'verb', 'b'],
['rinuncia', 'noun', 'c'],
['rinunciare', 'verb', 'a'],
['rinvenire', 'verb', 'b'],
['rinviare', 'verb', 'b'],
['rinvio', 'noun', 'c'],
['rione', 'noun', 'c'],
['riordinare', 'verb', 'c'],
['riparare', 'verb', 'b'],
['riparo', 'noun', 'b'],
['ripartire', 'verb', 'b'],
['ripartire', 'verb', 'b'],
['ripensamento', 'noun', 'c'],
['ripensare', 'verb', 'b'],
['ripetente', 'pres_part', 'c'],
['ripetente', 'adjective', 'c'],
['ripetente', 'noun', 'c'],
['ripetere', 'verb', 'a'],
['ripetizione', 'noun', 'b'],
['ripido', 'adjective', 'c'],
['ripiego', 'noun', 'c'],
['ripieno', 'adjective', 'c'],
['ripieno', 'noun', 'c'],
['riportare', 'verb', 'a'],
['riposare', 'verb', 'b'],
['riposo', 'noun', 'b'],
['riposo', 'loc-comando', 'b'],
['riposo', 'noun', 'b'],
['riprendere', 'verb', 'a'],
['ripresa', 'noun', 'b'],
['riprodurre', 'verb', 'b'],
['riproduzione', 'noun', 'a'],
['riproporre', 'verb', 'b'],
['riprovare', 'verb', 'b'],
['ripulire', 'verb', 'b'],
['risaia', 'noun', 'c'],
['risalire', 'verb', 'a'],
['risarcimento', 'noun', 'b'],
['risata', 'noun', 'b'],
['riscaldamento', 'noun', 'b'],
['riscaldare', 'verb', 'b'],
['riscattare', 'verb', 'c'],
['riscatto', 'noun', 'c'],
['rischiare', 'verb', 'a'],
['rischio', 'noun', 'a'],
['rischioso', 'adjective', 'b'],
['risciacquare', 'verb', 'c'],
['riscontrare', 'verb', 'b'],
['riscontro', 'noun', 'b'],
['riscuotere', 'verb', 'b'],
['risentimento', 'noun', 'c'],
['risentire', 'verb', 'b'],
['riserva', 'noun', 'b'],
['riservare', 'verb', 'a'],
['riservato', 'past_part', 'a'],
['riservato', 'adjective', 'a'],
['risiedere', 'verb', 'b'],
['riso', 'noun', 'b'],
['risoluzione', 'noun', 'b'],
['risolvere', 'verb', 'a'],
['risonanza', 'noun', 'b'],
['risorsa', 'noun', 'a'],
['risparmiare', 'verb', 'b'],
['risparmio', 'noun', 'b'],
['rispettare', 'verb', 'a'],
['rispettivamente', 'adverb', 'b'],
['rispettivo', 'adjective', 'b'],
['rispetto', 'noun', 'a'],
['risplendere', 'verb', 'c'],
['rispondere', 'verb', 'a'],
['risposta', 'noun', 'a'],
['rissa', 'noun', 'b'],
['ristampare', 'verb', 'c'],
['ristorante', 'noun', 'a'],
['ristretto', 'past_part', 'b'],
['ristretto', 'adjective', 'b'],
['ristretto', 'noun', 'b'],
['risultare', 'verb', 'a'],
['risultato', 'past_part', 'a'],
['risultato', 'adjective', 'a'],
['risultato', 'noun', 'a'],
['risvegliare', 'verb', 'b'],
['risveglio', 'noun', 'b'],
['ritagliare', 'verb', 'b'],
['ritardare', 'verb', 'b'],
['ritardo', 'noun', 'a'],
['ritenere', 'verb', 'a'],
['ritirare', 'verb', 'a'],
['ritirata', 'noun', 'c'],
['ritiro', 'noun', 'b'],
['ritmo', 'noun', 'a'],
['rito', 'noun', 'b'],
['ritoccare', 'verb', 'c'],
['ritornare', 'verb', 'a'],
['ritornello', 'noun', 'c'],
['ritorno', 'noun', 'a'],
['ritrarre', 'verb', 'b'],
['ritratto', 'past_part', 'b'],
['ritratto', 'adjective', 'b'],
['ritratto', 'noun', 'b'],
['ritrovare', 'verb', 'a'],
['ritrovo', 'noun', 'c'],
['ritto', 'adjective', 'c'],
['ritto', 'noun', 'c'],
['ritto', 'adverb', 'c'],
['ritto', 'preposition', 'c'],
['rituale', 'adjective', 'b'],
['rituale', 'noun', 'b'],
['riunione', 'noun', 'a'],
['riunire', 'verb', 'a'],
['riunito', 'past_part', 'c'],
['riunito', 'adjective', 'c'],
['riunito', 'noun', 'c'],
['riuscire', 'verb', 'a'],
['riuscita', 'noun', 'c'],
['riva', 'noun', 'b'],
['rivale', 'adjective', 'b'],
['rivale', 'noun', 'b'],
['rivedere', 'verb', 'a'],
['rivelare', 'verb', 'a'],
['rivelazione', 'noun', 'b'],
['rivendicare', 'verb', 'b'],
['rivendita', 'noun', 'c'],
['rivestimento', 'noun', 'c'],
['rivestire', 'verb', 'b'],
['rivincita', 'noun', 'c'],
['rivista', 'noun', 'a'],
['rivisto', 'past_part', 'b'],
['rivisto', 'adjective', 'b'],
['rivolgere', 'verb', 'a'],
['rivolta', 'noun', 'b'],
['rivoltare', 'verb', 'c'],
['rivoluzionario', 'adjective', 'b'],
['rivoluzionario', 'noun', 'b'],
['rivoluzione', 'noun', 'a'],
['roba', 'noun', 'a'],
['robot', 'noun', 'b'],
['robusto', 'adjective', 'b'],
['rocca', 'noun', 'c'],
['rocchetto', 'noun', 'c'],
['roccia', 'noun', 'b'],
['roccioso', 'adjective', 'c'],
['rock', 'noun', 'b'],
['rock', 'adjective', 'b'],
['rodaggio', 'noun', 'c'],
['rodere', 'verb', 'c'],
['romagnolo', 'adjective', 'c'],
['romagnolo', 'noun', 'c'],
['romano', 'adjective', 'a'],
['romano', 'noun', 'a'],
['romantico', 'adjective', 'b'],
['romantico', 'noun', 'b'],
['romanzo', 'noun', 'a'],
['rombo', 'noun', 'c'],
['romeno', 'adjective', 'c'],
['romeno', 'noun', 'c'],
['rompere', 'verb', 'a'],
['rondine', 'noun', 'c'],
['ronzare', 'verb', 'c'],
['ronzio', 'noun', 'c'],
['rosa', 'noun', 'a'],
['rosa', 'adjective', 'a'],
['rosario', 'noun', 'c'],
['rosato', 'adjective', 'c'],
['rosato', 'noun', 'c'],
['roseo', 'adjective', 'c'],
['roseo', 'noun', 'c'],
['rosetta', 'noun', 'c'],
['rosmarino', 'noun', 'c'],
['rosolia', 'noun', 'c'],
['rosso', 'adjective', 'a'],
['rosso', 'noun', 'a'],
['rossore', 'noun', 'c'],
['rosticceria', 'noun', 'c'],
['rotaia', 'noun', 'c'],
['rotella', 'noun', 'c'],
['rotolare', 'verb', 'c'],
['rotondo', 'adjective', 'b'],
['rotondo', 'noun', 'b'],
['rotta', 'noun', 'b'],
['rotto', 'past_part', 'b'],
['rotto', 'adjective', 'b'],
['rotto', 'noun', 'b'],
['rottura', 'noun', 'b'],
['roulotte', 'noun', 'c'],
['rovesciare', 'verb', 'b'],
['rovescio', 'adjective', 'b'],
['rovescio', 'noun', 'b'],
['rovina', 'noun', 'b'],
['rovinare', 'verb', 'a'],
['rovo', 'noun', 'c'],
['rozzo', 'adjective', 'c'],
['rubare', 'verb', 'a'],
['rubinetto', 'noun', 'c'],
['rubrica', 'noun', 'b'],
['rude', 'adjective', 'c'],
['ruga', 'noun', 'c'],
['ruggine', 'noun', 'c'],
['ruggine', 'adjective', 'c'],
['ruggire', 'verb', 'c'],
['ruggito', 'past_part', 'c'],
['ruggito', 'noun', 'c'],
['rullo', 'noun', 'c'],
['rumeno', 'adjective', 'c'],
['rumeno', 'noun', 'c'],
['ruminante', 'pres_part', 'c'],
['ruminante', 'adjective', 'c'],
['ruminante', 'noun', 'c'],
['rumore', 'noun', 'a'],
['ruolo', 'noun', 'a'],
['ruota', 'noun', 'b'],
['ruotare', 'verb', 'b'],
['ruscello', 'noun', 'c'],
['ruspa', 'noun', 'c'],
['russare', 'verb', 'c'],
['russo', 'adjective', 'a'],
['russo', 'noun', 'a'],
['rustico', 'adjective', 'c'],
['rustico', 'noun', 'c'],
['ruttare', 'verb', 'c'],
['rutto', 'noun', 'c'],
['sabato', 'noun', 'a'],
['sabbia', 'noun', 'b'],
['sabbia', 'adjective', 'b'],
['sabotare', 'verb', 'c'],
['saccheggiare', 'verb', 'c'],
['sacchetto', 'noun', 'b'],
['sacco', 'noun', 'a'],
['sacerdote', 'noun', 'b'],
['sacrificare', 'verb', 'b'],
['sacrificio', 'noun', 'b'],
['sacro', 'adjective', 'b'],
['sacro', 'noun', 'b'],
['safari', 'noun', 'c'],
['saga', 'noun', 'b'],
['saggezza', 'noun', 'b'],
['saggio', 'adjective', 'b'],
['saggio', 'noun', 'b'],
['saggio', 'noun', 'b'],
['sagra', 'noun', 'c'],
['sagrestano', 'noun', 'c'],
['sagrestano', 'adjective', 'c'],
['sala', 'noun', 'a'],
['salame', 'noun', 'c'],
['salare', 'verb', 'c'],
['salario', 'adjective', 'b'],
['salario', 'noun', 'b'],
['salatino', 'noun', 'c'],
['salato', 'past_part', 'b'],
['salato', 'adjective', 'b'],
['salato', 'noun', 'b'],
['saldatura', 'noun', 'c'],
['sale', 'noun', 'b'],
['salice', 'noun', 'c'],
['saliera', 'noun', 'c'],
['salire', 'verb', 'a'],
['salita', 'noun', 'b'],
['saliva', 'noun', 'c'],
['salmone', 'noun', 'c'],
['salmone', 'adjective', 'c'],
['salone', 'noun', 'b'],
['salotto', 'noun', 'b'],
['salsa', 'noun', 'b'],
['salsiccia', 'noun', 'c'],
['saltare', 'verb', 'a'],
['saltellare', 'verb', 'c'],
['salto', 'noun', 'b'],
['salume', 'noun', 'c'],
['salutare', 'verb', 'a'],
['salutare', 'noun', 'a'],
['salute', 'noun', 'a'],
['salute', 'exclamation', 'a'],
['saluto', 'noun', 'a'],
['salvadanaio', 'noun', 'c'],
['salvagente', 'noun', 'c'],
['salvare', 'verb', 'a'],
['salvaslip', 'noun', 'c'],
['salvatore', 'adjective', 'b'],
['salvatore', 'noun', 'b'],
['salve', 'exclamation', 'b'],
['salvezza', 'noun', 'b'],
['salvia', 'noun', 'c'],
['salvietta', 'noun', 'c'],
['salvo', 'adjective', 'a'],
['salvo', 'preposition', 'a'],
['sandalo', 'noun', 'c'],
['sangue', 'noun', 'a'],
['sangue', 'adjective', 'a'],
['sanguinare', 'verb', 'c'],
['sanguisuga', 'noun', 'c'],
['sanità', 'noun', 'b'],
['sanitaria', 'noun', 'c'],
['sanitario', 'adjective', 'b'],
['sanitario', 'noun', 'b'],
['sano', 'adjective', 'a'],
['santo', 'adjective', 'a'],
['santo', 'noun', 'a'],
['sanzione', 'noun', 'b'],
['sapere', 'verb', 'a'],
['sapere', 'noun', 'b'],
['sapiente', 'adjective', 'c'],
['sapiente', 'noun', 'c'],
['sapone', 'noun', 'b'],
['saponetta', 'noun', 'c'],
['sapore', 'noun', 'b'],
['saporito', 'past_part', 'c'],
['saporito', 'adjective', 'c'],
['sardina', 'noun', 'c'],
['sardo', 'adjective', 'b'],
['sardo', 'noun', 'b'],
['sarto', 'noun', 'c'],
['sasso', 'noun', 'b'],
['satellite', 'noun', 'b'],
['sazio', 'past_part', 'c'],
['sazio', 'adjective', 'c'],
['sbadato', 'adjective', 'c'],
['sbadato', 'noun', 'c'],
['sbadigliare', 'verb', 'c'],
['sbadiglio', 'noun', 'c'],
['sbagliare', 'verb', 'a'],
['sbagliato', 'past_part', 'a'],
['sbagliato', 'adjective', 'a'],
['sbaglio', 'noun', 'b'],
['sbarbare', 'verb', 'c'],
['sbarcare', 'verb', 'b'],
['sbarra', 'noun', 'c'],
['sbarramento', 'noun', 'c'],
['sbattere', 'verb', 'a'],
['sberla', 'noun', 'c'],
['sbiadire', 'verb', 'c'],
['sbiancare', 'verb', 'c'],
['sbigottire', 'verb', 'c'],
['sbloccare', 'verb', 'c'],
['sboccare', 'verb', 'c'],
['sbocciare', 'verb', 'c'],
['sbocco', 'noun', 'c'],
['sbornia', 'noun', 'c'],
['sbottonare', 'verb', 'c'],
['sbriciolare', 'verb', 'c'],
['sbrigare', 'verb', 'b'],
['sbronza', 'noun', 'c'],
['sbronzo', 'adjective', 'c'],
['sbucciare', 'verb', 'c'],
['sbuffare', 'verb', 'c'],
['scacchiera', 'noun', 'c'],
['scadenza', 'noun', 'b'],
['scadere', 'verb', 'b'],
['scaffale', 'noun', 'b'],
['scafo', 'noun', 'c'],
['scala', 'noun', 'a'],
['scalare', 'verb', 'b'],
['scalata', 'noun', 'c'],
['scaldabagno', 'noun', 'c'],
['scaldare', 'verb', 'b'],
['scalinata', 'noun', 'c'],
['scalino', 'noun', 'c'],
['scalpello', 'noun', 'c'],
['scalzo', 'adjective', 'c'],
['scambiare', 'verb', 'a'],
['scambio', 'noun', 'a'],
['scamorza', 'noun', 'c'],
['scampagnata', 'noun', 'c'],
['scampo', 'noun', 'c'],
['scandalizzare', 'verb', 'c'],
['scandalo', 'noun', 'b'],
['scandire', 'verb', 'b'],
['scansare', 'verb', 'c'],
['scapito', 'noun', 'c'],
['scappamento', 'noun', 'c'],
['scappare', 'verb', 'a'],
['scappatoia', 'noun', 'c'],
['scarabocchiare', 'verb', 'c'],
['scarabocchio', 'noun', 'c'],
['scarafaggio', 'noun', 'c'],
['scarcerare', 'verb', 'c'],
['scaricare', 'verb', 'a'],
['scaricatore', 'noun', 'c'],
['scarico', 'noun', 'b'],
['scarlattina', 'noun', 'c'],
['scarpa', 'noun', 'a'],
['scarpiera', 'noun', 'c'],
['scarpone', 'noun', 'c'],
['scarso', 'adjective', 'b'],
['scartare', 'verb', 'b'],
['scatenare', 'verb', 'b'],
['scatola', 'noun', 'a'],
['scattare', 'verb', 'a'],
['scatto', 'noun', 'b'],
['scavalcare', 'verb', 'c'],
['scavare', 'verb', 'b'],
['scavo', 'noun', 'c'],
['scegliere', 'verb', 'a'],
['scelta', 'noun', 'a'],
['scemo', 'past_part', 'b'],
['scemo', 'adjective', 'b'],
['scemo', 'noun', 'b'],
['scena', 'noun', 'a'],
['scenario', 'noun', 'b'],
['scendere', 'verb', 'a'],
['sceneggiatura', 'noun', 'b'],
['sceriffo', 'noun', 'c'],
['scheda', 'noun', 'b'],
['schedario', 'noun', 'c'],
['scheggia', 'noun', 'c'],
['scheletro', 'noun', 'c'],
['schema', 'noun', 'b'],
['schermo', 'noun', 'a'],
['scherzare', 'verb', 'a'],
['scherzo', 'noun', 'b'],
['scherzoso', 'adjective', 'c'],
['schiacciare', 'verb', 'b'],
['schiacciato', 'past_part', 'c'],
['schiacciato', 'adjective', 'c'],
['schiaffo', 'noun', 'b'],
['schiavo', 'adjective', 'b'],
['schiavo', 'noun', 'b'],
['schiena', 'noun', 'a'],
['schierare', 'verb', 'b'],
['schietto', 'adjective', 'c'],
['schifo', 'noun', 'a'],
['schifo', 'adjective', 'a'],
['schiuma', 'noun', 'c'],
['schizzare', 'verb', 'b'],
['schizzo', 'noun', 'b'],
['sci', 'noun', 'b'],
['scia', 'noun', 'b'],
['sciacquare', 'verb', 'c'],
['scialle', 'noun', 'c'],
['sciame', 'noun', 'c'],
['sciare', 'verb', 'c'],
['sciarpa', 'noun', 'c'],
['sciatore', 'noun', 'c'],
['scientifico', 'adjective', 'a'],
['scientifico', 'noun', 'a'],
['scienza', 'noun', 'a'],
['scienziato', 'noun', 'b'],
['scienziato', 'adjective', 'b'],
['scimmia', 'noun', 'b'],
['scintilla', 'noun', 'b'],
['sciocchezza', 'noun', 'b'],
['sciocco', 'adjective', 'b'],
['sciocco', 'noun', 'b'],
['sciogliere', 'verb', 'b'],
['scioperare', 'verb', 'c'],
['sciopero', 'noun', 'b'],
['scirocco', 'noun', 'c'],
['sciroppo', 'noun', 'c'],
['scivolare', 'verb', 'b'],
['scivolata', 'noun', 'c'],
['scivolo', 'noun', 'c'],
['scocciare', 'verb', 'c'],
['scodella', 'noun', 'c'],
['scodinzolare', 'verb', 'c'],
['scoglio', 'noun', 'c'],
['scoiattolo', 'noun', 'c'],
['scolapiatti', 'noun', 'c'],
['scolaro', 'noun', 'c'],
['scolastico', 'adjective', 'b'],
['scolastico', 'noun', 'b'],
['scolpire', 'verb', 'c'],
['scommessa', 'noun', 'b'],
['scommettere', 'verb', 'b'],
['scomodo', 'adjective', 'c'],
['scomparire', 'verb', 'a'],
['scomparsa', 'noun', 'b'],
['scompartimento', 'noun', 'c'],
['sconfiggere', 'verb', 'b'],
['sconfitta', 'noun', 'b'],
['scongelare', 'verb', 'c'],
['sconosciuto', 'past_part', 'a'],
['sconosciuto', 'adjective', 'a'],
['sconsigliare', 'verb', 'c'],
['scontato', 'past_part', 'b'],
['scontato', 'adjective', 'b'],
['scontento', 'adjective', 'c'],
['sconto', 'noun', 'b'],
['scontrare', 'verb', 'b'],
['scontro', 'noun', 'b'],
['sconvolgere', 'verb', 'b'],
['scopa', 'noun', 'c'],
['scopare', 'verb', 'b'],
['scoperta', 'noun', 'a'],
['scopo', 'noun', 'a'],
['scoppiare', 'verb', 'a'],
['scoprire', 'verb', 'a'],
['scordare', 'verb', 'b'],
['scorgere', 'verb', 'b'],
['scorpione', 'noun', 'c'],
['scorrere', 'verb', 'a'],
['scorretto', 'adjective', 'c'],
['scorso', 'past_part', 'a'],
['scorso', 'adjective', 'a'],
['scorso', 'noun', 'a'],
['scorta', 'noun', 'b'],
['scortese', 'adjective', 'c'],
['scossa', 'noun', 'c'],
['scout', 'noun', 'c'],
['scout', 'adjective', 'c'],
['scozzese', 'adjective', 'c'],
['scozzese', 'noun', 'c'],
['screpolare', 'verb', 'c'],
['scricchiolare', 'verb', 'c'],
['scritta', 'noun', 'b'],
['scritto', 'past_part', 'b'],
['scritto', 'adjective', 'b'],
['scritto', 'noun', 'b'],
['scrittore', 'noun', 'a'],
['scrittura', 'noun', 'a'],
['scrivania', 'noun', 'b'],
['scrivere', 'verb', 'a'],
['scrofa', 'noun', 'c'],
['scrupolo', 'noun', 'c'],
['scudetto', 'noun', 'c'],
['scudo', 'noun', 'b'],
['scultore', 'noun', 'c'],
['scultura', 'noun', 'b'],
['scuola', 'noun', 'a'],
['scuotere', 'verb', 'b'],
['scure', 'noun', 'c'],
['scurire', 'verb', 'c'],
['scuro', 'adjective', 'b'],
['scuro', 'noun', 'b'],
['scuro', 'adverb', 'b'],
['scusa', 'noun', 'a'],
['scusare', 'verb', 'a'],
['sdebitarsi', 'verb', 'c'],
['sdegnare', 'verb', 'c'],
['sdraiare', 'verb', 'b'],
['sdraiato', 'past_part', 'c'],
['sdraiato', 'adjective', 'c'],
['se', 'pronoun', 'a'],
['se', 'conjunction', 'a'],
['se', 'noun', 'a'],
['sebbene', 'conjunction', 'b'],
['seccare', 'verb', 'b'],
['seccatura', 'noun', 'c'],
['secchio', 'noun', 'b'],
['secchione', 'noun', 'b'],
['secco', 'adjective', 'a'],
['secco', 'noun', 'a'],
['secolo', 'noun', 'a'],
['seconda', 'noun', 'b'],
['secondario', 'adjective', 'b'],
['secondario', 'noun', 'b'],
['secondo', 'adjective', 'a'],
['secondo', 'noun', 'a'],
['secondo', 'adverb', 'a'],
['secondo', 'preposition', 'a'],
['secondo', 'conjunction', 'a'],
['sedano', 'noun', 'c'],
['sede', 'noun', 'a'],
['sedere', 'verb', 'a'],
['sedia', 'noun', 'a'],
['sedici', 'adjective', 'b'],
['sedici', 'noun', 'b'],
['sedile', 'noun', 'b'],
['sedurre', 'verb', 'b'],
['seduta', 'noun', 'b'],
['seduttore', 'adjective', 'c'],
['seduttore', 'noun', 'c'],
['seggiolino', 'noun', 'c'],
['seggiovia', 'noun', 'c'],
['segheria', 'noun', 'c'],
['segmento', 'noun', 'b'],
['segnalare', 'verb', 'a'],
['segnalazione', 'noun', 'b'],
['segnale', 'noun', 'a'],
['segnare', 'verb', 'a'],
['segno', 'noun', 'a'],
['segretaria', 'noun', 'b'],
['segretario', 'noun', 'b'],
['segreteria', 'noun', 'b'],
['segreto', 'noun', 'a'],
['segreto', 'adjective', 'a'],
['segreto', 'noun', 'a'],
['segreto', 'adverb', 'a'],
['seguente', 'pres_part', 'a'],
['seguente', 'adjective', 'a'],
['seguente', 'noun', 'a'],
['seguire', 'verb', 'a'],
['seguito', 'noun', 'a'],
['sei', 'adjective', 'a'],
['sei', 'noun', 'a'],
['selezionare', 'verb', 'b'],
['selezione', 'noun', 'b'],
['selva', 'noun', 'c'],
['selvaggina', 'noun', 'c'],
['selvaggio', 'adjective', 'b'],
['selvaggio', 'noun', 'b'],
['semaforo', 'noun', 'c'],
['semantico', 'adjective', 'b'],
['sembrare', 'verb', 'a'],
['seme', 'noun', 'b'],
['semestre', 'noun', 'c'],
['semifreddo', 'adjective', 'c'],
['semifreddo', 'noun', 'c'],
['seminare', 'verb', 'b'],
['semmai', 'conjunction', 'b'],
['semmai', 'adverb', 'b'],
['semolino', 'noun', 'c'],
['semplice', 'adjective', 'a'],
['semplice', 'noun', 'a'],
['semplicemente', 'adverb', 'a'],
['semplicità', 'noun', 'b'],
['semplificare', 'verb', 'b'],
['sempre', 'adverb', 'a'],
['senape', 'noun', 'c'],
['senape', 'adjective', 'c'],
['senato', 'noun', 'b'],
['senatore', 'noun', 'b'],
['sennò', 'adverb', 'b'],
['seno', 'noun', 'a'],
['sensazione', 'noun', 'a'],
['sensibile', 'adjective', 'b'],
['sensibile', 'noun', 'b'],
['sensibilità', 'noun', 'b'],
['senso', 'noun', 'a'],
['sensuale', 'adjective', 'b'],
['sentenza', 'noun', 'a'],
['sentiero', 'noun', 'b'],
['sentimentale', 'adjective', 'b'],
['sentimentale', 'noun', 'b'],
['sentimento', 'noun', 'a'],
['sentire', 'verb', 'a'],
['sentito', 'past_part', 'b'],
['sentito', 'adjective', 'b'],
['senza', 'preposition', 'a'],
['senza', 'conjunction', 'a'],
['separare', 'verb', 'a'],
['separato', 'past_part', 'b'],
['separato', 'adjective', 'b'],
['separato', 'noun', 'b'],
['separazione', 'noun', 'b'],
['sepolto', 'past_part', 'b'],
['sepolto', 'adjective', 'b'],
['sepolto', 'noun', 'b'],
['seppellire', 'verb', 'b'],
['seppia', 'noun', 'c'],
['seppia', 'adjective', 'c'],
['seppia', 'noun', 'c'],
['sequenza', 'noun', 'b'],
['sequestrare', 'verb', 'b'],
['sequestro', 'noun', 'b'],
['sera', 'noun', 'a'],
['serata', 'noun', 'a'],
['serbo', 'adjective', 'c'],
['serbo', 'noun', 'c'],
['serenata', 'noun', 'c'],
['serenità', 'noun', 'b'],
['sereno', 'adjective', 'a'],
['sereno', 'noun', 'a'],
['sergente', 'noun', 'b'],
['seriamente', 'adverb', 'b'],
['serie', 'noun', 'a'],
['serietà', 'noun', 'c'],
['serio', 'adjective', 'a'],
['serio', 'noun', 'a'],
['serpente', 'noun', 'b'],
['serra', 'noun', 'b'],
['servire', 'verb', 'a'],
['servizio', 'noun', 'a'],
['servo', 'noun', 'b'],
['servo', 'adjective', 'b'],
['sessanta', 'adjective', 'b'],
['sessanta', 'noun', 'b'],
['sesso', 'noun', 'a'],
['sessuale', 'adjective', 'a'],
['sesto', 'adjective', 'b'],
['sesto', 'noun', 'b'],
['set', 'noun', 'b'],
['seta', 'noun', 'b'],
['sete', 'noun', 'b'],
['setta', 'noun', 'b'],
['settanta', 'adjective', 'b'],
['settanta', 'noun', 'b'],
['sette', 'adjective', 'a'],
['sette', 'noun', 'a'],
['settembre', 'noun', 'a'],
['settentrione', 'noun', 'c'],
['settimana', 'noun', 'a'],
['settimanale', 'adjective', 'b'],
['settimanale', 'noun', 'b'],
['settimo', 'adjective', 'b'],
['settimo', 'noun', 'b'],
['settore', 'noun', 'a'],
['severo', 'adjective', 'b'],
['sexy', 'adjective', 'b'],
['sezione', 'noun', 'a'],
['sfera', 'noun', 'b'],
['sfida', 'noun', 'a'],
['sfidare', 'verb', 'b'],
['sfiducia', 'noun', 'c'],
['sfigato', 'adjective', 'b'],
['sfigato', 'noun', 'b'],
['sfilare', 'verb', 'b'],
['sfilata', 'noun', 'b'],
['sfinire', 'verb', 'c'],
['sfiorare', 'verb', 'b'],
['sfociare', 'verb', 'c'],
['sfogare', 'verb', 'b'],
['sfoglia', 'noun', 'c'],
['sfogliare', 'verb', 'b'],
['sfogo', 'noun', 'b'],
['sfollamento', 'noun', 'c'],
['sfollare', 'verb', 'c'],
['sfondare', 'verb', 'b'],
['sfondo', 'noun', 'b'],
['sfortunato', 'adjective', 'c'],
['sforzare', 'verb', 'b'],
['sforzo', 'noun', 'a'],
['sfrenato', 'past_part', 'c'],
['sfrenato', 'adjective', 'c'],
['sfruttare', 'verb', 'a'],
['sfuggire', 'verb', 'a'],
['sgabello', 'noun', 'c'],
['sganciare', 'verb', 'c'],
['sgarbato', 'adjective', 'c'],
['sgarbato', 'noun', 'c'],
['sgarbo', 'noun', 'c'],
['sgombro', 'noun', 'c'],
['sgomento', 'noun', 'c'],
['sgonfiare', 'verb', 'c'],
['sgozzare', 'verb', 'c'],
['sgrassare', 'verb', 'c'],
['sgrassatore', 'noun', 'c'],
['sgridare', 'verb', 'c'],
['sguardo', 'noun', 'a'],
['shampoo', 'noun', 'c'],
['share', 'noun', 'b'],
['shopping', 'noun', 'b'],
['shorts', 'noun', 'c'],
['show', 'noun', 'b'],
['sì', 'adverb', 'a'],
['sì', 'noun', 'a'],
['sì', 'adjective', 'a'],
['si', 'pronoun', 'a'],
['sia', 'conjunction', 'a'],
['siamese', 'adjective', 'c'],
['siamese', 'noun', 'c'],
['sicché', 'conjunction', 'b'],
['siccità', 'noun', 'c'],
['siccome', 'conjunction', 'a'],
['siccome', 'adverb', 'a'],
['siciliano', 'adjective', 'b'],
['siciliano', 'noun', 'b'],
['sicuramente', 'adverb', 'a'],
['sicurezza', 'noun', 'a'],
['sicuro', 'adjective', 'a'],
['sicuro', 'noun', 'a'],
['sicuro', 'adverb', 'a'],
['siepe', 'noun', 'c'],
['sigaretta', 'noun', 'a'],
['sigaro', 'noun', 'c'],
['sigla', 'noun', 'b'],
['significare', 'verb', 'a'],
['significativo', 'adjective', 'b'],
['significato', 'past_part', 'a'],
['significato', 'noun', 'a'],
['signora', 'noun', 'a'],
['signore', 'noun', 'a'],
['signorina', 'noun', 'a'],
['silenzio', 'noun', 'a'],
['silenzioso', 'adjective', 'b'],
['sillaba', 'noun', 'c'],
['simbolico', 'adjective', 'b'],
['simbolo', 'noun', 'a'],
['simile', 'adjective', 'a'],
['simile', 'adjective', 'a'],
['simile', 'noun', 'a'],
['simile', 'adverb', 'a'],
['simpatia', 'noun', 'b'],
['simpatico', 'adjective', 'a'],
['simulare', 'verb', 'b'],
['sinceramente', 'adverb', 'b'],
['sincero', 'adjective', 'b'],
['sindacale', 'adjective', 'b'],
['sindacato', 'noun', 'b'],
['sindaco', 'noun', 'b'],
['sindrome', 'noun', 'b'],
['single', 'noun', 'b'],
['singolare', 'adjective', 'b'],
['singolare', 'noun', 'b'],
['singolo', 'adjective', 'a'],
['singolo', 'noun', 'a'],
['sinistra', 'noun', 'a'],
['sinistro', 'adjective', 'a'],
['sinistro', 'noun', 'a'],
['sino', 'preposition', 'a'],
['sino', 'adverb', 'a'],
['sinonimo', 'noun', 'b'],
['sintesi', 'noun', 'b'],
['sintetico', 'adjective', 'b'],
['sintetizzare', 'verb', 'b'],
['sintomo', 'noun', 'b'],
['sir', 'noun', 'b'],
['siriano', 'adjective', 'c'],
['siriano', 'noun', 'c'],
['siringa', 'noun', 'c'],
['sistema', 'noun', 'a'],
['sistemare', 'verb', 'a'],
['sito', 'noun', 'a'],
['sito', 'adjective', 'a'],
['situare', 'verb', 'b'],
['situazione', 'noun', 'a'],
['slacciare', 'verb', 'c'],
['slanciato', 'past_part', 'c'],
['slanciato', 'adjective', 'c'],
['slavo', 'adjective', 'c'],
['slavo', 'noun', 'c'],
['slegare', 'verb', 'c'],
['slip', 'noun', 'c'],
['slitta', 'noun', 'c'],
['slogan', 'noun', 'b'],
['slogare', 'verb', 'c'],
['slogatura', 'noun', 'c'],
['slovacco', 'adjective', 'c'],
['slovacco', 'noun', 'c'],
['sloveno', 'adjective', 'c'],
['sloveno', 'noun', 'c'],
['smacchiare', 'verb', 'c'],
['smacchiatore', 'adjective', 'c'],
['smacchiatore', 'noun', 'c'],
['smaltimento', 'noun', 'b'],
['smalto', 'noun', 'c'],
['smascherare', 'verb', 'c'],
['smentire', 'verb', 'b'],
['smettere', 'verb', 'a'],
['smisurato', 'past_part', 'c'],
['smisurato', 'adjective', 'c'],
['smog', 'noun', 'c'],
['smontare', 'verb', 'b'],
['smorfia', 'noun', 'c'],
['smuovere', 'verb', 'c'],
['snack', 'noun', 'c'],
['sneaker', 'noun', 'c'],
['snello', 'adjective', 'c'],
['soccorrere', 'verb', 'c'],
['soccorso', 'noun', 'b'],
['socialdemocratico', 'adjective', 'c'],
['socialdemocratico', 'noun', 'c'],
['sociale', 'adjective', 'a'],
['sociale', 'noun', 'a'],
['socialista', 'adjective', 'b'],
['socialista', 'noun', 'b'],
['società', 'noun', 'a'],
['socievole', 'adjective', 'c'],
['socio', 'noun', 'b'],
['soddisfare', 'verb', 'a'],
['soddisfatto', 'past_part', 'b'],
['soddisfatto', 'adjective', 'b'],
['soddisfazione', 'noun', 'a'],
['sodo', 'adjective', 'b'],
['sodo', 'noun', 'b'],
['sodo', 'adverb', 'b'],
['sofà', 'noun', 'c'],
['sofferenza', 'noun', 'a'],
['soffermare', 'verb', 'b'],
['soffiare', 'verb', 'b'],
['soffice', 'adjective', 'c'],
['soffitta', 'noun', 'c'],
['soffitto', 'noun', 'b'],
['soffocare', 'verb', 'b'],
['soffriggere', 'verb', 'c'],
['soffrire', 'verb', 'a'],
['sofisticato', 'past_part', 'b'],
['sofisticato', 'adjective', 'b'],
['software', 'noun', 'b'],
['soggettivo', 'adjective', 'b'],
['soggetto', 'noun', 'a'],
['soggetto', 'adjective', 'b'],
['soggezione', 'noun', 'c'],
['soggiorno', 'noun', 'a'],
['soglia', 'noun', 'b'],
['sogliola', 'noun', 'c'],
['sognare', 'verb', 'a'],
['sogno', 'noun', 'a'],
['sol', 'noun', 'c'],
['solaio', 'noun', 'c'],
['solamente', 'adverb', 'a'],
['solamente', 'conjunction', 'a'],
['solare', 'adjective', 'b'],
['solare', 'noun', 'b'],
['solco', 'noun', 'b'],
['soldato', 'noun', 'a'],
['soldo', 'noun', 'a'],
['sole', 'noun', 'a'],
['solenne', 'adjective', 'b'],
['solidarietà', 'noun', 'b'],
['solido', 'adjective', 'b'],
['solido', 'noun', 'b'],
['solitamente', 'adverb', 'b'],
['solitario', 'adjective', 'b'],
['solitario', 'noun', 'b'],
['solito', 'adjective', 'a'],
['solito', 'noun', 'a'],
['solitudine', 'noun', 'b'],
['solletico', 'noun', 'c'],
['sollevare', 'verb', 'a'],
['sollievo', 'noun', 'b'],
['solo', 'adjective', 'a'],
['solo', 'noun', 'a'],
['solo', 'adverb', 'a'],
['solo', 'conjunction', 'a'],
['soltanto', 'adverb', 'a'],
['soltanto', 'conjunction', 'a'],
['soluzione', 'noun', 'a'],
['somigliare', 'verb', 'b'],
['somma', 'noun', 'a'],
['sommare', 'verb', 'b'],
['sondaggio', 'noun', 'a'],
['sonno', 'noun', 'a'],
['sonoro', 'adjective', 'b'],
['sonoro', 'noun', 'b'],
['soppalco', 'noun', 'c'],
['sopportare', 'verb', 'a'],
['sopra', 'preposition', 'a'],
['sopra', 'adverb', 'a'],
['sopra', 'adjective', 'a'],
['sopra', 'noun', 'a'],
['soprabito', 'noun', 'c'],
['sopracciglio', 'noun', 'c'],
['soprammobile', 'noun', 'c'],
['soprannome', 'noun', 'c'],
['soprattutto', 'adverb', 'a'],
['sopravvalutare', 'verb', 'c'],
['sopravvivenza', 'noun', 'b'],
['sopravvivere', 'verb', 'a'],
['sorcio', 'noun', 'c'],
['sordo', 'adjective', 'b'],
['sordo', 'noun', 'b'],
['sorella', 'noun', 'a'],
['sorgente', 'pres_part', 'b'],
['sorgente', 'adjective', 'b'],
['sorgente', 'noun', 'b'],
['sorgere', 'verb', 'b'],
['sorpassare', 'verb', 'c'],
['sorpasso', 'noun', 'c'],
['sorprendente', 'pres_part', 'b'],
['sorprendente', 'adjective', 'b'],
['sorprendere', 'verb', 'b'],
['sorpresa', 'noun', 'a'],
['sorridente', 'pres_part', 'c'],
['sorridente', 'adjective', 'c'],
['sorridere', 'verb', 'a'],
['sorriso', 'noun', 'a'],
['sorso', 'noun', 'c'],
['sorta', 'noun', 'a'],
['sorte', 'noun', 'b'],
['sorteggiare', 'verb', 'c'],
['sorteggio', 'noun', 'c'],
['sorvegliare', 'verb', 'b'],
['sospendere', 'verb', 'b'],
['sospensione', 'noun', 'b'],
['sospeso', 'past_part', 'b'],
['sospeso', 'adjective', 'b'],
['sospeso', 'noun', 'b'],
['sospettare', 'verb', 'b'],
['sospetto', 'noun', 'a'],
['sospetto', 'adjective', 'a'],
['sospetto', 'noun', 'a'],
['sospirare', 'verb', 'b'],
['sospiro', 'noun', 'b'],
['sosta', 'noun', 'b'],
['sostanza', 'noun', 'a'],
['sostanzialmente', 'adverb', 'b'],
['sostare', 'verb', 'c'],
['sostegno', 'noun', 'b'],
['sostenere', 'verb', 'a'],
['sostenitore', 'adjective', 'b'],
['sostenitore', 'noun', 'b'],
['sostituire', 'verb', 'a'],
['sostituzione', 'noun', 'b'],
['sottaceto', 'adjective', 'c'],
['sottaceto', 'adverb', 'c'],
['sottaceto', 'noun', 'c'],
['sotterraneo', 'adjective', 'b'],
['sotterraneo', 'noun', 'b'],
['sottile', 'adjective', 'a'],
['sottile', 'noun', 'a'],
['sottile', 'adverb', 'a'],
['sottinteso', 'past_part', 'c'],
['sottinteso', 'adjective', 'c'],
['sottinteso', 'noun', 'c'],
['sotto', 'preposition', 'a'],
['sotto', 'adverb', 'a'],
['sotto', 'adjective', 'a'],
['sotto', 'noun', 'a'],
['sottofondo', 'noun', 'b'],
['sottolineare', 'verb', 'a'],
['sottolio', 'adverb', 'c'],
['sottolio', 'adjective', 'c'],
['sottomarino', 'adjective', 'c'],
['sottomarino', 'noun', 'c'],
['sottopassaggio', 'noun', 'c'],
['sottoporre', 'verb', 'a'],
['sottoscrivere', 'verb', 'b'],
['sottovalutare', 'verb', 'b'],
['sottrarre', 'verb', 'b'],
['sovietico', 'adjective', 'b'],
['sovietico', 'noun', 'b'],
['sovrano', 'adjective', 'b'],
['sovrano', 'noun', 'b'],
['sovrapporre', 'verb', 'b'],
['spaccare', 'verb', 'b'],
['spaccatura', 'noun', 'c'],
['spacciare', 'verb', 'b'],
['spacciatore', 'noun', 'c'],
['spaccio', 'noun', 'c'],
['spada', 'noun', 'b'],
['spaghetto', 'noun', 'b'],
['spagnolo', 'adjective', 'a'],
['spagnolo', 'noun', 'a'],
['spago', 'noun', 'c'],
['spalancare', 'verb', 'b'],
['spalla', 'noun', 'a'],
['spalmabile', 'adjective', 'c'],
['spalmare', 'verb', 'c'],
['spam', 'noun', 'b'],
['sparare', 'verb', 'a'],
['sparecchiare', 'verb', 'c'],
['spargere', 'verb', 'b'],
['sparire', 'verb', 'a'],
['sparo', 'noun', 'b'],
['sparso', 'past_part', 'b'],
['sparso', 'adjective', 'b'],
['spassare', 'verb', 'b'],
['spasso', 'noun', 'c'],
['spavaldo', 'adjective', 'c'],
['spaventare', 'verb', 'a'],
['spaventato', 'past_part', 'b'],
['spaventato', 'adjective', 'b'],
['spaventoso', 'adjective', 'b'],
['spaziale', 'adjective', 'b'],
['spazio', 'noun', 'a'],
['spazioso', 'adjective', 'c'],
['spazzare', 'verb', 'b'],
['spazzatura', 'noun', 'b'],
['spazzino', 'noun', 'c'],
['spazzola', 'noun', 'c'],
['spazzolare', 'verb', 'c'],
['spazzolino', 'noun', 'c'],
['spazzolone', 'noun', 'c'],
['specchiarsi', 'verb', 'c'],
['specchio', 'noun', 'a'],
['speciale', 'adjective', 'a'],
['speciale', 'noun', 'a'],
['specialista', 'noun', 'b'],
['specializzato', 'past_part', 'b'],
['specializzato', 'adjective', 'b'],
['specializzato', 'noun', 'b'],
['specialmente', 'adverb', 'b'],
['specie', 'noun', 'a'],
['specie', 'adverb', 'a'],
['specificare', 'verb', 'b'],
['specifico', 'adjective', 'a'],
['specifico', 'noun', 'a'],
['speck', 'noun', 'c'],
['spedire', 'verb', 'b'],
['spedizione', 'noun', 'b'],
['spegnere', 'verb', 'a'],
['spellare', 'verb', 'c'],
['spendere', 'verb', 'a'],
['spennare', 'verb', 'c'],
['spensierato', 'adjective', 'c'],
['spento', 'past_part', 'b'],
['spento', 'adjective', 'b'],
['speranza', 'noun', 'a'],
['sperare', 'verb', 'a'],
['sperimentale', 'adjective', 'b'],
['sperimentare', 'verb', 'b'],
['sperimentazione', 'noun', 'b'],
['sperone', 'noun', 'c'],
['spesa', 'noun', 'a'],
['spesso', 'adjective', 'b'],
['spesso', 'adverb', 'a'],
['spessore', 'noun', 'b'],
['spettacolare', 'adjective', 'b'],
['spettacolo', 'noun', 'a'],
['spettare', 'verb', 'b'],
['spettatore', 'noun', 'b'],
['spettinare', 'verb', 'c'],
['spettro', 'noun', 'b'],
['spezia', 'noun', 'c'],
['spezzare', 'verb', 'b'],
['spia', 'noun', 'b'],
['spiacere', 'verb', 'b'],
['spiaggia', 'noun', 'a'],
['spianare', 'verb', 'c'],
['spiare', 'verb', 'b'],
['spiazzo', 'noun', 'c'],
['spiccare', 'verb', 'b'],
['spicciolo', 'adjective', 'c'],
['spicciolo', 'noun', 'c'],
['spiedino', 'noun', 'c'],
['spiedo', 'noun', 'c'],
['spiegare', 'verb', 'a'],
['spiegazione', 'noun', 'a'],
['spietato', 'adjective', 'b'],
['spiga', 'noun', 'c'],
['spigolo', 'noun', 'c'],
['spillo', 'noun', 'c'],
['spina', 'noun', 'b'],
['spinacio', 'noun', 'c'],
['spingere', 'verb', 'a'],
['spinta', 'noun', 'b'],
['spionaggio', 'noun', 'c'],
['spirito', 'noun', 'a'],
['spiritoso', 'adjective', 'c'],
['spirituale', 'adjective', 'b'],
['spirituale', 'noun', 'b'],
['splendente', 'pres_part', 'c'],
['splendente', 'adjective', 'c'],
['splendere', 'verb', 'b'],
['splendido', 'adjective', 'b'],
['splendore', 'noun', 'b'],
['spogliare', 'verb', 'b'],
['spogliatoio', 'noun', 'c'],
['spoglio', 'noun', 'c'],
['spolverare', 'verb', 'c'],
['sponda', 'noun', 'b'],
['spontaneo', 'adjective', 'b'],
['sporcare', 'verb', 'b'],
['sporcizia', 'noun', 'c'],
['sporco', 'adjective', 'a'],
['sporco', 'noun', 'a'],
['sporgente', 'pres_part', 'c'],
['sporgente', 'adjective', 'c'],
['sporgente', 'noun', 'c'],
['sporgere', 'verb', 'b'],
['sport', 'noun', 'a'],
['sport', 'adjective', 'a'],
['sportello', 'noun', 'b'],
['sportivo', 'adjective', 'a'],
['sportivo', 'noun', 'a'],
['sposare', 'verb', 'a'],
['sposato', 'past_part', 'b'],
['sposato', 'adjective', 'b'],
['sposato', 'noun', 'b'],
['sposo', 'noun', 'b'],
['spostamento', 'noun', 'b'],
['spostare', 'verb', 'a'],
['spot', 'noun', 'b'],
['spranga', 'noun', 'c'],
['spray', 'adjective', 'c'],
['spray', 'noun', 'c'],
['sprecare', 'verb', 'b'],
['spreco', 'noun', 'c'],
['spremere', 'verb', 'c'],
['spremuta', 'noun', 'c'],
['sprofondare', 'verb', 'b'],
['sproposito', 'noun', 'c'],
['spruzzare', 'verb', 'c'],
['spuma', 'noun', 'c'],
['spumante', 'pres_part', 'c'],
['spumante', 'adjective', 'c'],
['spumante', 'noun', 'c'],
['spuntare', 'verb', 'b'],
['spuntino', 'noun', 'c'],
['spunto', 'noun', 'b'],
['sputare', 'verb', 'b'],
['sputo', 'noun', 'c'],
['squadra', 'noun', 'a'],
['squallido', 'adjective', 'c'],
['squalo', 'noun', 'c'],
['squarcio', 'noun', 'c'],
['squillare', 'verb', 'b'],
['squisito', 'adjective', 'c'],
['stabile', 'adjective', 'b'],
['stabile', 'noun', 'b'],
['stabilire', 'verb', 'a'],
['stabilità', 'noun', 'b'],
['staccare', 'verb', 'a'],
['stacco', 'noun', 'c'],
['stadio', 'noun', 'b'],
['staffa', 'noun', 'c'],
['stagione', 'noun', 'a'],
['stagno', 'noun', 'c'],
['stalla', 'noun', 'b'],
['stallone', 'noun', 'c'],
['stamattina', 'adverb', 'b'],
['stampa', 'noun', 'a'],
['stampare', 'verb', 'b'],
['stampatello', 'noun', 'c'],
['stampato', 'past_part', 'b'],
['stampato', 'adjective', 'b'],
['stampato', 'noun', 'b'],
['stampella', 'noun', 'c'],
['stampo', 'noun', 'c'],
['stancare', 'verb', 'b'],
['stanchezza', 'noun', 'b'],
['stanco', 'adjective', 'a'],
['standard', 'noun', 'b'],
['standard', 'adjective', 'b'],
['stanga', 'noun', 'c'],
['stanotte', 'adverb', 'b'],
['stanza', 'noun', 'a'],
['star', 'noun', 'b'],
['stare', 'verb', 'a'],
['stasera', 'adverb', 'a'],
['statale', 'adjective', 'b'],
['statale', 'noun', 'b'],
['statistica', 'noun', 'b'],
['statistico', 'adjective', 'b'],
['statistico', 'noun', 'b'],
['stato', 'noun', 'a'],
['stato', 'noun', 'a'],
['statua', 'noun', 'b'],
['statunitense', 'adjective', 'b'],
['statunitense', 'noun', 'b'],
['status', 'noun', 'b'],
['stavolta', 'adverb', 'b'],
['stazione', 'noun', 'a'],
['stella', 'noun', 'a'],
['stellare', 'adjective', 'b'],
['stendere', 'verb', 'b'],
['stendibiancheria', 'noun', 'c'],
['stereo', 'adjective', 'c'],
['stereo', 'noun', 'c'],
['sterlina', 'noun', 'b'],
['sterzare', 'verb', 'c'],
['sterzo', 'noun', 'c'],
['stesso', 'adjective', 'a'],
['stesso', 'pronoun', 'a'],
['stile', 'noun', 'a'],
['stima', 'noun', 'b'],
['stimare', 'verb', 'b'],
['stimolare', 'verb', 'b'],
['stimolo', 'noun', 'b'],
['stinco', 'noun', 'c'],
['stipendiare', 'verb', 'c'],
['stipendio', 'noun', 'a'],
['stirare', 'verb', 'b'],
['stivaletto', 'noun', 'c'],
['stoffa', 'noun', 'b'],
['stomaco', 'noun', 'b'],
['stonare', 'verb', 'c'],
['stop', 'loc-comando', 'c'],
['stop', 'noun', 'c'],
['stoppa', 'noun', 'c'],
['storcere', 'verb', 'c'],
['storia', 'noun', 'a'],
['storico', 'adjective', 'a'],
['storico', 'noun', 'a'],
['stornello', 'noun', 'c'],
['storta', 'noun', 'c'],
['storto', 'past_part', 'b'],
['storto', 'adjective', 'b'],
['storto', 'adverb', 'b'],
['storto', 'noun', 'b'],
['stoviglia', 'noun', 'c'],
['stracchino', 'noun', 'c'],
['straccio', 'noun', 'b'],
['strada', 'noun', 'a'],
['stradale', 'adjective', 'b'],
['stradale', 'noun', 'b'],
['strage', 'noun', 'b'],
['strangolare', 'verb', 'c'],
['straniero', 'adjective', 'a'],
['straniero', 'noun', 'a'],
['strano', 'adjective', 'a'],
['straordinario', 'adjective', 'a'],
['straordinario', 'noun', 'a'],
['strappare', 'verb', 'b'],
['strategia', 'noun', 'a'],
['strategico', 'adjective', 'b'],
['strato', 'noun', 'b'],
['strega', 'noun', 'a'],
['stregare', 'verb', 'b'],
['stregone', 'noun', 'c'],
['stress', 'noun', 'b'],
['stretta', 'noun', 'b'],
['strettamente', 'adverb', 'b'],
['stretto', 'past_part', 'a'],
['stretto', 'adjective', 'a'],
['stretto', 'noun', 'a'],
['strillare', 'verb', 'b'],
['strillo', 'noun', 'c'],
['stringa', 'noun', 'c'],
['stringere', 'verb', 'a'],
['striscia', 'noun', 'b'],
['strisciare', 'verb', 'b'],
['strofinaccio', 'noun', 'c'],
['stronzata', 'noun', 'b'],
['stronzo', 'noun', 'a'],
['stronzo', 'adjective', 'a'],
['strumento', 'noun', 'a'],
['strutto', 'past_part', 'c'],
['strutto', 'adjective', 'c'],
['strutto', 'noun', 'c'],
['struttura', 'noun', 'a'],
['strutturale', 'adjective', 'b'],
['struzzo', 'noun', 'c'],
['studente', 'noun', 'a'],
['studiare', 'verb', 'a'],
['studio', 'noun', 'a'],
['studioso', 'adjective', 'b'],
['studioso', 'noun', 'b'],
['stufa', 'noun', 'c'],
['stuoia', 'noun', 'c'],
['stupefacente', 'pres_part', 'b'],
['stupefacente', 'adjective', 'b'],
['stupefacente', 'noun', 'b'],
['stupendo', 'adjective', 'b'],
['stupido', 'adjective', 'a'],
['stupido', 'noun', 'a'],
['stupire', 'verb', 'b'],
['stupito', 'past_part', 'b'],
['stupito', 'adjective', 'b'],
['stupore', 'noun', 'b'],
['stuzzicadenti', 'noun', 'c'],
['stuzzicare', 'verb', 'c'],
['style', 'noun', 'b'],
['su', 'preposition', 'a'],
['su', 'adverb', 'a'],
['su', 'exclamation', 'a'],
['su', 'noun', 'a'],
['subire', 'verb', 'a'],
['subito', 'adverb', 'a'],
['succedere', 'verb', 'a'],
['successione', 'noun', 'b'],
['successivamente', 'adverb', 'b'],
['successivo', 'adjective', 'a'],
['successo', 'noun', 'a'],
['succhiare', 'verb', 'b'],
['succo', 'noun', 'b'],
['sud', 'noun', 'a'],
['sud', 'adjective', 'a'],
['sudamericano', 'adjective', 'c'],
['sudamericano', 'noun', 'c'],
['sudare', 'verb', 'b'],
['sudato', 'past_part', 'c'],
['sudato', 'adjective', 'c'],
['suddito', 'noun', 'b'],
['suddito', 'adjective', 'b'],
['suddividere', 'verb', 'b'],
['sudicio', 'adjective', 'c'],
['sudicio', 'noun', 'c'],
['sudore', 'noun', 'b'],
['sudtirolese', 'adjective', 'c'],
['sudtirolese', 'noun', 'c'],
['sufficiente', 'adjective', 'a'],
['suggerimento', 'noun', 'b'],
['suggerire', 'verb', 'a'],
['suggestivo', 'adjective', 'b'],
['sughero', 'noun', 'c'],
['sugo', 'noun', 'b'],
['suicidio', 'noun', 'b'],
['suino', 'noun', 'c'],
['suino', 'adjective', 'c'],
['suo', 'adjective', 'a'],
['suo', 'pronoun', 'a'],
['suocera', 'noun', 'c'],
['suocero', 'noun', 'c'],
['suola', 'noun', 'c'],
['suolo', 'noun', 'b'],
['suonare', 'verb', 'a'],
['suono', 'noun', 'a'],
['suora', 'noun', 'a'],
['super', 'adjective', 'b'],
['super', 'noun', 'b'],
['superare', 'verb', 'a'],
['superbia', 'noun', 'c'],
['superficiale', 'adjective', 'b'],
['superficie', 'noun', 'a'],
['superiore', 'adjective', 'a'],
['superiore', 'noun', 'a'],
['supermercato', 'noun', 'b'],
['supporre', 'verb', 'b'],
['supportare', 'verb', 'b'],
['supporto', 'noun', 'a'],
['supremo', 'adjective', 'b'],
['surgelato', 'past_part', 'c'],
['surgelato', 'adjective', 'c'],
['surgelato', 'noun', 'c'],
['suscitare', 'verb', 'b'],
['susina', 'noun', 'c'],
['susino', 'noun', 'c'],
['susseguirsi', 'verb', 'c'],
['sussurrare', 'verb', 'b'],
['svanire', 'verb', 'b'],
['svedese', 'adjective', 'c'],
['svedese', 'noun', 'c'],
['sveglia', 'noun', 'c'],
['svegliare', 'verb', 'a'],
['svegliarsi', 'verb', 'c'],
['sveglio', 'past_part', 'b'],
['sveglio', 'adjective', 'b'],
['svelare', 'verb', 'b'],
['svelto', 'adjective', 'c'],
['svenire', 'verb', 'b'],
['sventola', 'noun', 'c'],
['sviluppare', 'verb', 'a'],
['sviluppato', 'past_part', 'b'],
['sviluppato', 'adjective', 'b'],
['sviluppo', 'noun', 'a'],
['svizzero', 'adjective', 'b'],
['svizzero', 'noun', 'b'],
['svolazzare', 'verb', 'c'],
['svolgere', 'verb', 'a'],
['svolgimento', 'noun', 'c'],
['svolta', 'noun', 'b'],
['svuotare', 'verb', 'b'],
['tabaccaio', 'noun', 'c'],
['tabella', 'noun', 'b'],
['tacca', 'noun', 'c'],
['tacchino', 'noun', 'c'],
['tacco', 'noun', 'b'],
['tacere', 'verb', 'a'],
['tacere', 'noun', 'a'],
['tag', 'noun', 'b'],
['taglia', 'noun', 'b'],
['tagliare', 'verb', 'a'],
['tagliatella', 'noun', 'c'],
['tagliato', 'past_part', 'b'],
['tagliato', 'adjective', 'b'],
['tagliere', 'noun', 'c'],
['taglio', 'noun', 'a'],
['tagliola', 'noun', 'c'],
['talco', 'noun', 'c'],
['tale', 'adjective', 'a'],
['tale', 'pronoun', 'a'],
['tale', 'adverb', 'a'],
['taleggio', 'noun', 'c'],
['talento', 'noun', 'b'],
['talmente', 'adverb', 'a'],
['talpa', 'noun', 'c'],
['talpa', 'adjective', 'c'],
['talpa', 'noun', 'c'],
['talvolta', 'adverb', 'b'],
['tamburo', 'noun', 'c'],
['tamponare', 'verb', 'c'],
['tangente', 'pres_part', 'b'],
['tangente', 'adjective', 'b'],
['tangente', 'noun', 'b'],
['tanto', 'adjective', 'a'],
['tanto', 'pronoun', 'a'],
['tanto', 'noun', 'a'],
['tanto', 'adverb', 'a'],
['tanto', 'conjunction', 'a'],
['tappa', 'noun', 'b'],
['tappare', 'verb', 'b'],
['tappetino', 'noun', 'c'],
['tappeto', 'noun', 'b'],
['tappezzare', 'verb', 'c'],
['tappo', 'noun', 'c'],
['tarallo', 'noun', 'c'],
['tarantella', 'noun', 'c'],
['tardi', 'adverb', 'a'],
['tardo', 'adjective', 'a'],
['tardo', 'adverb', 'a'],
['targa', 'noun', 'b'],
['tariffa', 'noun', 'b'],
['tarlo', 'noun', 'c'],
['tartaruga', 'noun', 'c'],
['tartufo', 'noun', 'c'],
['tasca', 'noun', 'a'],
['tassa', 'noun', 'a'],
['tassare', 'verb', 'c'],
['tassello', 'noun', 'c'],
['tasso', 'noun', 'b'],
['tastiera', 'noun', 'b'],
['tasto', 'noun', 'b'],
['tatto', 'noun', 'c'],
['tatuaggio', 'noun', 'b'],
['taverna', 'noun', 'c'],
['tavola', 'noun', 'a'],
['tavoletta', 'noun', 'c'],
['tavolino', 'noun', 'b'],
['tavolo', 'noun', 'a'],
['taxi', 'noun', 'b'],
['tazza', 'noun', 'b'],
['tè', 'noun', 'b'],
['te', 'pronoun', 'a'],
['team', 'noun', 'b'],
['teatrale', 'adjective', 'b'],
['teatro', 'noun', 'a'],
['tecnica', 'noun', 'a'],
['tecnicamente', 'adverb', 'b'],
['tecnico', 'adjective', 'a'],
['tecnico', 'noun', 'a'],
['tecnologia', 'noun', 'a'],
['tecnologico', 'adjective', 'b'],
['tedesco', 'adjective', 'a'],
['tedesco', 'noun', 'a'],
['tegame', 'noun', 'c'],
['teglia', 'noun', 'c'],
['tegola', 'noun', 'c'],
['tela', 'noun', 'b'],
['telaio', 'noun', 'c'],
['telecamera', 'noun', 'b'],
['telecomandato', 'past_part', 'c'],
['telecomandato', 'adjective', 'c'],
['telecronaca', 'noun', 'c'],
['telecronista', 'noun', 'c'],
['telefilm', 'noun', 'b'],
['telefonare', 'verb', 'a'],
['telefonata', 'noun', 'a'],
['telefonico', 'adjective', 'a'],
['telefonino', 'noun', 'b'],
['telefono', 'noun', 'a'],
['telegiornale', 'noun', 'b'],
['telegrafico', 'adjective', 'c'],
['telegrafo', 'noun', 'c'],
['telegramma', 'noun', 'c'],
['telescopio', 'noun', 'b'],
['televisione', 'noun', 'a'],
['televisivo', 'adjective', 'a'],
['televisore', 'noun', 'b'],
['tema', 'noun', 'a'],
['temere', 'verb', 'a'],
['temperatura', 'noun', 'a'],
['tempesta', 'noun', 'b'],
['tempio', 'noun', 'b'],
['tempo', 'noun', 'a'],
['temporale', 'noun', 'b'],
['temporaneo', 'adjective', 'b'],
['tenaglia', 'noun', 'c'],
['tenda', 'noun', 'a'],
['tendenza', 'noun', 'a'],
['tendere', 'verb', 'a'],
['tenebra', 'noun', 'c'],
['tenente', 'noun', 'b'],
['tenere', 'verb', 'a'],
['tenerezza', 'noun', 'b'],
['tenero', 'adjective', 'b'],
['tenero', 'noun', 'b'],
['tennis', 'noun', 'b'],
['tensione', 'noun', 'a'],
['tentare', 'verb', 'a'],
['tentativo', 'noun', 'a'],
['tentazione', 'noun', 'b'],
['tenuta', 'noun', 'b'],
['teologia', 'noun', 'b'],
['teologo', 'noun', 'b'],
['teoria', 'noun', 'a'],
['teorico', 'adjective', 'b'],
['teorico', 'noun', 'b'],
['terapia', 'noun', 'a'],
['tergicristallo', 'noun', 'c'],
['terminale', 'adjective', 'b'],
['terminale', 'noun', 'b'],
['terminare', 'verb', 'a'],
['termine', 'noun', 'a'],
['termosifone', 'noun', 'c'],
['terra', 'noun', 'a'],
['terrazzo', 'noun', 'b'],
['terremoto', 'noun', 'b'],
['terreno', 'noun', 'a'],
['terrestre', 'adjective', 'b'],
['terrestre', 'noun', 'b'],
['terribile', 'adjective', 'a'],
['terriccio', 'noun', 'c'],
['territoriale', 'adjective', 'b'],
['territoriale', 'noun', 'b'],
['territorio', 'noun', 'a'],
['terrore', 'noun', 'b'],
['terrorismo', 'noun', 'b'],
['terrorista', 'adjective', 'b'],
['terrorista', 'noun', 'b'],
['terrorizzare', 'verb', 'b'],
['terzo', 'adjective', 'a'],
['terzo', 'noun', 'a'],
['teschio', 'noun', 'b'],
['tesi', 'noun', 'a'],
['teso', 'past_part', 'b'],
['teso', 'adjective', 'b'],
['tesoro', 'noun', 'a'],
['tessera', 'noun', 'b'],
['tessile', 'adjective', 'c'],
['tessile', 'noun', 'c'],
['tessuto', 'past_part', 'b'],
['tessuto', 'adjective', 'b'],
['tessuto', 'noun', 'b'],
['test', 'noun', 'a'],
['testa', 'noun', 'a'],
['testamento', 'noun', 'b'],
['testare', 'verb', 'b'],
['testimone', 'noun', 'a'],
['testimonianza', 'noun', 'b'],
['testimoniare', 'verb', 'b'],
['testo', 'noun', 'a'],
['tetta', 'noun', 'b'],
['tetto', 'noun', 'a'],
['tettoia', 'noun', 'c'],
['tg', 'sigla', 'b'],
['thermos', 'noun', 'c'],
['ti', 'noun', 'c'],
['ti', 'pronoun', 'a'],
['tic', 'noun', 'c'],
['ticchettio', 'noun', 'c'],
['tifare', 'verb', 'b'],
['tifo', 'noun', 'c'],
['tifoso', 'adjective', 'b'],
['tifoso', 'noun', 'b'],
['tigre', 'noun', 'b'],
['timbro', 'noun', 'c'],
['timidezza', 'noun', 'c'],
['timido', 'adjective', 'b'],
['timido', 'noun', 'b'],
['timone', 'noun', 'c'],
['timoniere', 'noun', 'c'],
['timore', 'noun', 'b'],
['tinello', 'noun', 'c'],
['tino', 'noun', 'c'],
['tipico', 'adjective', 'a'],
['tipo', 'noun', 'a'],
['tipologia', 'noun', 'b'],
['tiramisù', 'noun', 'c'],
['tiranno', 'noun', 'c'],
['tiranno', 'adjective', 'c'],
['tirare', 'verb', 'a'],
['tiro', 'noun', 'b'],
['tirocinio', 'noun', 'b'],
['tirrenico', 'adjective', 'c'],
['tisana', 'noun', 'c'],
['titolare', 'adjective', 'b'],
['titolare', 'noun', 'b'],
['titolo', 'noun', 'a'],
['tivù', 'noun', 'a'],
['tizio', 'noun', 'b'],
['toast', 'noun', 'c'],
['toccare', 'verb', 'a'],
['tocco', 'noun', 'b'],
['togliere', 'verb', 'a'],
['toilette', 'noun', 'c'],
['toletta', 'noun', 'c'],
['tolleranza', 'noun', 'b'],
['tollerare', 'verb', 'b'],
['tomba', 'noun', 'b'],
['tombola', 'noun', 'c'],
['tonaca', 'noun', 'c'],
['tondo', 'adjective', 'b'],
['tondo', 'noun', 'b'],
['tonnellata', 'noun', 'b'],
['tonno', 'noun', 'c'],
['tono', 'noun', 'a'],
['tonsilla', 'noun', 'c'],
['top', 'noun', 'b'],
['topo', 'noun', 'b'],
['topo', 'adjective', 'b'],
['toppa', 'noun', 'c'],
['torbido', 'adjective', 'c'],
['torbido', 'noun', 'c'],
['torcere', 'verb', 'b'],
['torcia', 'noun', 'c'],
['torcicollo', 'noun', 'c'],
['tordo', 'noun', 'c'],
['torero', 'noun', 'c'],
['torinese', 'adjective', 'c'],
['torinese', 'noun', 'c'],
['tormentare', 'verb', 'b'],
['tornaconto', 'noun', 'c'],
['tornare', 'verb', 'a'],
['torneo', 'noun', 'b'],
['tornio', 'noun', 'c'],
['toro', 'noun', 'b'],
['torre', 'noun', 'b'],
['torrone', 'noun', 'c'],
['torta', 'noun', 'b'],
['tortellino', 'noun', 'c'],
['torto', 'noun', 'b'],
['tortora', 'noun', 'c'],
['tortora', 'adjective', 'c'],
['tortora', 'noun', 'c'],
['tosare', 'verb', 'c'],
['toscano', 'adjective', 'b'],
['toscano', 'noun', 'b'],
['tosse', 'noun', 'b'],
['tossico', 'adjective', 'b'],
['tossico', 'noun', 'b'],
['tossire', 'verb', 'c'],
['tostapane', 'noun', 'c'],
['totale', 'adjective', 'a'],
['totale', 'noun', 'a'],
['totalmente', 'adverb', 'b'],
['tour', 'noun', 'b'],
['tovaglia', 'noun', 'b'],
['tovaglietta', 'noun', 'c'],
['tovagliolo', 'noun', 'c'],
['tra', 'preposition', 'a'],
['traballare', 'verb', 'c'],
['traboccare', 'verb', 'c'],
['trabocchetto', 'noun', 'c'],
['traccia', 'noun', 'a'],
['tracciare', 'verb', 'b'],
['tradimento', 'noun', 'b'],
['tradire', 'verb', 'b'],
['tradizionale', 'adjective', 'a'],
['tradizione', 'noun', 'a'],
['tradurre', 'verb', 'a'],
['traduzione', 'noun', 'a'],
['traffico', 'noun', 'a'],
['trafila', 'noun', 'c'],
['traforo', 'noun', 'c'],
['tragedia', 'noun', 'b'],
['traghetto', 'noun', 'c'],
['tragico', 'adjective', 'b'],
['tragico', 'noun', 'b'],
['trainare', 'verb', 'c'],
['trama', 'noun', 'b'],
['tramezzino', 'noun', 'c'],
['tramite', 'noun', 'b'],
['tramite', 'preposition', 'b'],
['tramontare', 'verb', 'c'],
['tramonto', 'noun', 'b'],
['trampolino', 'noun', 'c'],
['trancio', 'noun', 'c'],
['tranne', 'preposition', 'a'],
['tranquillamente', 'adverb', 'b'],
['tranquillità', 'noun', 'b'],
['tranquillizzare', 'verb', 'c'],
['tranquillo', 'adjective', 'a'],
['tranquillo', 'adverb', 'a'],
['tranquillo', 'noun', 'a'],
['transito', 'noun', 'c'],
['trapano', 'noun', 'c'],
['trapezio', 'noun', 'c'],
['trapezio', 'adjective', 'c'],
['trapianto', 'noun', 'c'],
['trappola', 'noun', 'b'],
['trapunta', 'noun', 'c'],
['trarre', 'verb', 'a'],
['trascinare', 'verb', 'a'],
['trascorrere', 'verb', 'a'],
['trascrizione', 'noun', 'b'],
['trascurare', 'verb', 'b'],
['trasferimento', 'noun', 'b'],
['trasferire', 'verb', 'a'],
['trasformare', 'verb', 'a'],
['trasformazione', 'noun', 'b'],
['trasfusione', 'noun', 'c'],
['traslocare', 'verb', 'c'],
['trasloco', 'noun', 'c'],
['trasmettere', 'verb', 'a'],
['trasmissione', 'noun', 'a'],
['trasparente', 'adjective', 'b'],
['trasparente', 'noun', 'b'],
['trasparenza', 'noun', 'b'],
['trasportare', 'verb', 'b'],
['trasporto', 'noun', 'a'],
['trattamento', 'noun', 'a'],
['trattare', 'verb', 'a'],
['trattativa', 'noun', 'b'],
['trattato', 'noun', 'b'],
['trattenere', 'verb', 'a'],
['trattenuta', 'noun', 'c'],
['tratto', 'noun', 'a'],
['trattore', 'noun', 'c'],
['trauma', 'noun', 'b'],
['travasare', 'verb', 'c'],
['travestire', 'verb', 'c'],
['travolgere', 'verb', 'b'],
['tre', 'adjective', 'a'],
['tre', 'noun', 'a'],
['trebbiare', 'verb', 'c'],
['trecento', 'adjective', 'b'],
['trecento', 'noun', 'b'],
['tredici', 'adjective', 'b'],
['tredici', 'noun', 'b'],
['tremare', 'verb', 'b'],
['tremendo', 'adjective', 'b'],
['trend', 'noun', 'b'],
['treno', 'noun', 'a'],
['trenta', 'adjective', 'a'],
['trenta', 'noun', 'a'],
['trentino', 'adjective', 'c'],
['trentino', 'noun', 'c'],
['triangolo', 'noun', 'b'],
['tribù', 'noun', 'c'],
['tribunale', 'noun', 'a'],
['triestino', 'adjective', 'c'],
['triestino', 'noun', 'c'],
['trifoglio', 'noun', 'c'],
['trina', 'noun', 'c'],
['trincea', 'noun', 'c'],
['trionfo', 'noun', 'b'],
['triste', 'adjective', 'a'],
['tristezza', 'noun', 'b'],
['tritare', 'verb', 'c'],
['trofeo', 'noun', 'c'],
['tronco', 'noun', 'b'],
['trono', 'noun', 'b'],
['troppo', 'adjective', 'a'],
['troppo', 'pronoun', 'a'],
['troppo', 'adverb', 'a'],
['troppo', 'noun', 'a'],
['trota', 'noun', 'c'],
['trottare', 'verb', 'c'],
['trottola', 'noun', 'c'],
['trovare', 'verb', 'a'],
['truccare', 'verb', 'c'],
['trucco', 'noun', 'b'],
['trucco', 'noun', 'b'],
['truffa', 'noun', 'b'],
['truffare', 'verb', 'c'],
['truppa', 'noun', 'b'],
['t-shirt', 'noun', 'c'],
['tu', 'pronoun', 'a'],
['tubo', 'noun', 'b'],
['tuffare', 'verb', 'b'],
['tuffo', 'noun', 'c'],
['tulipano', 'noun', 'c'],
['tumore', 'noun', 'b'],
['tunica', 'noun', 'c'],
['tunisino', 'adjective', 'c'],
['tunisino', 'noun', 'c'],
['tunnel', 'noun', 'c'],
['tuo', 'adjective', 'a'],
['tuo', 'pronoun', 'a'],
['tuono', 'noun', 'c'],
['turbare', 'verb', 'b'],
['turco', 'adjective', 'b'],
['turco', 'noun', 'b'],
['turismo', 'noun', 'b'],
['turista', 'noun', 'b'],
['turistico', 'adjective', 'b'],
['turno', 'noun', 'a'],
['tuta', 'noun', 'b'],
['tutela', 'noun', 'b'],
['tutelare', 'verb', 'b'],
['tutore', 'noun', 'c'],
['tuttavia', 'conjunction', 'a'],
['tuttavia', 'adverb', 'a'],
['tutto', 'adjective', 'a'],
['tutto', 'pronoun', 'a'],
['tuttora', 'adverb', 'b'],
['u', 'noun', 'c'],
['ubriaco', 'adjective', 'b'],
['ubriaco', 'noun', 'b'],
['uccello', 'noun', 'a'],
['uccidere', 'verb', 'a'],
['ucraino', 'adjective', 'c'],
['ucraino', 'noun', 'c'],
['udienza', 'noun', 'b'],
['udinese', 'adjective', 'c'],
['udinese', 'noun', 'c'],
['udire', 'verb', 'b'],
['udire', 'noun', 'b'],
['ufficiale', 'noun', 'b'],
['ufficiale', 'adjective', 'a'],
['ufficialmente', 'adverb', 'b'],
['ufficio', 'noun', 'a'],
['uguale', 'adjective', 'a'],
['uguale', 'adverb', 'a'],
['uguale', 'noun', 'a'],
['ugualmente', 'adverb', 'b'],
['ulcera', 'noun', 'c'],
['ulteriore', 'adjective', 'a'],
['ulteriormente', 'adverb', 'b'],
['ultimamente', 'adverb', 'b'],
['ultimo', 'adjective', 'a'],
['ultimo', 'noun', 'a'],
['ultravioletto', 'noun', 'c'],
['ultravioletto', 'adjective', 'c'],
['umanit', 'noun', 'a'],
['umano', 'adjective', 'a'],
['umano', 'noun', 'a'],
['umbro', 'adjective', 'c'],
['umbro', 'noun', 'c'],
['umido', 'adjective', 'b'],
['umido', 'noun', 'b'],
['umile', 'adjective', 'b'],
['umile', 'noun', 'b'],
['umiliare', 'verb', 'b'],
['umore', 'noun', 'b'],
['umorismo', 'noun', 'c'],
['una', 'determiner', 'a'],
['una', 'pronoun', 'a'],
['undici', 'adjective', 'b'],
['undici', 'noun', 'b'],
['ungherese', 'adjective', 'c'],
['ungherese', 'noun', 'c'],
['unghia', 'noun', 'b'],
['unguento', 'noun', 'c'],
['unico', 'adjective', 'a'],
['unico', 'noun', 'a'],
['uniforme', 'adjective', 'b'],
['unione', 'noun', 'b'],
['unire', 'verb', 'a'],
['unit', 'noun', 'a'],
['unito', 'past_part', 'a'],
['unito', 'adjective', 'a'],
['unito', 'noun', 'a'],
['universale', 'adjective', 'b'],
['universale', 'noun', 'b'],
['universit', 'noun', 'a'],
['universitario', 'adjective', 'b'],
['universitario', 'noun', 'b'],
['universo', 'noun', 'a'],
['uno', 'adjective', 'a'],
['uno', 'noun', 'a'],
['uno', 'determiner', 'a'],
['uno', 'pronoun', 'a'],
['uomo', 'noun', 'a'],
['uovo', 'noun', 'a'],
['uragano', 'noun', 'c'],
['urbanistico', 'adjective', 'b'],
['urbano', 'adjective', 'b'],
['urgente', 'adjective', 'b'],
['urgenza', 'noun', 'b'],
['urlare', 'verb', 'a'],
['urlo', 'noun', 'b'],
['urna', 'noun', 'c'],
['urtare', 'verb', 'b'],
['usare', 'verb', 'a'],
['usato', 'past_part', 'b'],
['usato', 'adjective', 'b'],
['usato', 'noun', 'b'],
['uscire', 'verb', 'a'],
['uscita', 'noun', 'a'],
['usignolo', 'noun', 'c'],
['uso', 'noun', 'a'],
['utensile', 'noun', 'c'],
['utente', 'noun', 'a'],
['utenza', 'noun', 'b'],
['utile', 'adjective', 'a'],
['utile', 'noun', 'a'],
['utilit', 'noun', 'b'],
['utilizzare', 'verb', 'a'],
['utilizzo', 'noun', 'b'],
['vabb', 'exclamation', 'b'],
['vacanza', 'noun', 'a'],
['vacca', 'noun', 'b'],
['vaccino', 'noun', 'c'],
['vaffanculo', 'exclamation', 'b'],
['vagare', 'verb', 'b'],
['vagire', 'verb', 'c'],
['vago', 'adjective', 'b'],
['vago', 'noun', 'b'],
['valanga', 'noun', 'c'],
['valdostano', 'adjective', 'c'],
['valdostano', 'noun', 'c'],
['valere', 'verb', 'a'],
['valido', 'adjective', 'b'],
['valigia', 'noun', 'b'],
['valle', 'noun', 'b'],
['valore', 'noun', 'a'],
['valorizzare', 'verb', 'b'],
['valoroso', 'adjective', 'c'],
['valoroso', 'noun', 'c'],
['valutare', 'verb', 'a'],
['valutazione', 'noun', 'b'],
['valvola', 'noun', 'c'],
['vampata', 'noun', 'c'],
['vampiro', 'noun', 'b'],
['vandalo', 'adjective', 'c'],
['vandalo', 'noun', 'c'],
['vanga', 'noun', 'c'],
['vangelo', 'noun', 'b'],
['vanitoso', 'adjective', 'c'],
['vanitoso', 'noun', 'c'],
['vano', 'adjective', 'b'],
['vano', 'noun', 'b'],
['vantaggio', 'noun', 'a'],
['vantaggioso', 'adjective', 'c'],
['vantare', 'verb', 'b'],
['vanto', 'noun', 'c'],
['vapore', 'noun', 'b'],
['variabile', 'adjective', 'b'],
['variabile', 'noun', 'b'],
['variante', 'pres_part', 'b'],
['variante', 'adjective', 'b'],
['variante', 'noun', 'b'],
['variare', 'verb', 'b'],
['variazione', 'noun', 'b'],
['variet', 'noun', 'b'],
['vario', 'adjective', 'a'],
['vario', 'adjective', 'a'],
['vario', 'pronoun', 'a'],
['variopinto', 'adjective', 'c'],
['vasca', 'noun', 'b'],
['vaso', 'noun', 'b'],
['vasto', 'adjective', 'b'],
['vasto', 'noun', 'b'],
['ve', 'pronoun', 'a'],
['ve', 'adverb', 'a'],
['vecchio', 'adjective', 'a'],
['vecchio', 'noun', 'a'],
['vedere', 'verb', 'a'],
['vedere', 'noun', 'a'],
['vedova', 'noun', 'b'],
['vegetale', 'adjective', 'b'],
['vegetale', 'noun', 'b'],
['veglia', 'noun', 'c'],
['veglione', 'noun', 'c'],
['veicolo', 'noun', 'b'],
['vela', 'noun', 'b'],
['veleno', 'noun', 'b'],
['velenoso', 'adjective', 'c'],
['vellutato', 'past_part', 'c'],
['vellutato', 'adjective', 'c'],
['velluto', 'noun', 'c'],
['velo', 'noun', 'b'],
['veloce', 'adjective', 'a'],
['veloce', 'adverb', 'a'],
['veloce', 'noun', 'a'],
['velocemente', 'adverb', 'b'],
['velocit', 'noun', 'a'],
['vena', 'noun', 'b'],
['vendemmiare', 'verb', 'c'],
['vendere', 'verb', 'a'],
['vendetta', 'noun', 'b'],
['vendicare', 'verb', 'b'],
['vendita', 'noun', 'a'],
['venditore', 'adjective', 'b'],
['venditore', 'noun', 'b'],
['venerd', 'noun', 'a'],
['veneto', 'adjective', 'b'],
['veneto', 'noun', 'b'],
['veneziano', 'adjective', 'c'],
['veneziano', 'noun', 'c'],
['venire', 'verb', 'a'],
['ventaglio', 'noun', 'c'],
['ventata', 'noun', 'c'],
['venti', 'adjective', 'a'],
['venti', 'noun', 'a'],
['venticinque', 'adjective', 'b'],
['venticinque', 'noun', 'b'],
['ventilatore', 'adjective', 'c'],
['ventilatore', 'noun', 'c'],
['ventina', 'noun', 'b'],
['ventiquattro', 'adjective', 'b'],
['ventiquattro', 'noun', 'b'],
['vento', 'noun', 'a'],
['ventre', 'noun', 'b'],
['venuta', 'noun', 'c'],
['veramente', 'adverb', 'a'],
['verbale', 'adjective', 'a'],
['verbale', 'noun', 'a'],
['verbo', 'noun', 'b'],
['verde', 'adjective', 'a'],
['verde', 'noun', 'a'],
['verdura', 'noun', 'b'],
['vergine', 'adjective', 'b'],
['vergine', 'noun', 'b'],
['vergogna', 'noun', 'b'],
['vergognarsi', 'verb', 'b'],
['verifica', 'noun', 'b'],
['verificare', 'verb', 'a'],
['verit', 'noun', 'a'],
['verme', 'noun', 'b'],
['vernice', 'noun', 'b'],
['vero', 'adjective', 'a'],
['vero', 'noun', 'a'],
['versare', 'verb', 'a'],
['versione', 'noun', 'a'],
['verso', 'noun', 'a'],
['verso', 'preposition', 'a'],
['vertebra', 'noun', 'c'],
['verticale', 'adjective', 'b'],
['verticale', 'noun', 'b'],
['vertice', 'noun', 'b'],
['vertigine', 'noun', 'c'],
['vescovo', 'noun', 'b'],
['vescovo', 'adjective', 'b'],
['vespa', 'noun', 'c'],
['veste', 'noun', 'b'],
['vestire', 'verb', 'a'],
['vestito', 'noun', 'a'],
['vestito', 'past_part', 'b'],
['vestito', 'adjective', 'b'],
['veterinario', 'adjective', 'c'],
['veterinario', 'noun', 'c'],
['vetrina', 'noun', 'b'],
['vetro', 'noun', 'a'],
['vettura', 'noun', 'b'],
['vi', 'pronoun', 'a'],
['vi', 'adverb', 'a'],
['via', 'noun', 'a'],
['via', 'adverb', 'a'],
['via', 'exclamation', 'a'],
['via', 'noun', 'a'],
['viaggiare', 'verb', 'a'],
['viaggiatore', 'noun', 'b'],
['viaggiatrice', 'noun', 'c'],
['viaggio', 'noun', 'a'],
['viale', 'noun', 'b'],
['vibrare', 'verb', 'b'],
['vice', 'noun', 'b'],
['vicenda', 'noun', 'a'],
['viceversa', 'adverb', 'b'],
['vicinanza', 'noun', 'b'],
['vicino', 'adjective', 'a'],
['vicino', 'noun', 'a'],
['vicino', 'adverb', 'a'],
['vicolo', 'noun', 'b'],
['video', 'adjective', 'a'],
['video', 'noun', 'a'],
['videogioco', 'noun', 'b'],
['viennese', 'adjective', 'c'],
['viennese', 'noun', 'c'],
['vietare', 'verb', 'b'],
['vigile', 'adjective', 'b'],
['vigile', 'noun', 'b'],
['vigilia', 'noun', 'b'],
['vigna', 'noun', 'c'],
['vigore', 'noun', 'b'],
['villa', 'noun', 'a'],
['villaggio', 'noun', 'a'],
['vincente', 'pres_part', 'b'],
['vincente', 'adjective', 'b'],
['vincente', 'noun', 'b'],
['vincere', 'verb', 'a'],
['vincitore', 'adjective', 'b'],
['vincitore', 'noun', 'b'],
['vincolo', 'noun', 'b'],
['vino', 'noun', 'a'],
['vino', 'adjective', 'a'],
['viola', 'noun', 'b'],
['viola', 'adjective', 'b'],
['violare', 'verb', 'b'],
['violazione', 'noun', 'b'],
['violentare', 'verb', 'c'],
['violento', 'adjective', 'a'],
['violento', 'noun', 'a'],
['violenza', 'noun', 'a'],
['violetta', 'noun', 'c'],
['violetto', 'adjective', 'c'],
['violetto', 'noun', 'c'],
['violino', 'noun', 'b'],
['vipera', 'noun', 'c'],
['virgola', 'noun', 'b'],
['virt', 'noun', 'b'],
['virtuale', 'adjective', 'b'],
['virus', 'noun', 'b'],
['visibile', 'adjective', 'b'],
['visibile', 'noun', 'b'],
['visione', 'noun', 'a'],
['visita', 'noun', 'a'],
['visitare', 'verb', 'a'],
['visitatore', 'noun', 'b'],
['visivo', 'adjective', 'b'],
['viso', 'noun', 'a'],
['vissuto', 'past_part', 'b'],
['vissuto', 'adjective', 'b'],
['vissuto', 'noun', 'b'],
['vista', 'noun', 'a'],
['vita', 'noun', 'a'],
['vitale', 'adjective', 'b'],
['vitale', 'noun', 'b'],
['vitamina', 'noun', 'c'],
['vite', 'noun', 'c'],
['vitello', 'noun', 'c'],
['vittima', 'noun', 'a'],
['vittoria', 'noun', 'a'],
['vivace', 'adjective', 'b'],
['vivace', 'adverb', 'b'],
['vivace', 'noun', 'b'],
['vivente', 'pres_part', 'b'],
['vivente', 'adjective', 'b'],
['vivente', 'noun', 'b'],
['vivere', 'verb', 'a'],
['vivere', 'noun', 'a'],
['vivo', 'adjective', 'a'],
['vivo', 'noun', 'a'],
['viziare', 'verb', 'c'],
['viziato', 'past_part', 'c'],
['viziato', 'adjective', 'c'],
['vizio', 'noun', 'b'],
['vocabolario', 'noun', 'b'],
['vocale', 'noun', 'b'],
['vocale', 'adjective', 'b'],
['vocazione', 'noun', 'b'],
['voce', 'noun', 'a'],
['vodka', 'noun', 'c'],
['voglia', 'noun', 'a'],
['voi', 'pronoun', 'a'],
['volantino', 'noun', 'c'],
['volare', 'verb', 'a'],
['volata', 'noun', 'c'],
['volenteroso', 'adjective', 'c'],
['volentieri', 'adverb', 'b'],
['volere', 'verb', 'a'],
['volgare', 'adjective', 'b'],
['volgare', 'noun', 'b'],
['volgere', 'verb', 'b'],
['volo', 'noun', 'a'],
['volont', 'noun', 'a'],
['volontariato', 'noun', 'b'],
['volontario', 'adjective', 'b'],
['volontario', 'noun', 'b'],
['volta', 'noun', 'a'],
['voltare', 'verb', 'a'],
['volto', 'noun', 'a'],
['volume', 'noun', 'a'],
['vomitare', 'verb', 'b'],
['vomito', 'noun', 'c'],
['vongola', 'noun', 'c'],
['vostro', 'adjective', 'a'],
['vostro', 'pronoun', 'a'],
['votare', 'verb', 'a'],
['votazione', 'noun', 'c'],
['voto', 'noun', 'a'],
['vu', 'noun', 'c'],
['vuotare', 'verb', 'c'],
['vuoto', 'adjective', 'a'],
['vuoto', 'noun', 'a'],
['wafer', 'noun', 'c'],
['web', 'noun', 'a'],
['weekend', 'noun', 'b'],
['whisky', 'noun', 'c'],
['wurstel', 'noun', 'c'],
['yogurt', 'noun', 'c'],
['zaino', 'noun', 'b'],
['zampa', 'noun', 'b'],
['zampogna', 'noun', 'c'],
['zanna', 'noun', 'c'],
['zanzara', 'noun', 'c'],
['zattera', 'noun', 'c'],
['zebra', 'noun', 'c'],
['zero', 'adjective', 'a'],
['zero', 'noun', 'a'],
['zero', 'symbol', 'a'],
['zeta', 'noun', 'c'],
['zia', 'noun', 'a'],
['zingaro', 'adjective', 'c'],
['zingaro', 'noun', 'c'],
['zio', 'noun', 'a'],
['zitella', 'noun', 'c'],
['zitto', 'adjective', 'a'],
['zitto', 'noun', 'a'],
['zoccolo', 'noun', 'c'],
['zolla', 'noun', 'c'],
['zona', 'noun', 'a'],
['zoo', 'noun', 'c'],
['zoppicare', 'verb', 'c'],
['zoppo', 'adjective', 'c'],
['zoppo', 'noun', 'c'],
['zucca', 'noun', 'b'],
['zucchero', 'noun', 'b'],
['zucchina', 'noun', 'c'],
['zuffa', 'noun', 'c'],
['zuppa', 'noun', 'c'],
] | 28.130757 | 40 | 0.50236 |
81603469dc6c0438b2388abedcbbaed330402503 | 940 | py | Python | pytorch/torch/_utils_internal.py | raghavnauhria/whatmt | c20483a437c82936cb0fb8080925e37b9c4bba87 | [
"MIT"
] | 15 | 2019-08-10T02:36:38.000Z | 2021-07-14T13:45:32.000Z | torch/_utils_internal.py | wxwoods/mctorch | 7cd6eb51fdd01fa75ed9245039a4f145ba342de2 | [
"BSD-3-Clause"
] | 7 | 2019-10-21T03:08:51.000Z | 2022-03-11T23:54:28.000Z | pytorch/torch/_utils_internal.py | raghavnauhria/whatmt | c20483a437c82936cb0fb8080925e37b9c4bba87 | [
"MIT"
] | 5 | 2019-09-27T02:41:40.000Z | 2021-11-05T20:40:49.000Z | from __future__ import absolute_import, division, print_function, unicode_literals
import os
# this arbitrary-looking assortment of functionality is provided here
# to have a central place for overrideable behavior. The motivating
# use is the FB build environment, where this source file is replaced
# by an equivalent.
if os.path.basename(os.path.dirname(__file__)) == 'shared':
torch_parent = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
else:
torch_parent = os.path.dirname(os.path.dirname(__file__))
TEST_MASTER_ADDR = '127.0.0.1'
TEST_MASTER_PORT = 29500
| 24.736842 | 82 | 0.774468 |
816071ad63a52503bfce3572b1ef2ec295dea013 | 9,099 | py | Python | libs/models.py | aquastripe/DenseCLIP | 9481bc4aac39265eb313041ae53b8f33d755508b | [
"Apache-2.0"
] | 7 | 2022-02-02T14:34:38.000Z | 2022-03-21T09:54:14.000Z | libs/models.py | aquastripe/DenseCLIP | 9481bc4aac39265eb313041ae53b8f33d755508b | [
"Apache-2.0"
] | null | null | null | libs/models.py | aquastripe/DenseCLIP | 9481bc4aac39265eb313041ae53b8f33d755508b | [
"Apache-2.0"
] | null | null | null | import json
from collections import OrderedDict
from typing import Union, List
import clip
import torch
import torch.nn as nn
import torch.nn.functional as F
from libs.definitions import ROOT
label_file = ROOT / 'imagenet_class_index.json'
with open(label_file, 'r') as f:
labels = json.load(f)
_DEFAULT_CLASSNAMES = [value[1] for value in labels.values()]
# templates are copied from https://github.com/openai/CLIP/blob/main/notebooks/Prompt_Engineering_for_ImageNet.ipynb
_DEFAULT_TEMPLATES = [
'a bad photo of a {}.',
'a photo of many {}.',
'a sculpture of a {}.',
'a photo of the hard to see {}.',
'a low resolution photo of the {}.',
'a rendering of a {}.',
'graffiti of a {}.',
'a bad photo of the {}.',
'a cropped photo of the {}.',
'a tattoo of a {}.',
'the embroidered {}.',
'a photo of a hard to see {}.',
'a bright photo of a {}.',
'a photo of a clean {}.',
'a photo of a dirty {}.',
'a dark photo of the {}.',
'a drawing of a {}.',
'a photo of my {}.',
'the plastic {}.',
'a photo of the cool {}.',
'a close-up photo of a {}.',
'a black and white photo of the {}.',
'a painting of the {}.',
'a painting of a {}.',
'a pixelated photo of the {}.',
'a sculpture of the {}.',
'a bright photo of the {}.',
'a cropped photo of a {}.',
'a plastic {}.',
'a photo of the dirty {}.',
'a jpeg corrupted photo of a {}.',
'a blurry photo of the {}.',
'a photo of the {}.',
'a good photo of the {}.',
'a rendering of the {}.',
'a {} in a video game.',
'a photo of one {}.',
'a doodle of a {}.',
'a close-up photo of the {}.',
'a photo of a {}.',
'the origami {}.',
'the {} in a video game.',
'a sketch of a {}.',
'a doodle of the {}.',
'a origami {}.',
'a low resolution photo of a {}.',
'the toy {}.',
'a rendition of the {}.',
'a photo of the clean {}.',
'a photo of a large {}.',
'a rendition of a {}.',
'a photo of a nice {}.',
'a photo of a weird {}.',
'a blurry photo of a {}.',
'a cartoon {}.',
'art of a {}.',
'a sketch of the {}.',
'a embroidered {}.',
'a pixelated photo of a {}.',
'itap of the {}.',
'a jpeg corrupted photo of the {}.',
'a good photo of a {}.',
'a plushie {}.',
'a photo of the nice {}.',
'a photo of the small {}.',
'a photo of the weird {}.',
'the cartoon {}.',
'art of the {}.',
'a drawing of the {}.',
'a photo of the large {}.',
'a black and white photo of a {}.',
'the plushie {}.',
'a dark photo of a {}.',
'itap of a {}.',
'graffiti of the {}.',
'a toy {}.',
'itap of my {}.',
'a photo of a cool {}.',
'a photo of a small {}.',
'a tattoo of the {}.',
]
| 35.964427 | 116 | 0.593032 |
8160fc2ecf8175573434885167d35e68b574a5af | 11,463 | py | Python | src/basset_sick_loss.py | shtoneyan/Basset | b6c7f8995bb4f8fc37eccf3ee0f78478beef51d7 | [
"MIT"
] | 248 | 2015-10-06T12:30:53.000Z | 2022-02-02T20:30:34.000Z | src/basset_sick_loss.py | Deepstatsanalysis/Basset | 18753ad9ff5a46291021a0fa1abaad037b6f64f0 | [
"MIT"
] | 51 | 2015-10-08T04:57:41.000Z | 2021-08-12T19:53:04.000Z | src/basset_sick_loss.py | Deepstatsanalysis/Basset | 18753ad9ff5a46291021a0fa1abaad037b6f64f0 | [
"MIT"
] | 120 | 2015-10-15T00:49:44.000Z | 2022-02-16T21:17:17.000Z | #!/usr/bin/env python
from __future__ import print_function
from optparse import OptionParser
import os
import random
import subprocess
import matplotlib
matplotlib.use('Agg')
import numpy as np
import matplotlib.pyplot as plt
import pysam
from scipy.stats import binom
from scipy.stats.mstats import mquantiles
import seaborn as sns
import stats
################################################################################
# basset_sick_loss.py
#
# Shuffle SNPs that overlap DNase sites within their sites and compare the SAD
# distributions.
#
# Todo:
# -Control for GC% changes introduced by mutation shuffles.
# -Control for positional changes within the DHS regions.
# -Properly handle indels.
################################################################################
################################################################################
# main
################################################################################
def compute_sad(sample_vcf_file, model_file, si, out_dir, seq_len, gpu, replot):
''' Run basset_sad.py to compute scores. '''
cuda_str = ''
if gpu:
cuda_str = '--cudnn'
cmd = 'basset_sad.py %s -l %d -o %s %s %s' % (cuda_str, seq_len, out_dir, model_file, sample_vcf_file)
if not replot:
subprocess.call(cmd, shell=True)
sad = []
for line in open('%s/sad_table.txt' % out_dir):
a = line.split()
if a[3] == 't%d'%si:
sad.append(float(a[-1]))
return np.array(sad)
def filter_vcf(vcf_file, bed_file, sample_vcf_file):
''' Filter the VCF file for SNPs that overlap
the BED file, removing indels. '''
# open filtered file
sample_vcf_out = open(sample_vcf_file, 'w')
# intersect
p = subprocess.Popen('bedtools intersect -wo -a %s -b %s' % (vcf_file, bed_file), stdout=subprocess.PIPE, shell=True)
for line in p.stdout:
a = line.split()
if len(a[3]) == len(a[4]) == 1:
print(line, file=sample_vcf_out, end='')
sample_vcf_out.close()
def retrieve_sad(sample_vcf_file, sad_table_file, si):
''' Retrieve SAD scores from a pre-computed table.
Note that I'm assuming here the table has all
SAD scores in one row for each SNP so I can
pull out the score I want as column si+1.
'''
snp_indexes = {}
vi = 0
for line in open(sample_vcf_file):
a = line.split()
snp_indexes[a[2]] = vi
vi += 1
sad = np.zeros(len(snp_indexes))
for line in open(sad_table_file):
a = line.split()
print(a)
if a[0] in snp_indexes:
sad[snp_indexes[a[0]]] = float(a[si+1])
return sad
def shuffle_snps(in_vcf_file, out_vcf_file, genome):
''' Shuffle the SNPs within their overlapping DHS. '''
out_vcf_open = open(out_vcf_file, 'w')
for line in open(in_vcf_file):
a = line.split()
# read SNP info
snp_chrom = a[0]
snp_pos = int(a[1])
snp_nt = a[3]
# determine BED start
bi = 5
while a[bi] != snp_chrom:
bi += 1
# read BED info
bed_chrom = a[bi]
bed_start = int(a[bi+1])
bed_end = int(a[bi+2])
# get sequence
bed_seq = genome.fetch(bed_chrom, bed_start, bed_end)
# determine matching positions
bed_nt_matches = [i for i in range(len(bed_seq)) if bed_seq[i] == snp_nt]
while len(bed_nt_matches) == 0:
# expand segment by 10 nt
bed_start = max(0, bed_start-10)
bed_end += 10
bed_seq = genome.fetch(bed_chrom, bed_start, bed_end)
# sample new SNP position
shuf_pos = bed_start + 1 + random.choice(bed_nt_matches)
# write into columns
a[1] = str(shuf_pos)
print('\t'.join(a), file=out_vcf_open)
out_vcf_open.close()
def shuffle_snps_old(in_vcf_file, out_vcf_file, genome):
''' Shuffle the SNPs within their overlapping DHS. '''
out_vcf_open = open(out_vcf_file, 'w')
for line in open(in_vcf_file):
a = line.split()
# read SNP info
snp_chrom = a[0]
snp_pos = int(a[1])
# determine BED start
bi = 5
while a[bi] != snp_chrom:
bi += 1
# read BED info
bed_chrom = a[bi]
bed_start = int(a[bi+1])
bed_end = int(a[bi+2])
# sample new SNP position
shuf_pos = random.randint(bed_start, bed_end-1)
while shuf_pos == snp_pos:
shuf_pos = random.randint(bed_start, bed_end-1)
# set reference allele
ref_nt = genome.fetch(snp_chrom, shuf_pos-1, shuf_pos)
# sample alternate allele
alt_nt = random.choice('ACGT')
while alt_nt == ref_nt:
alt_nt = random.choice('ACGT')
# write into columns
a[1] = str(shuf_pos)
a[3] = ref_nt
a[4] = alt_nt
print('\t'.join(a), file=out_vcf_open)
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
main()
| 34.527108 | 175 | 0.566693 |
816115c45af8c3075e38530126280e891f465fb7 | 1,657 | py | Python | level2/huge/split_huge_from_tar_strace.py | fishilico/sstic-2016 | 9a05bb18df4c8d2e76f1e30fda6b38b1bc930e8c | [
"Beerware"
] | null | null | null | level2/huge/split_huge_from_tar_strace.py | fishilico/sstic-2016 | 9a05bb18df4c8d2e76f1e30fda6b38b1bc930e8c | [
"Beerware"
] | null | null | null | level2/huge/split_huge_from_tar_strace.py | fishilico/sstic-2016 | 9a05bb18df4c8d2e76f1e30fda6b38b1bc930e8c | [
"Beerware"
] | 1 | 2020-04-03T06:19:11.000Z | 2020-04-03T06:19:11.000Z | #!/usr/bin/env python3
import codecs
import re
def trans_addr(addr):
"""Traduit une position de fichier en une adresse du programme"""
if addr < 0x1000:
return 0
if 0x0000000000001000 <= addr < 0x0000000000001000 + 0x00001ef000000000:
return 0x00002b0000000000 + addr - 0x0000000000001000
if 0x00002afffffe1000 <= addr < 0x00002afffffe1000 + 0x0000161000000000:
return 0x000049f000000000 + addr - 0x00002afffffe1000
if 0x000049effffe1000 <= addr < 0x000049effffe1000 + 0x00002afffffe0000:
return 0x0000000000020000 + addr - 0x000049effffe1000
raise Exception("Invalid addr {:#x}".format(addr))
blobs = {}
with open('strace_tar_output.log', 'r') as f:
curseek = 0
for line in f:
m = re.match(r'lseek\(4, ([^,]*), SEEK_SET\)', line)
if m is not None:
curseek = int(m.group(1))
continue
if line.startswith('write(4, "'):
m = re.match(r'write\(4, "(.*)", ([0-9]*)\) = ([0-9]*)', line)
assert m is not None:
rawdata, count1, count2 = m.groups()
assert count1 == count2
addr = curseek
curseek += int(count1)
data = codecs.escape_decode(rawdata.encode('ascii'))[0]
# Trouve le premier octet non-nul dans le bloc de donnes
i = 0
while i < len(data) and not data[i]:
i += 1
if i >= len(data):
continue
addr = trans_addr(addr + i)
data = data[i:].rstrip(b'\0')
with open('out/blob-{:016x}.bin'.format(addr), 'wb') as f:
f.write(data)
| 36.822222 | 76 | 0.569101 |
81617162fc6604315b045e09dee8878d1bf83430 | 4,826 | py | Python | UI/test/ui_test.py | tunapro1234/ai.destroy | 0c0e1be378e40ef069fcafaa91eb9c98de31520f | [
"MIT"
] | null | null | null | UI/test/ui_test.py | tunapro1234/ai.destroy | 0c0e1be378e40ef069fcafaa91eb9c98de31520f | [
"MIT"
] | null | null | null | UI/test/ui_test.py | tunapro1234/ai.destroy | 0c0e1be378e40ef069fcafaa91eb9c98de31520f | [
"MIT"
] | null | null | null | import pygame_gui
import pygame
TITLE = "TUNAPRO1234"
BACKGROUND = colors.darkslategray
WIDTH, HEIGHT = 1920, 1080
"""
Hzlca bir plan yapacam
Neural alar kontrol edebileceimiz kk bir framework
Alarn geliimini grebileceiz deitirebileceiz ve kaydedebileceiz
Bunun iin
-select box yaps
-balatmak iin buton
-kaydetme olaylar iin stteki eyden
Pencereler
-tkladmz nronun bilgilerini gsteren ve deitirebilen bir pencere
-tkladmz weightin deimesini salayan bir pencere
Norn, katman ve a iin pygame wrapperlar yazacam
Weigth iin de bir class olur
Kaydetme olayna daha var
"""
# elements: dict: {"buttons": butonlar, "entries", entryler}
if __name__ == "__main__":
main() | 33.282759 | 187 | 0.643804 |
8163184bea4450d8faedd6f3d068c99c6560b188 | 2,814 | py | Python | tests/test_temperature_system.py | SmartSleepIoT/SmartSleepCoding | 21c19489f0c477cbfbabd3a1d232f526f84a9e49 | [
"BSD-3-Clause"
] | null | null | null | tests/test_temperature_system.py | SmartSleepIoT/SmartSleepCoding | 21c19489f0c477cbfbabd3a1d232f526f84a9e49 | [
"BSD-3-Clause"
] | 41 | 2021-10-20T17:54:59.000Z | 2022-02-02T20:43:53.000Z | tests/test_temperature_system.py | SmartSleepIoT/SmartSleepCoding | 21c19489f0c477cbfbabd3a1d232f526f84a9e49 | [
"BSD-3-Clause"
] | null | null | null | import time
import pytest
from flask import g
from flask import session
import paho.mqtt.client as paho
from SmartSleep.db import get_db
from flask import json
import runpy
msg_nr = 0
messages = [""]
broker = 'broker.emqx.io'
port = 1883
| 29.621053 | 105 | 0.646411 |
81642e5d95ded6a23159027c35921f4b03706531 | 136 | py | Python | 3_gabor/model/gabor_rf/maprf/invlink.py | mackelab/IdentifyMechanisticModels_2020 | b93c90ec6156ae5f8afee6aaac7317373e9caf5e | [
"MIT"
] | 3 | 2020-10-23T02:53:11.000Z | 2021-03-12T11:04:37.000Z | 3_gabor/model/gabor_rf/maprf/invlink.py | mackelab/IdentifyMechanisticModels_2020 | b93c90ec6156ae5f8afee6aaac7317373e9caf5e | [
"MIT"
] | null | null | null | 3_gabor/model/gabor_rf/maprf/invlink.py | mackelab/IdentifyMechanisticModels_2020 | b93c90ec6156ae5f8afee6aaac7317373e9caf5e | [
"MIT"
] | 1 | 2021-07-28T08:38:05.000Z | 2021-07-28T08:38:05.000Z | import theano.tensor as tt
| 12.363636 | 42 | 0.639706 |
81643c548f00589437ba73490093135a47e9e5d0 | 470 | py | Python | readtest.py | pyEtherCAT/Test-Source | 32e7f36873cf311580acc25ab76db589e209e479 | [
"MIT"
] | null | null | null | readtest.py | pyEtherCAT/Test-Source | 32e7f36873cf311580acc25ab76db589e209e479 | [
"MIT"
] | null | null | null | readtest.py | pyEtherCAT/Test-Source | 32e7f36873cf311580acc25ab76db589e209e479 | [
"MIT"
] | null | null | null | from pyEtherCAT import MasterEtherCAT #
nic = "eth0" #
cat = MasterEtherCAT.MasterEtherCAT(nic)
ADP = 0x0000 #1
ADDR = 0x0E00 #
cat.APRD(IDX=0x00, ADP=ADP, ADO=ADDR, DATA=[0,0,0,0,0,0,0,0]) #DATA(64bit)
(DATA, WKC) = cat.socket_read() #
print("[0x{:04X}]= 0x{:02x}{:02x},0x{:02x}{:02x},0x{:02x}{:02x},0x{:02x}{:02x}".format(ADDR, DATA[7],DATA[6],DATA[5],DATA[4],DATA[3],DATA[2],DATA[1],DATA[0]))
# | 47 | 158 | 0.691489 |
8167b0d56a737d008f46fbbcfb74a28e00ab2a2b | 1,353 | py | Python | python/dataingest/grammar/dmo/python_loc_parser.py | jiportilla/ontology | 8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40 | [
"MIT"
] | null | null | null | python/dataingest/grammar/dmo/python_loc_parser.py | jiportilla/ontology | 8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40 | [
"MIT"
] | null | null | null | python/dataingest/grammar/dmo/python_loc_parser.py | jiportilla/ontology | 8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import pprint
from base import BaseObject
from base import FileIO
| 24.160714 | 103 | 0.554324 |
8168288e7e0624056cb2c2cd06a6e71eb7b00f91 | 255 | py | Python | time_series_data_generator/csv_to_df_generator.py | ArtHackDay-Plus1/ParameterServer | 5e6e57154d2d29e311b9a725fd3b9ac8c5b99a74 | [
"MIT"
] | null | null | null | time_series_data_generator/csv_to_df_generator.py | ArtHackDay-Plus1/ParameterServer | 5e6e57154d2d29e311b9a725fd3b9ac8c5b99a74 | [
"MIT"
] | 7 | 2018-03-10T09:44:34.000Z | 2018-06-17T09:44:59.000Z | time_series_data_generator/csv_to_df_generator.py | ArtHackDay-Plus1/ParameterServer | 5e6e57154d2d29e311b9a725fd3b9ac8c5b99a74 | [
"MIT"
] | 1 | 2018-03-10T04:56:01.000Z | 2018-03-10T04:56:01.000Z | import pandas as pd
import time
df = pd.read_csv("data/sample.csv")
for num in range(1000):
argx = str(df["x"][num:num+1].get_values())
argy = str(df["y"][num:num+1].get_values())
print("x:{0} / y:{1}".format(argx,argy))
time.sleep(0.1)
| 23.181818 | 47 | 0.611765 |
816842032e46719c27ed0ea91d613473a3f094ca | 601 | py | Python | architecture_tool_django/graphdefs/urls.py | goldginkgo/architecture_tool_django | e4229c5938a4dd01d0877afa7b93daf68e09283b | [
"MIT"
] | 1 | 2021-08-13T01:37:29.000Z | 2021-08-13T01:37:29.000Z | architecture_tool_django/graphdefs/urls.py | goldginkgo/architecture_tool_django | e4229c5938a4dd01d0877afa7b93daf68e09283b | [
"MIT"
] | null | null | null | architecture_tool_django/graphdefs/urls.py | goldginkgo/architecture_tool_django | e4229c5938a4dd01d0877afa7b93daf68e09283b | [
"MIT"
] | 1 | 2021-07-19T07:57:54.000Z | 2021-07-19T07:57:54.000Z | from django.urls import path
from . import views
app_name = "graphs"
urlpatterns = [
path("graphs/", views.GraphListView.as_view(), name="graph.list"),
path("graphs/create/", views.GraphCreateView.as_view(), name="graph.create"),
path(
"graphs/<str:pk>/",
views.GraphDetailView.as_view(),
name="graph.detail",
),
path(
"graphs/<str:pk>/update/",
views.GraphUpdateView.as_view(),
name="graph.update",
),
path(
"graphs/<str:pk>/delete/",
views.GraphDeleteView.as_view(),
name="graph.delete",
),
]
| 24.04 | 81 | 0.587354 |
81690ba836e0e2d1c0fdfb89754bbbb996e53c02 | 2,823 | py | Python | lib/utils/blob.py | TheRevanchist/DeepWatershedDetection | 6d8f3b3ca6db67bcebef8e18fb11248e15bd9dc4 | [
"MIT"
] | null | null | null | lib/utils/blob.py | TheRevanchist/DeepWatershedDetection | 6d8f3b3ca6db67bcebef8e18fb11248e15bd9dc4 | [
"MIT"
] | null | null | null | lib/utils/blob.py | TheRevanchist/DeepWatershedDetection | 6d8f3b3ca6db67bcebef8e18fb11248e15bd9dc4 | [
"MIT"
] | null | null | null | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick - extended by Lukas Tuggener
# --------------------------------------------------------
"""Blob helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import cv2
import random
def im_list_to_blob(ims):
"""Convert a list of images into a network input.
Assumes images are already prepared (means subtracted, BGR order, ...).
"""
max_shape = np.array([im.shape for im in ims]).max(axis=0)
num_images = len(ims)
blob = np.zeros((num_images, max_shape[0], max_shape[1], 3),
dtype=np.float32)
for i in range(num_images):
im = ims[i]
blob[i, 0:im.shape[0], 0:im.shape[1], :] = im
return blob
def prep_im_for_blob(im, pixel_means, global_scale, args):
"""Mean subtract and scale an image for use in a blob."""
im = im.astype(np.float32, copy=False)
# substract mean
if args.substract_mean == "True":
im -= pixel_means
# do global scaling
im = cv2.resize(im, None, None, fx=global_scale, fy=global_scale,
interpolation=cv2.INTER_LINEAR)
im_size_max = np.max(im.shape[0:2])
# Prevent the biggest axis from being more than MAX_SIZE
if im_size_max > args.max_edge:
if not args.crop == "True":
# scale down if bigger than max size
re_scale = (float(args.max_edge) / float(im_size_max))
im = cv2.resize(im, None, None, fx=re_scale, fy=re_scale,
interpolation=cv2.INTER_LINEAR)
global_scale = global_scale*re_scale
crop_box = [0,0,im.shape[0],im.shape[1]]
else:
# Crop image
topleft = random.uniform(0,1)<args.crop_top_left_bias
# crop to max size if necessary
if im.shape[0] <= args.max_edge or topleft:
crop_0 = 0
else:
crop_0 = random.randint(0,im.shape[0]-args.max_edge)
if im.shape[1] <= args.max_edge or topleft:
crop_1 = 0
else:
crop_1 = random.randint(0,im.shape[1]-args.max_edge)
crop_box = [crop_0, crop_1, min(crop_0+args.max_edge,im.shape[0]), min(crop_1+args.max_edge,im.shape[1])]
im = im[crop_box[0]:crop_box[2],crop_box[1]:crop_box[3]]
else:
crop_box = [0, 0, im.shape[0], im.shape[1]]
if not args.pad_to == 0:
# pad to fit RefineNet #TODO fix refinenet padding problem
y_mulity = int(np.ceil(im.shape[0] / float(args.pad_to)))
x_mulity = int(np.ceil(im.shape[1] / float(args.pad_to)))
canv = np.ones([y_mulity * args.pad_to, x_mulity * args.pad_to,3], dtype=np.uint8) * 255
canv[0:im.shape[0], 0:im.shape[1]] = im
im = canv
return im, global_scale, crop_box
| 32.825581 | 111 | 0.631598 |
81691bebff51090814a13a3ea3f9262d90d38a7b | 1,022 | py | Python | edlm/convert/_get_media_folders.py | etcher-be/EDLM | 7b25c85252fd15c2c222b00271f7a32e335db704 | [
"MIT"
] | null | null | null | edlm/convert/_get_media_folders.py | etcher-be/EDLM | 7b25c85252fd15c2c222b00271f7a32e335db704 | [
"MIT"
] | 4 | 2020-03-24T16:53:26.000Z | 2020-06-26T08:31:13.000Z | edlm/convert/_get_media_folders.py | etcher-be/EDLM | 7b25c85252fd15c2c222b00271f7a32e335db704 | [
"MIT"
] | null | null | null | # coding=utf-8
"""
Gathers the media folders
"""
import elib
from ._context import Context
def get_media_folders(ctx: Context):
"""
Gathers the media folders
"""
ctx.info('gathering media folders')
media_folders = []
this_folder = ctx.source_folder
while True:
ctx.debug(f'traversing: "{this_folder}"')
media_folder_candidate = elib.path.ensure_path(this_folder, 'media', must_exist=False).absolute()
if media_folder_candidate.exists() and media_folder_candidate.is_dir():
ctx.debug(f'media folder found: "{media_folder_candidate}"')
media_folders.append(media_folder_candidate)
if len(this_folder.parents) is 1:
ctx.debug(f'reach mount point at: "{this_folder}"')
break
this_folder = this_folder.parent
# if not media_folders:
# raise ConvertError('no media folder found', ctx)
ctx.info(f'media folders:\n{elib.pretty_format(media_folders)}')
ctx.media_folders = media_folders
| 28.388889 | 105 | 0.672211 |
8169c20b93a9491060201043f0c1a523fd5bc8ec | 1,924 | py | Python | twister2/python-support/src/main/python/twister2/tset/TLink.py | pulasthi/twister2 | 40b73bdf7dcb778d957c3f146baf825a97dceae5 | [
"Apache-2.0"
] | 63 | 2017-08-31T10:02:52.000Z | 2021-12-18T21:42:14.000Z | twister2/python-support/src/main/python/twister2/tset/TLink.py | pulasthi/twister2 | 40b73bdf7dcb778d957c3f146baf825a97dceae5 | [
"Apache-2.0"
] | 284 | 2017-10-20T15:35:26.000Z | 2020-09-11T15:25:40.000Z | twister2/python-support/src/main/python/twister2/tset/TLink.py | pulasthi/twister2 | 40b73bdf7dcb778d957c3f146baf825a97dceae5 | [
"Apache-2.0"
] | 44 | 2017-08-21T19:47:57.000Z | 2020-11-22T22:45:50.000Z | from inspect import signature
import twister2.tset.TSet as ts
from twister2.utils import function_wrapper
| 42.755556 | 96 | 0.72817 |
816e5b3d645c0e4cb41592db326d16685c653103 | 9,373 | py | Python | e2cnn/nn/modules/nonlinearities/concatenated.py | ziatdinovmax/e2cnn | e486a0d2cec71f2bde2d61f2f1315922f2883cee | [
"BSD-3-Clause"
] | null | null | null | e2cnn/nn/modules/nonlinearities/concatenated.py | ziatdinovmax/e2cnn | e486a0d2cec71f2bde2d61f2f1315922f2883cee | [
"BSD-3-Clause"
] | null | null | null | e2cnn/nn/modules/nonlinearities/concatenated.py | ziatdinovmax/e2cnn | e486a0d2cec71f2bde2d61f2f1315922f2883cee | [
"BSD-3-Clause"
] | null | null | null | from e2cnn.gspaces import *
from e2cnn.nn import FieldType
from e2cnn.nn import GeometricTensor
from e2cnn.group import Representation
from e2cnn.group.representation import build_from_discrete_group_representation
from ..equivariant_module import EquivariantModule
import torch
from typing import List, Tuple, Any
import numpy as np
import math
__all__ = ["ConcatenatedNonLinearity"]
| 37.047431 | 124 | 0.559159 |
81710a8f5de8e268c2f9c31947a6e69ae41e9b04 | 2,551 | py | Python | modules/gathering/host_gathering.py | anouarbensaad/VulnX | 2f9f46e59f28ceb6d5cf3bfacd9810d43b4b25cb | [
"MIT"
] | 10 | 2019-05-10T04:43:54.000Z | 2019-05-16T00:45:46.000Z | modules/gathering/host_gathering.py | anouarbensaad/VulnX | 2f9f46e59f28ceb6d5cf3bfacd9810d43b4b25cb | [
"MIT"
] | null | null | null | modules/gathering/host_gathering.py | anouarbensaad/VulnX | 2f9f46e59f28ceb6d5cf3bfacd9810d43b4b25cb | [
"MIT"
] | 8 | 2019-05-13T04:25:11.000Z | 2019-05-15T02:47:49.000Z | import requests
import re
import socket
from common.colors import bad,que, info, good,run,W,end
from common.uriParser import parsing_url as hostd | 42.516667 | 95 | 0.53822 |
8171ba68e87f53d5c2ecb6dd90deb2acd88e328d | 34,379 | py | Python | datastore/core/basic.py | datastore/datastore | 7ccf0cd4748001d3dbf5e6dda369b0f63e0269d3 | [
"MIT"
] | 65 | 2015-03-22T23:43:48.000Z | 2022-03-25T16:10:33.000Z | datastore/core/basic.py | datastore/datastore | 7ccf0cd4748001d3dbf5e6dda369b0f63e0269d3 | [
"MIT"
] | 3 | 2015-03-11T21:57:23.000Z | 2019-07-26T16:20:29.000Z | datastore/core/basic.py | datastore/datastore | 7ccf0cd4748001d3dbf5e6dda369b0f63e0269d3 | [
"MIT"
] | 14 | 2015-01-23T17:03:33.000Z | 2020-02-03T06:35:04.000Z |
from key import Key
from query import Cursor
'''
Hello Tiered Access
>>> import pymongo
>>> import datastore.core
>>>
>>> from datastore.impl.mongo import MongoDatastore
>>> from datastore.impl.lrucache import LRUCache
>>> from datastore.impl.filesystem import FileSystemDatastore
>>>
>>> conn = pymongo.Connection()
>>> mongo = MongoDatastore(conn.test_db)
>>>
>>> cache = LRUCache(1000)
>>> fs = FileSystemDatastore('/tmp/.test_db')
>>>
>>> ds = datastore.TieredDatastore([cache, mongo, fs])
>>>
>>> hello = datastore.Key('hello')
>>> ds.put(hello, 'world')
>>> ds.contains(hello)
True
>>> ds.get(hello)
'world'
>>> ds.delete(hello)
>>> ds.get(hello)
None
Hello Sharding
>>> import datastore.core
>>>
>>> shards = [datastore.DictDatastore() for i in range(0, 10)]
>>>
>>> ds = datastore.ShardedDatastore(shards)
>>>
>>> hello = datastore.Key('hello')
>>> ds.put(hello, 'world')
>>> ds.contains(hello)
True
>>> ds.get(hello)
'world'
>>> ds.delete(hello)
>>> ds.get(hello)
None
'''
| 28.53029 | 80 | 0.651706 |
8173b9a0c93895ba388cef8eeda4fb14eb4184e3 | 15,988 | py | Python | website/admin.py | jonfroehlich/makeabilitylabwebsite | 0b322b5a172cf1d4edc63559e3de713ad3e3542c | [
"MIT"
] | 2 | 2017-06-19T02:24:48.000Z | 2018-10-25T09:14:59.000Z | website/admin.py | jonfroehlich/makeabilitylabwebsite | 0b322b5a172cf1d4edc63559e3de713ad3e3542c | [
"MIT"
] | 571 | 2017-06-14T13:38:45.000Z | 2020-07-17T18:15:58.000Z | website/admin.py | jonfroehlich/makeabilitylabwebsite | 0b322b5a172cf1d4edc63559e3de713ad3e3542c | [
"MIT"
] | 7 | 2017-12-06T21:51:29.000Z | 2020-06-18T19:58:36.000Z | from django.contrib import admin
from django.contrib.admin import widgets
from .models import Person, Publication, Position, Talk, Project, Poster, Keyword, News, Banner, Video, Project_header, Photo, Project_umbrella, Project_Role, Sponsor
from website.admin_list_filters import PositionRoleListFilter, PositionTitleListFilter, PubVenueTypeListFilter, PubVenueListFilter
from sortedm2m_filter_horizontal_widget.forms import SortedFilteredSelectMultiple
import django
from django import forms
from django.http import HttpResponse
from datetime import datetime
from django.template import loader
from django.template import RequestContext
from django.shortcuts import redirect
from django import forms
import urllib
import bibtexparser
from image_cropping import ImageCroppingMixin
# Uses format as per https://github.com/jonasundderwolf/django-image-cropping to add cropping to the admin page
#from https://stackoverflow.com/questions/9602217/define-an-order-for-manytomanyfield-with-django
#display items inline
admin.site.register(Person, PersonAdmin)
admin.site.register(Publication, PublicationAdmin)
admin.site.register(Talk, TalkAdmin)
admin.site.register(Project, ProjectAdmin)
admin.site.register(Poster, PosterAdmin)
admin.site.register(Keyword)
admin.site.register(News, NewsAdmin)
admin.site.register(Banner, BannerAdmin)
admin.site.register(Video, VideoAdmin)
admin.site.register(Photo, PhotoAdmin)
admin.site.register(Project_umbrella, ProjectUmbrellaAdmin)
admin.site.register(Sponsor)
# For modifying more on the front admin landing page, see https://medium.com/django-musings/customizing-the-django-admin-site-b82c7d325510
admin.site.index_title = "Makeability Lab Admin. Django version: " + django.get_version() + " ML Version: 0.5.7a"
| 58.99631 | 185 | 0.714411 |
8173fb74546ca4fcfb60600f03ab6b477c4abacb | 6,191 | py | Python | python/annotation_orthologs_inference.py | liebermanlab/wide-variant | 2fe4c54d1e4bcf0e5e06313cc9696588ab105653 | [
"Unlicense"
] | null | null | null | python/annotation_orthologs_inference.py | liebermanlab/wide-variant | 2fe4c54d1e4bcf0e5e06313cc9696588ab105653 | [
"Unlicense"
] | null | null | null | python/annotation_orthologs_inference.py | liebermanlab/wide-variant | 2fe4c54d1e4bcf0e5e06313cc9696588ab105653 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 26 21:39:04 2019
@author: fmk
"""
import argparse,subprocess,string,random
import pandas as pd
''' positional and optional argument parser'''
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description='''\
Infer orthologs across two or more prokka-based annotations, and returns overview table for all genes.
Homology is inferred using CD-HIT and annotations need to be in fasta format (nucleotide (*.ffn) or amino acid (*.faa))
CD-HIT: %identity optional. Fixed: -s 0.9, ie. shorter sequences need to be at least 90% length of the representative of the cluster.
''',
epilog="Questions or comments? --> fkey@mit.edu")
parser.add_argument("-f", dest="file_sample_annotation", help="2-col TSV file with subject-identifier and annotation file path.",type=argparse.FileType('r'),required=True)
parser.add_argument('-p', dest="percentIdentity", action="store", default='0.98', help="Percent identity cd-hit. Default: 0.98")
parser.add_argument('-o', dest="outpath", action="store", help="Output path.",required=True)
parser.add_argument("-c", dest="cdhit", help="Path to CD-HIT executable", action="store",default="cd-hit")
args = parser.parse_args()
''' FUNCTIONS'''
''' MAIN '''
# TEST Vars
#file_sample_annotation = "/Users/fmk/Documents/mit/stapAD/tmp/pycode/prokka_ffn/subject_4_9_16.list"
##annopath = "/Users/fmk/Documents/mit/stapAD/mlst"
##filetype = "txt"
#outpath = "/Users/fmk/Documents/mit/stapAD/tmp/pycode"
#percentIdentity=0.95
#cdhit_executable = '/usr/local/bin/cd-hit'
if __name__ == "__main__":
# assign argparse arguments
file_sample_annotation = args.file_sample_annotation
# annopath = fix_path(args.annopath) # fix path to annotation has trailing "/"
outpath = fix_path(args.outpath)
# filetype = args.filetype
cdhit_executable = args.cdhit
percentIdentity = args.percentIdentity
# get concatenated annotation file (output: merged_annotation.fa) and dict[subject]=prokka-tag
subj_tag_dict = read_merge_sample_annotation_file(file_sample_annotation)
subject_list_ord = list(subj_tag_dict.keys())
prokkaTag_list_ord = [ subj_tag_dict[k] for k in subject_list_ord ]
# cd-hit
command_cdhit = cdhit_executable + " -s 0.9 -c " + percentIdentity + " -i " + outpath + "merged_annotation.fa" + " -o " + outpath+"cdhit_results"
subprocess.run(command_cdhit,shell=True)
# read-in cdhit results: dict[SAAB_XXXXX_pidZZZ_YYY]=[geneX,geneY,geneZ]
cdhit_res_dict = read_cdhit_cluster(outpath+"cdhit_results.clstr",percentIdentity,prokkaTag_list_ord)
# build table of gene annotation
cdhit_res_df = pd.DataFrame.from_dict(cdhit_res_dict,orient='index',columns=subject_list_ord)
# write cdhit res
cdhit_res_df.to_csv(outpath+'annotation_orthologs.tsv',sep="\t")
| 45.189781 | 182 | 0.65466 |
81740534e476d32c799f65e43f16716d4ed45822 | 708 | py | Python | tests/test_vtable.py | matthewpruett/angr | bfba2af1ea2eb941001339f47a1264a685c60eec | [
"BSD-2-Clause"
] | 6,132 | 2015-08-06T23:24:47.000Z | 2022-03-31T21:49:34.000Z | tests/test_vtable.py | matthewpruett/angr | bfba2af1ea2eb941001339f47a1264a685c60eec | [
"BSD-2-Clause"
] | 2,272 | 2015-08-10T08:40:07.000Z | 2022-03-31T23:46:44.000Z | tests/test_vtable.py | matthewpruett/angr | bfba2af1ea2eb941001339f47a1264a685c60eec | [
"BSD-2-Clause"
] | 1,155 | 2015-08-06T23:37:39.000Z | 2022-03-31T05:54:11.000Z | import os
import angr
test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests')
if __name__ == "__main__":
test_vtable_extraction_x86_64()
| 32.181818 | 107 | 0.689266 |
81741edd16ac8dabfdb435a60121ed91382e9d89 | 465 | py | Python | sendmail.py | loitd/buzreportgenerator | 8d07be40b68859ca15173f3b229dd93f6bb39809 | [
"MIT"
] | null | null | null | sendmail.py | loitd/buzreportgenerator | 8d07be40b68859ca15173f3b229dd93f6bb39809 | [
"MIT"
] | null | null | null | sendmail.py | loitd/buzreportgenerator | 8d07be40b68859ca15173f3b229dd93f6bb39809 | [
"MIT"
] | null | null | null | from zeep import Client
if __name__ == "__main__":
test2() | 33.214286 | 131 | 0.696774 |
8174be4107d534513138717c81ca4815dbd17aaf | 2,760 | py | Python | pommerman/agents/http_agent.py | KaixiangLin/playground | a0eb299f4772bada1c528a881f3bf26404b131aa | [
"Apache-2.0"
] | 2 | 2018-11-10T08:31:13.000Z | 2018-11-13T08:16:45.000Z | pommerman/agents/http_agent.py | KaixiangLin/playground | a0eb299f4772bada1c528a881f3bf26404b131aa | [
"Apache-2.0"
] | null | null | null | pommerman/agents/http_agent.py | KaixiangLin/playground | a0eb299f4772bada1c528a881f3bf26404b131aa | [
"Apache-2.0"
] | null | null | null | '''The HTTP agent - provides observation using http push to remote
agent and expects action in the reply'''
import json
import time
import os
import threading
import requests
from . import BaseAgent
from .. import utility
from .. import characters
| 34.074074 | 81 | 0.544565 |
8174d6a81d47ed944222a745013e7d241d84e72a | 737 | py | Python | cacao_app/event/serializers.py | CacaoMovil/guia-de-cacao-django | 14d18edb76502736f6f31955509c3b413f1f91fc | [
"BSD-3-Clause"
] | 1 | 2016-03-07T17:03:45.000Z | 2016-03-07T17:03:45.000Z | cacao_app/event/serializers.py | CacaoMovil/guia-de-cacao-django | 14d18edb76502736f6f31955509c3b413f1f91fc | [
"BSD-3-Clause"
] | 4 | 2016-04-29T20:48:31.000Z | 2021-06-10T20:39:26.000Z | cacao_app/event/serializers.py | CacaoMovil/guia-de-cacao-django | 14d18edb76502736f6f31955509c3b413f1f91fc | [
"BSD-3-Clause"
] | 3 | 2016-03-04T19:46:45.000Z | 2016-05-11T19:46:00.000Z | # -*- coding: utf-8 -*-
from rest_framework import serializers
from django_countries.serializer_fields import CountryField
from .models import Event, CountryEvent
| 25.413793 | 70 | 0.693351 |
817503f7a5b9852ce0db8730044ea4170fe4eb91 | 13,998 | py | Python | examples/map.py | jlsajfj/NBT | a7aaaadce423cd50f941ccd5cd25fbd071651336 | [
"MIT"
] | 241 | 2015-01-14T22:49:03.000Z | 2022-03-17T06:46:44.000Z | examples/map.py | jlsajfj/NBT | a7aaaadce423cd50f941ccd5cd25fbd071651336 | [
"MIT"
] | 78 | 2015-01-02T15:01:06.000Z | 2022-03-12T16:06:23.000Z | examples/map.py | jlsajfj/NBT | a7aaaadce423cd50f941ccd5cd25fbd071651336 | [
"MIT"
] | 87 | 2015-01-24T20:02:29.000Z | 2022-03-17T21:32:24.000Z | #!/usr/bin/env python
"""
Prints a map of the entire world.
"""
import os, sys
import math
from struct import pack
# local module
try:
import nbt
except ImportError:
# nbt not in search path. Let's see if it can be found in the parent folder
extrasearchpath = os.path.realpath(os.path.join(__file__,os.pardir,os.pardir))
if not os.path.exists(os.path.join(extrasearchpath,'nbt')):
raise
sys.path.append(extrasearchpath)
from nbt.region import RegionFile
from nbt.chunk import Chunk
from nbt.world import WorldFolder,McRegionWorldFolder
# PIL module (not build-in)
try:
from PIL import Image
except ImportError:
# PIL not in search path. Let's see if it can be found in the parent folder
sys.stderr.write("Module PIL/Image not found. Pillow (a PIL fork) can be found at http://python-imaging.github.io/\n")
# Note: it may also be possible that PIL is installed, but JPEG support is disabled or broken
sys.exit(70) # EX_SOFTWARE
# List of blocks to ignore
# Uncomment all the lines to show underground structures
# TODO: move this list into a separate config file
block_ignore = [
'air', # At least this one
# 'cave_air', 'water', 'lava', 'snow', 'ice',
# 'grass', 'tall_grass', 'dead_bush',
# 'seagrass', 'tall_seagrass', 'kelp', 'kelp_plant',
# 'dandelion', 'poppy', 'oxeye_daisy', 'white_tulip',
# 'azure_bluet', 'lilac', 'rose_bush', 'peony', 'blue_orchid',
# 'lily_pad', 'sugar_cane', 'vine', 'pumpkin', 'cactus',
# 'wheat', 'potatoes', 'beetroots', 'carrots',
# 'oak_leaves', 'dark_oak_leaves', 'birch_leaves',
# 'acacia_leaves', 'spruce_leaves',
# 'oak_log', 'dark_oak_log', 'birch_log',
# 'acacia_log', 'spruce_log',
# 'brown_mushroom', 'red_mushroom',
# 'brown_mushroom_block', 'red_mushroom_block', 'mushroom_stem',
# 'grass_block', 'grass_path', 'farmland', 'dirt',
# 'stone', 'sand', 'gravel', 'clay',
# 'sandstone', 'diorite', 'andesite', 'granite', 'obsidian',
# 'coal_ore', 'iron_ore', 'gold_ore', 'diamond_ore',
# 'redstone_ore', 'lapis_ore', 'emerald_ore',
# 'cobweb',
]
# Map of block colors from names
# Legacy block numeric identifiers are now hidden by Block class
# and mapped to alpha identifiers in best effort
# TODO: move this map into a separate config file
block_colors = {
'acacia_leaves': {'h':114, 's':64, 'l':22 },
'acacia_log': {'h':35, 's':93, 'l':30 },
'air': {'h':0, 's':0, 'l':0 },
'andesite': {'h':0, 's':0, 'l':32 },
'azure_bluet': {'h':0, 's':0, 'l':100},
'bedrock': {'h':0, 's':0, 'l':10 },
'birch_leaves': {'h':114, 's':64, 'l':22 },
'birch_log': {'h':35, 's':93, 'l':30 },
'blue_orchid': {'h':0, 's':0, 'l':100},
'bookshelf': {'h':0, 's':0, 'l':100},
'brown_mushroom': {'h':0, 's':0, 'l':100},
'brown_mushroom_block': {'h':0, 's':0, 'l':100},
'cactus': {'h':126, 's':61, 'l':20 },
'cave_air': {'h':0, 's':0, 'l':0 },
'chest': {'h':0, 's':100, 'l':50 },
'clay': {'h':7, 's':62, 'l':23 },
'coal_ore': {'h':0, 's':0, 'l':10 },
'cobblestone': {'h':0, 's':0, 'l':25 },
'cobblestone_stairs': {'h':0, 's':0, 'l':25 },
'crafting_table': {'h':0, 's':0, 'l':100},
'dandelion': {'h':60, 's':100, 'l':60 },
'dark_oak_leaves': {'h':114, 's':64, 'l':22 },
'dark_oak_log': {'h':35, 's':93, 'l':30 },
'dark_oak_planks': {'h':35, 's':93, 'l':30 },
'dead_bush': {'h':0, 's':0, 'l':100},
'diorite': {'h':0, 's':0, 'l':32 },
'dirt': {'h':27, 's':51, 'l':15 },
'end_portal_frame': {'h':0, 's':100, 'l':50 },
'farmland': {'h':35, 's':93, 'l':15 },
'fire': {'h':55, 's':100, 'l':50 },
'flowing_lava': {'h':16, 's':100, 'l':48 },
'flowing_water': {'h':228, 's':50, 'l':23 },
'glass_pane': {'h':0, 's':0, 'l':100},
'granite': {'h':0, 's':0, 'l':32 },
'grass': {'h':94, 's':42, 'l':25 },
'grass_block': {'h':94, 's':42, 'l':32 },
'gravel': {'h':21, 's':18, 'l':20 },
'ice': {'h':240, 's':10, 'l':95 },
'infested_stone': {'h':320, 's':100, 'l':50 },
'iron_ore': {'h':22, 's':65, 'l':61 },
'iron_bars': {'h':22, 's':65, 'l':61 },
'ladder': {'h':35, 's':93, 'l':30 },
'lava': {'h':16, 's':100, 'l':48 },
'lilac': {'h':0, 's':0, 'l':100},
'lily_pad': {'h':114, 's':64, 'l':18 },
'lit_pumpkin': {'h':24, 's':100, 'l':45 },
'mossy_cobblestone': {'h':115, 's':30, 'l':50 },
'mushroom_stem': {'h':0, 's':0, 'l':100},
'oak_door': {'h':35, 's':93, 'l':30 },
'oak_fence': {'h':35, 's':93, 'l':30 },
'oak_fence_gate': {'h':35, 's':93, 'l':30 },
'oak_leaves': {'h':114, 's':64, 'l':22 },
'oak_log': {'h':35, 's':93, 'l':30 },
'oak_planks': {'h':35, 's':93, 'l':30 },
'oak_pressure_plate': {'h':35, 's':93, 'l':30 },
'oak_stairs': {'h':114, 's':64, 'l':22 },
'peony': {'h':0, 's':0, 'l':100},
'pink_tulip': {'h':0, 's':0, 'l':0 },
'poppy': {'h':0, 's':100, 'l':50 },
'pumpkin': {'h':24, 's':100, 'l':45 },
'rail': {'h':33, 's':81, 'l':50 },
'red_mushroom': {'h':0, 's':50, 'l':20 },
'red_mushroom_block': {'h':0, 's':50, 'l':20 },
'rose_bush': {'h':0, 's':0, 'l':100},
'sugar_cane': {'h':123, 's':70, 'l':50 },
'sand': {'h':53, 's':22, 'l':58 },
'sandstone': {'h':48, 's':31, 'l':40 },
'seagrass': {'h':94, 's':42, 'l':25 },
'sign': {'h':114, 's':64, 'l':22 },
'spruce_leaves': {'h':114, 's':64, 'l':22 },
'spruce_log': {'h':35, 's':93, 'l':30 },
'stone': {'h':0, 's':0, 'l':32 },
'stone_slab': {'h':0, 's':0, 'l':32 },
'tall_grass': {'h':94, 's':42, 'l':25 },
'tall_seagrass': {'h':94, 's':42, 'l':25 },
'torch': {'h':60, 's':100, 'l':50 },
'snow': {'h':240, 's':10, 'l':85 },
'spawner': {'h':180, 's':100, 'l':50 },
'vine': {'h':114, 's':64, 'l':18 },
'wall_torch': {'h':60, 's':100, 'l':50 },
'water': {'h':228, 's':50, 'l':23 },
'wheat': {'h':123, 's':60, 'l':50 },
'white_wool': {'h':0, 's':0, 'l':100},
}
## Color functions for map generation ##
# Hue given in degrees,
# saturation and lightness given either in range 0-1 or 0-100 and returned in kind
# From http://www.easyrgb.com/index.php?X=MATH&H=19#text19
if __name__ == '__main__':
if (len(sys.argv) == 1):
print("No world folder specified!")
sys.exit(64) # EX_USAGE
if sys.argv[1] == '--noshow' and len(sys.argv) > 2:
show = False
world_folder = sys.argv[2]
else:
show = True
world_folder = sys.argv[1]
# clean path name, eliminate trailing slashes. required for os.path.basename()
world_folder = os.path.normpath(world_folder)
if (not os.path.exists(world_folder)):
print("No such folder as "+world_folder)
sys.exit(72) # EX_IOERR
sys.exit(main(world_folder, show))
| 41.660714 | 128 | 0.473282 |
81763b53608a015d4f73d9be3a5324c6bd08db61 | 136 | py | Python | flaskcbv/protos/simple/apps/main/urls.py | procool/flaskcbv | 18c254c10ef03145073e1264a06a0313e811ad29 | [
"BSD-2-Clause"
] | 1 | 2020-02-24T13:08:16.000Z | 2020-02-24T13:08:16.000Z | flaskcbv/protos/simple/apps/main/urls.py | procool/flaskcbv | 18c254c10ef03145073e1264a06a0313e811ad29 | [
"BSD-2-Clause"
] | null | null | null | flaskcbv/protos/simple/apps/main/urls.py | procool/flaskcbv | 18c254c10ef03145073e1264a06a0313e811ad29 | [
"BSD-2-Clause"
] | null | null | null | from flaskcbv.url import Url, make_urls
from .views import mainView
namespases = make_urls(
Url('', mainView(), name="main"),
)
| 13.6 | 39 | 0.691176 |
81770013c6cc12c6db69c1cb5d883f8060329eda | 536 | py | Python | main/permissions.py | hellojoshuatonga/notepik | 8f251fe9a689a9be8248d4da6260fe7c8742e3c0 | [
"MIT"
] | null | null | null | main/permissions.py | hellojoshuatonga/notepik | 8f251fe9a689a9be8248d4da6260fe7c8742e3c0 | [
"MIT"
] | null | null | null | main/permissions.py | hellojoshuatonga/notepik | 8f251fe9a689a9be8248d4da6260fe7c8742e3c0 | [
"MIT"
] | null | null | null | # Rest framework
from rest_framework import permissions
| 35.733333 | 175 | 0.718284 |
8177b1f754a6ce02d1d064390b7211e8eae1df80 | 8,595 | py | Python | femagtools/vtu.py | dapu/femagtools | 95eaf750adc2013232cdf482e523b3900ac6eb08 | [
"BSD-2-Clause"
] | null | null | null | femagtools/vtu.py | dapu/femagtools | 95eaf750adc2013232cdf482e523b3900ac6eb08 | [
"BSD-2-Clause"
] | null | null | null | femagtools/vtu.py | dapu/femagtools | 95eaf750adc2013232cdf482e523b3900ac6eb08 | [
"BSD-2-Clause"
] | null | null | null | """
femagtools.vtu
~~~~~~~~~~~~~~
Read FEMAG vtu files
"""
import vtk
import pathlib
import numpy as np
def read(filename):
"""
Read vtu file and return Reader object.
Arguments:
filename: name of vtu file to be read
"""
return Reader(filename)
| 31.254545 | 83 | 0.52135 |
81780d1d21d48080345dc52c64611a0acffa03d7 | 232 | py | Python | __init__.py | rolc/python-package | 272fb6da9d7aa3c2a0c53dea6d4e958409ac5d12 | [
"MIT"
] | null | null | null | __init__.py | rolc/python-package | 272fb6da9d7aa3c2a0c53dea6d4e958409ac5d12 | [
"MIT"
] | null | null | null | __init__.py | rolc/python-package | 272fb6da9d7aa3c2a0c53dea6d4e958409ac5d12 | [
"MIT"
] | null | null | null | #!/usr/bin/python
#-------------------------------IMPORT--------------------------------#
from lib import *
#-------------------------------EXPORT--------------------------------#
__all__ = ['<#PREFIX#>_app','<#PREFIX#>_index']
| 23.2 | 71 | 0.284483 |
81787499b7aab46ab4834d4e0428415d4b205073 | 916 | py | Python | svm-rank/bag_of_words.py | Anthony-Alridge/individual_project | 9013aba2de92099dea5a703486da3dfab1dfbb61 | [
"MIT"
] | null | null | null | svm-rank/bag_of_words.py | Anthony-Alridge/individual_project | 9013aba2de92099dea5a703486da3dfab1dfbb61 | [
"MIT"
] | null | null | null | svm-rank/bag_of_words.py | Anthony-Alridge/individual_project | 9013aba2de92099dea5a703486da3dfab1dfbb61 | [
"MIT"
] | null | null | null | from collections import Counter
import numpy as np
| 28.625 | 101 | 0.663755 |
81788a0ffc02bedb32998891f0a147adfc80c30e | 1,507 | py | Python | pymps/ansatz/tests/test_sweeper.py | GiggleLiu/pymps | c8314581010d68d3fa34af6e87b6af2969fc261d | [
"MIT"
] | 4 | 2018-02-17T05:35:54.000Z | 2021-09-12T10:14:57.000Z | pymps/ansatz/tests/test_sweeper.py | GiggleLiu/pymps | c8314581010d68d3fa34af6e87b6af2969fc261d | [
"MIT"
] | null | null | null | pymps/ansatz/tests/test_sweeper.py | GiggleLiu/pymps | c8314581010d68d3fa34af6e87b6af2969fc261d | [
"MIT"
] | null | null | null | #!/usr/bin/python
'''
Tests for MPS and MPO
'''
from numpy import *
import matplotlib.pyplot as plt
from numpy.testing import dec, assert_, assert_raises, assert_almost_equal, assert_allclose
import pdb
from ..sweep import *
if __name__ == '__main__':
test_iterator()
| 31.395833 | 91 | 0.512276 |
8178e5a5cdd16d8a39e43e9f1e2b33dd9e55953c | 4,471 | py | Python | ui/numbered_menu.py | LouisPi/PiPortableRecorder | 430a4b6e1e869cbd68fd89bbf97261710fd7db6b | [
"Apache-2.0",
"MIT"
] | 51 | 2017-12-03T21:59:13.000Z | 2021-01-02T17:13:34.000Z | ui/numbered_menu.py | LouisPi/PiPortableRecorder | 430a4b6e1e869cbd68fd89bbf97261710fd7db6b | [
"Apache-2.0",
"MIT"
] | 153 | 2017-10-27T19:59:46.000Z | 2020-01-14T23:58:57.000Z | ui/numbered_menu.py | LouisPi/PiPortableRecorder | 430a4b6e1e869cbd68fd89bbf97261710fd7db6b | [
"Apache-2.0",
"MIT"
] | 26 | 2017-11-16T11:10:56.000Z | 2022-03-29T18:44:48.000Z | from threading import Lock
from time import time
from ui import Menu
from ui.utils import clamp, check_value_lock, to_be_foreground
| 33.616541 | 116 | 0.635652 |
8179e93b1b83227b20bf562f4c5eea1ac565d48c | 358 | py | Python | Math/RussianPeasantMultiplication.py | kopok2/algorithms | efb6a423a8447d99584335e9fef8d9b3c74e2ad8 | [
"MIT"
] | null | null | null | Math/RussianPeasantMultiplication.py | kopok2/algorithms | efb6a423a8447d99584335e9fef8d9b3c74e2ad8 | [
"MIT"
] | null | null | null | Math/RussianPeasantMultiplication.py | kopok2/algorithms | efb6a423a8447d99584335e9fef8d9b3c74e2ad8 | [
"MIT"
] | null | null | null | # coding=utf-8
"""Russian Peasant Multiplication algorithm Python implementation."""
if __name__ == '__main__':
for x in range(10):
for y in range(10):
print(x, y, x * y, russ_peasant(x, y))
| 18.842105 | 69 | 0.511173 |
817a13b6a8ab8c5ae685c931b654984848f5a51f | 36,420 | py | Python | bustime/requestmock.py | RemyPorter/BusTimeClient | a0bd3ef7e24e132e964e6847261ed6888e5735ee | [
"MIT"
] | 2 | 2017-06-24T14:10:42.000Z | 2018-02-16T20:44:07.000Z | bustime/requestmock.py | RemyPorter/BusTimeClient | a0bd3ef7e24e132e964e6847261ed6888e5735ee | [
"MIT"
] | null | null | null | bustime/requestmock.py | RemyPorter/BusTimeClient | a0bd3ef7e24e132e964e6847261ed6888e5735ee | [
"MIT"
] | null | null | null | from urllib.parse import urlparse, parse_qs
from io import BytesIO
import json
| 337.222222 | 33,400 | 0.578913 |
817aa92cbced1c3e3227a2a7dec4ed035c84f33f | 1,589 | py | Python | Assignment 4/src/optim/lr_scheduler.py | vamsi3/CS763-IIT-Bombay | 2c4650587eee8bb15944b95101b7434746ec03af | [
"MIT"
] | 1 | 2022-01-28T12:21:36.000Z | 2022-01-28T12:21:36.000Z | Assignment 4/src/optim/lr_scheduler.py | vamsi3/IITB-Computer-Vision | 2c4650587eee8bb15944b95101b7434746ec03af | [
"MIT"
] | null | null | null | Assignment 4/src/optim/lr_scheduler.py | vamsi3/IITB-Computer-Vision | 2c4650587eee8bb15944b95101b7434746ec03af | [
"MIT"
] | null | null | null | import math
import torch
from bisect import bisect_right
| 29.425926 | 120 | 0.668345 |
817af60b313398519be2e69061cd3bf593b81217 | 2,972 | py | Python | gluon/dal/adapters/teradata.py | lightcoder127/Web2py | d604816b487aaf758075805cffdb89f45dea906e | [
"BSD-3-Clause"
] | 2 | 2017-02-02T00:31:48.000Z | 2017-08-08T22:36:25.000Z | gluon/dal/adapters/teradata.py | crania/containerservices | 0ffbadb3b5a259abc74ed433b69bf6342b99ef83 | [
"BSD-3-Clause"
] | null | null | null | gluon/dal/adapters/teradata.py | crania/containerservices | 0ffbadb3b5a259abc74ed433b69bf6342b99ef83 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from .._globals import IDENTITY
from ..connection import ConnectionPool
from .base import BaseAdapter
| 38.102564 | 156 | 0.601615 |
817bffa6766be54af974175321c8e15902437d0a | 8,367 | py | Python | conan_tests/external_tools/vswhere_test.py | conan-io/test | 273835a701aca3934694dfa1ec082e58d5332660 | [
"MIT"
] | 2 | 2019-02-09T10:18:25.000Z | 2020-12-15T22:22:03.000Z | conan_tests/external_tools/vswhere_test.py | conan-io/test | 273835a701aca3934694dfa1ec082e58d5332660 | [
"MIT"
] | 9 | 2018-02-22T21:42:17.000Z | 2020-10-16T03:54:19.000Z | conan_tests/external_tools/vswhere_test.py | conan-io/test | 273835a701aca3934694dfa1ec082e58d5332660 | [
"MIT"
] | 7 | 2017-12-19T09:35:31.000Z | 2020-09-23T16:17:59.000Z | import os
import platform
import unittest
import nose
from conans import tools
from conans.errors import ConanException
from conans.model.version import Version
from conans import __version__ as client_version
from conans.model import settings
from conans.test.utils.tools import TestClient
from conans.test.assets.visual_project_files import get_vs_project_files
| 45.472826 | 126 | 0.676826 |
817dad7fd6fc56d0d2967576e42cee9331599cf9 | 7,797 | py | Python | pinliner/pinliner.py | minazukie/pinliner | 4d1f879a8df2b03e7335536735840274bbb26416 | [
"Apache-2.0"
] | 53 | 2016-03-29T10:30:41.000Z | 2022-03-23T17:49:38.000Z | pinliner/pinliner.py | minazukie/pinliner | 4d1f879a8df2b03e7335536735840274bbb26416 | [
"Apache-2.0"
] | 4 | 2017-07-21T15:21:54.000Z | 2022-03-17T19:51:07.000Z | pinliner/pinliner.py | minazukie/pinliner | 4d1f879a8df2b03e7335536735840274bbb26416 | [
"Apache-2.0"
] | 9 | 2017-07-21T18:05:45.000Z | 2022-01-15T19:57:00.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import argparse
import json
import os
from pinliner import __version__
import sys
TEMPLATE_FILE = 'importer.template'
TEMPLATE_PATTERN = '${CONTENTS}'
if __name__ == '__main__':
main()
| 36.434579 | 79 | 0.62896 |
817e1fdfe583e5b2b44c9f5c5fb7e9b12305519f | 305 | py | Python | list_s3_buckets.py | MarijaKalebota/aws-playground | 2aaf3da65ba3f0cc5c2c222a10659d219f9136e8 | [
"MIT"
] | null | null | null | list_s3_buckets.py | MarijaKalebota/aws-playground | 2aaf3da65ba3f0cc5c2c222a10659d219f9136e8 | [
"MIT"
] | null | null | null | list_s3_buckets.py | MarijaKalebota/aws-playground | 2aaf3da65ba3f0cc5c2c222a10659d219f9136e8 | [
"MIT"
] | null | null | null | from dotenv import load_dotenv
load_dotenv()
import os
import boto3
#s3 = boto3.resource('s3')
s3 = boto3.resource('s3', aws_access_key_id=os.environ.get("AWS_KEY_ID"),
aws_secret_access_key=os.environ.get("AWS_SECRET_KEY"))
for bucket in s3.buckets.all():
print(bucket.name)
| 23.461538 | 77 | 0.704918 |
817ee460ad53e44fa65e444a49afe839ce9a20b2 | 2,570 | py | Python | GAScore/testbench/hold_buffer.py | sharm294/shoal | db7dd08a70882585fb9740a39b57b4b7a48b3081 | [
"MIT"
] | 1 | 2021-04-12T06:41:33.000Z | 2021-04-12T06:41:33.000Z | GAScore/testbench/hold_buffer.py | UofT-HPRC/shoal | db7dd08a70882585fb9740a39b57b4b7a48b3081 | [
"MIT"
] | null | null | null | GAScore/testbench/hold_buffer.py | UofT-HPRC/shoal | db7dd08a70882585fb9740a39b57b4b7a48b3081 | [
"MIT"
] | null | null | null | import os
from sonar.testbench import Testbench, Module, TestVector, Thread
from sonar.interfaces import AXIS
from sonar_strToInt import strToInt
# Top-level sonar testbench describing stimulus/checks for the hold_buffer DUT.
hold_buffer = Testbench.default('hold_buffer')
# Generated testbench collateral is written to build/hold_buffer/ next to this script.
filepath = os.path.join(os.path.dirname(__file__), 'build/hold_buffer/')
# Device under test: 20ns clock, active-low reset, and a 16-bit release input.
dut = Module.default("DUT")
dut.add_clock_port('ap_clk', '20ns')
dut.add_reset_port('ap_rst_n')
dut.add_port('dataRelease_V', 'input', 16)
# 64-bit AXI-Stream input (testbench drives it; DUT is the slave), tkeep enabled.
axis_input = AXIS('axis_input', 'slave', 'ap_clk', c_struct='axis_word', c_stream='uaxis_l')
axis_input.port.init_channels('tkeep', 64, True)
dut.add_interface(axis_input)
# 64-bit AXI-Stream output (DUT is the master), tkeep enabled.
axis_output = AXIS('axis_output', 'master', 'ap_clk', c_struct='axis_word', c_stream='uaxis_l')
axis_output.port.init_channels('tkeep', 64, True)
dut.add_interface(axis_output)
hold_buffer.add_module(dut)
################################################################################
# Test Vectors
################################################################################
# Initialization thread (added to each test vector to reset everything)
initT = Thread()
initT.init_signals()
initT.wait_negedge('ap_clk')
initT.add_delay('40ns')
initT.set_signal('ap_rst_n', 1)
# Output side is always ready to accept data.
initT.set_signal('axis_output_tready', 1)
#-------------------------------------------------------------------------------
# Release_A: assert dataRelease_V *before* writing the two data words, then
# check that both words flow through in order.
#-------------------------------------------------------------------------------
Release_A = TestVector()
Release_A.add_thread(initT)
rA_t1 = Thread()
rA_t1.add_delay('100ns')
rA_t1.init_timer()
rA_t1.set_signal('dataRelease_V', 1)
axis_input.writes(rA_t1, [
    {"tdata": 0xDEF, "callTB": 1},
    {"tdata": 0xFED, "callTB": 1},
])
Release_A.add_thread(rA_t1)
# Checker thread: expect the two words in write order, then report timing.
rA_t2 = Thread()
axis_output.read(rA_t2, 0xDEF)
axis_output.read(rA_t2, 0xFED)
rA_t2.print_elapsed_time("Release_A")
rA_t2.end_vector()
Release_A.add_thread(rA_t2)
#-------------------------------------------------------------------------------
# Release_B: same data as Release_A, but dataRelease_V is asserted *after*
# the two words have been written (data should be held until release).
#-------------------------------------------------------------------------------
Release_B = TestVector()
Release_B.add_thread(initT)
rB_t1 = Thread()
rB_t1.add_delay('100ns')
rB_t1.init_timer()
axis_input.writes(rB_t1, [
    {"tdata": 0xDEF, "callTB": 1},
    {"tdata": 0xFED, "callTB": 1},
])
rB_t1.set_signal('dataRelease_V', 1)
Release_B.add_thread(rB_t1)
rB_t2 = Thread()
axis_output.read(rB_t2, 0xDEF)
axis_output.read(rB_t2, 0xFED)
rB_t2.print_elapsed_time("Release_B")
rB_t2.end_vector()
Release_B.add_thread(rB_t2)
# Register both vectors and emit the testbench sources.
hold_buffer.add_test_vector(Release_A)
hold_buffer.add_test_vector(Release_B)
hold_buffer.generateTB(filepath, 'all')
| 27.934783 | 95 | 0.614008 |
8180ea48ed0eaf64449e035d61a657bb7146e229 | 16,957 | py | Python | vaxtools/utils/pair.py | menis/vaxtools | 221343d0a2b9ecefc777ff5a94cb12eaa1524813 | [
"MIT"
] | null | null | null | vaxtools/utils/pair.py | menis/vaxtools | 221343d0a2b9ecefc777ff5a94cb12eaa1524813 | [
"MIT"
] | null | null | null | vaxtools/utils/pair.py | menis/vaxtools | 221343d0a2b9ecefc777ff5a94cb12eaa1524813 | [
"MIT"
] | 1 | 2018-10-10T21:59:08.000Z | 2018-10-10T21:59:08.000Z | #!/usr/bin/env python
# filename: pair.py
#
# Copyright (c) 2015 Bryan Briney
# License: The MIT license (http://opensource.org/licenses/MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import copy
import sys
import traceback
from Bio.Seq import Seq
from Bio.Alphabet import generic_dna
from abtools import germlines
from abtools.alignment import global_alignment
from abtools.sequence import Sequence
def fasta(self, key='vdj_nt', append_chain=True):
    '''
    Returns the sequence pair as a FASTA-formatted string. If the Pair object
    contains both heavy and light chain sequences, both entries are included
    in the single returned string.

    By default each FASTA entry contains the 'vdj_nt' sequence of the chain;
    pass a different dict key via <key> to select an alternate sequence.

    By default the chain name ('heavy' or 'light') is appended to the
    sequence name (e.g. ">MySequence_heavy"). Set <append_chain> to False to
    use the bare pair name (which duplicates names when both chains exist).
    '''
    entries = []
    for chain_name in ('heavy', 'light'):
        seq = getattr(self, chain_name)
        if seq is None:
            continue
        suffix = '_{}'.format(chain_name) if append_chain else ''
        entries.append('>{}{}\n{}'.format(seq['seq_id'], suffix, seq[key]))
    return '\n'.join(entries)
def get_pairs(db, collection, experiment=None, subject=None, group=None, name='seq_id',
              delim=None, delim_occurance=1, pairs_only=False):
    '''
    Gets sequences and assigns them to the appropriate mAb pair, based on the
    sequence name.

    Inputs:
    ::db:: is a pymongo database connection object
    ::collection:: is the collection name, as a string
    If ::subject::, ::group:: or ::experiment:: is provided, only sequences whose
    corresponding field matches will be included. Each filter can be a single
    string or an iterable (list or tuple) of strings; iterables are translated
    into a MongoDB '$in' query. Values of any other type are ignored.
    ::name:: is the dict key of the field used to group sequences into pairs.
    Default is 'seq_id'.
    ::delim:: is an optional delimiter used to truncate the ::name:: field.
    Default is None (no truncation).
    ::delim_occurance:: is the occurance of the delimiter at which to trim
    (delim.join(name.split(delim)[:delim_occurance])); -1 truncates after the
    last occurance. Default is 1.
    ::pairs_only:: if True, only truly paired sequences (pair.is_pair == True)
    are returned. Default is False.

    Returns a list of Pair objects, one for each mAb pair.
    '''
    def _as_query(value):
        # A list/tuple of values becomes a MongoDB '$in' query; a plain string
        # is matched directly. Other types are ignored, matching the original
        # behavior (which only handled list/tuple/str).
        if isinstance(value, (list, tuple)):
            return {'$in': value}
        if isinstance(value, str):
            return value
        return None

    match = {}
    for field, value in (('subject', subject),
                         ('group', group),
                         ('experiment', experiment)):
        if value is None:
            continue
        query = _as_query(value)
        if query is not None:
            match[field] = query
    seqs = list(db[collection].find(match))
    return assign_pairs(seqs, name=name, delim=delim,
                        delim_occurance=delim_occurance, pairs_only=pairs_only)
def assign_pairs(seqs, name='seq_id', delim=None, delim_occurance=1, pairs_only=False):
    '''
    Assigns sequences to the appropriate mAb pair, based on the sequence name.

    Inputs:
    ::seqs:: is a list of dicts, of the format returned by querying a MongoDB
    containing Abstar output.
    ::name:: is the dict key of the field used to group sequences into pairs.
    Default is 'seq_id'.
    ::delim:: is an optional delimiter used to truncate the ::name:: field.
    Default is None (no truncation).
    ::delim_occurance:: is the occurance of the delimiter at which to trim
    (delim.join(name.split(delim)[:delim_occurance])); -1 truncates after the
    last occurance. Default is 1.
    ::pairs_only:: if True, only truly paired sequences (pair.is_pair == True)
    are returned. Default is False.

    Returns a list of Pair objects, one for each mAb pair.
    '''
    pdict = {}
    for s in seqs:
        # Derive the pair name, optionally truncating at the delimiter.
        if delim is not None:
            pname = delim.join(s[name].split(delim)[:delim_occurance])
        else:
            pname = s[name]
        # setdefault replaces the original if/else insertion dance.
        pdict.setdefault(pname, []).append(s)
    pairs = [Pair(group, name=pname) for pname, group in pdict.items()]
    if pairs_only:
        pairs = [p for p in pairs if p.is_pair]
    return pairs
def deduplicate(pairs, aa=False, ignore_primer_regions=False):
    '''
    Removes duplicate sequences from a list of Pair objects.

    If a Pair has heavy and light chains, both chains must identically match
    heavy and light chains from another Pair to be considered a duplicate. If a
    Pair has only a single chain, identical matches to that chain will cause
    the single chain Pair to be considered a duplicate, even if the comparison
    Pair has both chains.

    Note that identical sequences are identified by simple string comparison,
    so sequences of different length that are identical over the entirety of
    the shorter sequence are not considered duplicates.

    By default, comparison is made on the nucleotide sequence ('vdj_nt'). To
    use the amino acid sequence ('vdj_aa') instead, set aa=True. If
    ignore_primer_regions is True, a fixed number of leading/trailing
    residues (4 for amino acids, 12 for nucleotides) is stripped before
    comparison.
    '''
    vdj = 'vdj_aa' if aa else 'vdj_nt'
    offset = 4 if aa else 12

    def _trim(chain):
        # Optionally strip the primer-encoded leading/trailing residues.
        seq = chain[vdj]
        return seq[offset:-offset] if ignore_primer_regions else seq

    def _is_duplicate(p, nr):
        # p duplicates nr iff every chain present in p has an identical
        # counterpart in nr (a chain absent from p is not compared).
        if p.heavy is not None:
            if nr.heavy is None or _trim(p.heavy) != _trim(nr.heavy):
                return False
        if p.light is not None:
            if nr.light is None or _trim(p.light) != _trim(nr.light):
                return False
        return True

    # Process true pairs first so single-chain Pairs dedupe against full pairs.
    ordered = [p for p in pairs if p.is_pair] + [p for p in pairs if not p.is_pair]
    nr_pairs = []
    for p in ordered:
        # any() short-circuits on the first match instead of checking every
        # retained pair, as the original implementation did.
        if not any(_is_duplicate(p, nr) for nr in nr_pairs):
            nr_pairs.append(p)
    return nr_pairs
def refine(pairs, heavy=True, light=True, species='human'):
    '''
    Returns deep copies of the given Pair objects with each copy's refine()
    method applied; the input Pairs are left untouched.

    ::heavy:: and ::light:: select which chains to refine; ::species::
    (default 'human') is forwarded to Pair.refine().
    '''
    copied = copy.deepcopy(pairs)
    for pair in copied:
        pair.refine(heavy, light, species)
    return copied
| 37.766147 | 109 | 0.606062 |
8182de99b8accab6efbe98871df960f416bdddf7 | 38,104 | py | Python | t3f/riemannian.py | robol/t3f | d61037ba9c03c344e9fc31fce46648347b762b39 | [
"MIT"
] | null | null | null | t3f/riemannian.py | robol/t3f | d61037ba9c03c344e9fc31fce46648347b762b39 | [
"MIT"
] | null | null | null | t3f/riemannian.py | robol/t3f | d61037ba9c03c344e9fc31fce46648347b762b39 | [
"MIT"
] | null | null | null | import tensorflow as tf
from t3f.tensor_train import TensorTrain
from t3f.tensor_train_batch import TensorTrainBatch
from t3f import shapes
from t3f import decompositions
def project_sum(what, where, weights=None):
  """Project sum of `what` TTs on the tangent space of `where` TT.

  project_sum(what, x) = P_x(what)
  project_sum(batch_what, x) = P_x(\sum_i batch_what[i])
  project_sum(batch_what, x, weights) = P_x(\sum_j weights[j] * batch_what[j])

  This function implements the algorithm from the paper [1], theorem 3.1.

  [1] C. Lubich, I. Oseledets and B. Vandereycken, Time integration of
    Tensor Trains.

  Args:
    what: TensorTrain or TensorTrainBatch. In the case of batch returns
      projection of the sum of elements in the batch.
    where: TensorTrain, TT-tensor or TT-matrix on which tangent space to project
    weights: python list or tf.Tensor of numbers or None, weights of the sum

  Returns:
    a TensorTrain with the TT-ranks equal 2 * tangent_space_tens.get_tt_ranks()

  Complexity:
    O(d r_where^3 m) for orthogonalizing the TT-cores of where
    +O(batch_size d r_what r_where n (r_what + r_where))
    d is the number of TT-cores (what.ndims());
    r_what is the largest TT-rank of what max(what.get_tt_rank())
    r_where is the largest TT-rank of where
    n is the size of the axis dimension of what and where e.g.
      for a tensor of size 4 x 4 x 4, n is 4;
      for a 9 x 64 matrix of raw shape (3, 3, 3) x (4, 4, 4) n is 12
  """
  # Always work with batch of TT objects for simplicity.
  what = shapes.expand_batch_dim(what)
  if weights is not None:
    weights = tf.convert_to_tensor(weights, dtype=where.dtype)
  if not isinstance(where, TensorTrain):
    raise ValueError('The first argument should be a TensorTrain object, got '
                     '"%s".' % where)
  if where.get_raw_shape() != what.get_raw_shape():
    raise ValueError('The shapes of the tensor we want to project and of the '
                     'tensor on which tangent space we want to project should '
                     'match, got %s and %s.' %
                     (where.get_raw_shape(),
                      what.get_raw_shape()))
  dtypes_compatible = (where.dtype.is_compatible_with(what.dtype) or
                       what.dtype.is_compatible_with(where.dtype))
  if not dtypes_compatible:
    raise ValueError('Dtypes of the arguments should coincide, got %s and %s.' %
                     (where.dtype,
                      what.dtype))
  # Left- and right-orthogonalized copies of `where`; their cores serve as the
  # left/right frames of the tangent space (cf. reference [1] above).
  left_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      where)
  right_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      left_tangent_space_tens, left_to_right=False)
  ndims = where.ndims()
  dtype = where.dtype
  raw_shape = shapes.lazy_raw_shape(where)
  batch_size = shapes.lazy_batch_size(what)
  right_tangent_tt_ranks = shapes.lazy_tt_ranks(right_tangent_space_tens)
  left_tangent_tt_ranks = shapes.lazy_tt_ranks(left_tangent_space_tens)
  # For einsum notation: TT-matrices have two mode indices per core ('ij'),
  # TT-tensors have one ('i').
  mode_str = 'ij' if where.is_tt_matrix() else 'i'
  right_rank_dim = where.right_tt_rank_dim
  left_rank_dim = where.left_tt_rank_dim
  # The output is a batch only when per-sample weight vectors are provided
  # (weights of shape batch_size x output_batch_size).
  if weights is not None:
    weights_shape = weights.get_shape()
    output_is_batch = len(weights_shape) > 1 and weights_shape[1] > 1
  else:
    output_is_batch = False
  output_batch_str = 'o' if output_is_batch else ''
  if output_is_batch:
    # Rank dims shift by one to make room for the leading output batch dim.
    right_rank_dim += 1
    left_rank_dim += 1
    output_batch_size = weights.get_shape()[1].value
  # Prepare rhs vectors.
  # rhs[core_idx] is of size
  # batch_size x tensor_tt_ranks[core_idx] x tangent_tt_ranks[core_idx]
  # (right-to-left partial contractions of `what` with the right-orthogonal cores).
  rhs = [None] * (ndims + 1)
  rhs[ndims] = tf.ones((batch_size, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1, 0, -1):
    tens_core = what.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
    einsum_str = 'sa{0}b,sbd,c{0}d->sac'.format(mode_str)
    rhs[core_idx] = tf.einsum(einsum_str, tens_core, rhs[core_idx + 1],
                              right_tang_core)
  # Prepare lhs vectors.
  # lhs[core_idx] is of size
  # batch_size x tangent_tt_ranks[core_idx] x tensor_tt_ranks[core_idx]
  # (left-to-right partial contractions with the left-orthogonal cores).
  lhs = [None] * (ndims + 1)
  lhs[0] = tf.ones((batch_size, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    einsum_str = 'sab,a{0}c,sb{0}d->scd'.format(mode_str)
    lhs[core_idx + 1] = tf.einsum(einsum_str, lhs[core_idx], left_tang_core,
                                  tens_core)
  # Left to right sweep.
  res_cores_list = []
  for core_idx in range(ndims):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
    if core_idx < ndims - 1:
      einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str)
      proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
      einsum_str = 'a{0}b,sbc->sa{0}c'.format(mode_str)
      proj_core -= tf.einsum(einsum_str, left_tang_core, lhs[core_idx + 1])
      if weights is None:
        # No weights: sum over the batch index 's' while contracting with rhs.
        einsum_str = 'sa{0}b,sbc->a{0}c'.format(mode_str)
        proj_core = tf.einsum(einsum_str, proj_core, rhs[core_idx + 1])
      else:
        # With weights: keep 's', then fold it in via the weight matrix.
        einsum_str = 'sa{0}b,sbc->sa{0}c'.format(mode_str, output_batch_str)
        proj_core_s = tf.einsum(einsum_str, proj_core, rhs[core_idx + 1])
        einsum_str = 's{1},sa{0}c->{1}a{0}c'.format(mode_str, output_batch_str)
        proj_core = tf.einsum(einsum_str, weights, proj_core_s)
    if core_idx == ndims - 1:
      if weights is None:
        einsum_str = 'sab,sb{0}c->a{0}c'.format(mode_str)
        proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
      else:
        einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str, output_batch_str)
        proj_core_s = tf.einsum(einsum_str, lhs[core_idx], tens_core)
        einsum_str = 's{1},sa{0}c->{1}a{0}c'.format(mode_str, output_batch_str)
        proj_core = tf.einsum(einsum_str, weights, proj_core_s)
    if output_is_batch:
      # Add batch dimension of size output_batch_size to left_tang_core and
      # right_tang_core
      extended_left_tang_core = tf.expand_dims(left_tang_core, 0)
      extended_right_tang_core = tf.expand_dims(right_tang_core, 0)
      if where.is_tt_matrix():
        extended_left_tang_core = tf.tile(extended_left_tang_core,
                                          [output_batch_size, 1, 1, 1, 1])
        extended_right_tang_core = tf.tile(extended_right_tang_core,
                                           [output_batch_size, 1, 1, 1, 1])
      else:
        extended_left_tang_core = tf.tile(extended_left_tang_core,
                                          [output_batch_size, 1, 1, 1])
        extended_right_tang_core = tf.tile(extended_right_tang_core,
                                           [output_batch_size, 1, 1, 1])
    else:
      extended_left_tang_core = left_tang_core
      extended_right_tang_core = right_tang_core
    # Assemble this core of the result: interior cores get the 2x2 block
    # structure [[right_tang_core, 0], [proj_core, left_tang_core]] along the
    # rank dims; the first/last cores keep a single block row/column. This is
    # why the result's TT-ranks are doubled relative to `where`.
    if core_idx == 0:
      res_core = tf.concat((proj_core, extended_left_tang_core),
                           axis=right_rank_dim)
    elif core_idx == ndims - 1:
      res_core = tf.concat((extended_right_tang_core, proj_core), axis=left_rank_dim)
    else:
      rank_1 = right_tangent_tt_ranks[core_idx]
      rank_2 = left_tangent_tt_ranks[core_idx + 1]
      if where.is_tt_matrix():
        mode_size_n = raw_shape[0][core_idx]
        mode_size_m = raw_shape[1][core_idx]
        shape = [rank_1, mode_size_n, mode_size_m, rank_2]
      else:
        mode_size = raw_shape[0][core_idx]
        shape = [rank_1, mode_size, rank_2]
      if output_is_batch:
        shape = [output_batch_size] + shape
      zeros = tf.zeros(shape, dtype)
      upper = tf.concat((extended_right_tang_core, zeros), axis=right_rank_dim)
      lower = tf.concat((proj_core, extended_left_tang_core),
                        axis=right_rank_dim)
      res_core = tf.concat((upper, lower), axis=left_rank_dim)
    res_cores_list.append(res_core)
  # TODO: TT-ranks.
  if output_is_batch:
    res = TensorTrainBatch(res_cores_list, where.get_raw_shape(),
                           batch_size=output_batch_size)
  else:
    res = TensorTrain(res_cores_list, where.get_raw_shape())
  # Remember the tangent-space base; pairwise_flat_inner_projected checks it.
  res.projection_on = where
  return res
def project(what, where):
  """Project `what` TTs on the tangent space of `where` TT.

  project(what, x) = P_x(what)
  project(batch_what, x) = batch(P_x(batch_what[0]), ..., P_x(batch_what[N]))

  This function implements the algorithm from the paper [1], theorem 3.1.

  [1] C. Lubich, I. Oseledets and B. Vandereycken, Time integration of
    Tensor Trains.

  Args:
    what: TensorTrain or TensorTrainBatch. In the case of batch returns
      batch with projection of each individual tensor.
    where: TensorTrain, TT-tensor or TT-matrix on which tangent space to project

  Returns:
    a TensorTrain with the TT-ranks equal 2 * tangent_space_tens.get_tt_ranks()

  Complexity:
    O(d r_where^3 m) for orthogonalizing the TT-cores of where
    +O(batch_size d r_what r_where n (r_what + r_where))
    d is the number of TT-cores (what.ndims());
    r_what is the largest TT-rank of what max(what.get_tt_rank())
    r_where is the largest TT-rank of where
    n is the size of the axis dimension of what and where e.g.
      for a tensor of size 4 x 4 x 4, n is 4;
      for a 9 x 64 matrix of raw shape (3, 3, 3) x (4, 4, 4) n is 12
  """
  if not isinstance(where, TensorTrain):
    raise ValueError('The first argument should be a TensorTrain object, got '
                     '"%s".' % where)
  if where.get_raw_shape() != what.get_raw_shape():
    raise ValueError('The shapes of the tensor we want to project and of the '
                     'tensor on which tangent space we want to project should '
                     'match, got %s and %s.' %
                     (where.get_raw_shape(),
                      what.get_raw_shape()))
  dtypes_compatible = (where.dtype.is_compatible_with(what.dtype) or
                       what.dtype.is_compatible_with(where.dtype))
  if not dtypes_compatible:
    raise ValueError('Dtypes of the arguments should coincide, got %s and %s.' %
                     (where.dtype,
                      what.dtype))
  # Left- and right-orthogonalized copies of `where`; their cores serve as the
  # left/right frames of the tangent space (cf. reference [1] above).
  left_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      where)
  right_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      left_tangent_space_tens, left_to_right=False)
  ndims = where.ndims()
  dtype = where.dtype
  raw_shape = shapes.lazy_raw_shape(where)
  right_tangent_tt_ranks = shapes.lazy_tt_ranks(right_tangent_space_tens)
  left_tangent_tt_ranks = shapes.lazy_tt_ranks(left_tangent_space_tens)
  # For einsum notation: TT-matrices have two mode indices per core ('ij'),
  # TT-tensors have one ('i').
  mode_str = 'ij' if where.is_tt_matrix() else 'i'
  right_rank_dim = what.right_tt_rank_dim
  left_rank_dim = what.left_tt_rank_dim
  # Unlike project_sum, a batched `what` yields a batched result
  # (each element projected individually, with no summation over the batch).
  output_is_batch = isinstance(what, TensorTrainBatch)
  if output_is_batch:
    output_batch_size = what.batch_size
  # Always work with batch of TT objects for simplicity.
  what = shapes.expand_batch_dim(what)
  batch_size = shapes.lazy_batch_size(what)
  # Prepare rhs vectors.
  # rhs[core_idx] is of size
  # batch_size x tensor_tt_ranks[core_idx] x tangent_tt_ranks[core_idx]
  # (right-to-left partial contractions of `what` with the right-orthogonal cores).
  rhs = [None] * (ndims + 1)
  rhs[ndims] = tf.ones((batch_size, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1, 0, -1):
    tens_core = what.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
    einsum_str = 'sa{0}b,sbd,c{0}d->sac'.format(mode_str)
    rhs[core_idx] = tf.einsum(einsum_str, tens_core, rhs[core_idx + 1],
                              right_tang_core)
  # Prepare lhs vectors.
  # lhs[core_idx] is of size
  # batch_size x tangent_tt_ranks[core_idx] x tensor_tt_ranks[core_idx]
  # (left-to-right partial contractions with the left-orthogonal cores).
  lhs = [None] * (ndims + 1)
  lhs[0] = tf.ones((batch_size, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    einsum_str = 'sab,a{0}c,sb{0}d->scd'.format(mode_str)
    lhs[core_idx + 1] = tf.einsum(einsum_str, lhs[core_idx], left_tang_core,
                                  tens_core)
  # Left to right sweep.
  res_cores_list = []
  for core_idx in range(ndims):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
    if core_idx < ndims - 1:
      einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str)
      proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
      einsum_str = 'a{0}b,sbc->sa{0}c'.format(mode_str)
      proj_core -= tf.einsum(einsum_str, left_tang_core, lhs[core_idx + 1])
      # Keep the batch index 's' in the output only when the result is a batch.
      if output_is_batch:
        einsum_str = 'sa{0}b,sbc->sa{0}c'.format(mode_str)
      else:
        einsum_str = 'sa{0}b,sbc->a{0}c'.format(mode_str)
      proj_core = tf.einsum(einsum_str, proj_core, rhs[core_idx + 1])
    if core_idx == ndims - 1:
      if output_is_batch:
        einsum_str = 'sab,sb{0}c->sa{0}c'.format(mode_str)
      else:
        einsum_str = 'sab,sb{0}c->a{0}c'.format(mode_str)
      proj_core = tf.einsum(einsum_str, lhs[core_idx], tens_core)
    if output_is_batch:
      # Add batch dimension of size output_batch_size to left_tang_core and
      # right_tang_core
      extended_left_tang_core = tf.expand_dims(left_tang_core, 0)
      extended_right_tang_core = tf.expand_dims(right_tang_core, 0)
      if where.is_tt_matrix():
        extended_left_tang_core = tf.tile(extended_left_tang_core,
                                          [output_batch_size, 1, 1, 1, 1])
        extended_right_tang_core = tf.tile(extended_right_tang_core,
                                           [output_batch_size, 1, 1, 1, 1])
      else:
        extended_left_tang_core = tf.tile(extended_left_tang_core,
                                          [output_batch_size, 1, 1, 1])
        extended_right_tang_core = tf.tile(extended_right_tang_core,
                                           [output_batch_size, 1, 1, 1])
    else:
      extended_left_tang_core = left_tang_core
      extended_right_tang_core = right_tang_core
    # Assemble this core of the result: interior cores get the 2x2 block
    # structure [[right_tang_core, 0], [proj_core, left_tang_core]] along the
    # rank dims; the first/last cores keep a single block row/column. This is
    # why the result's TT-ranks are doubled relative to `where`.
    if core_idx == 0:
      res_core = tf.concat((proj_core, extended_left_tang_core),
                           axis=right_rank_dim)
    elif core_idx == ndims - 1:
      res_core = tf.concat((extended_right_tang_core, proj_core), axis=left_rank_dim)
    else:
      rank_1 = right_tangent_tt_ranks[core_idx]
      rank_2 = left_tangent_tt_ranks[core_idx + 1]
      if where.is_tt_matrix():
        mode_size_n = raw_shape[0][core_idx]
        mode_size_m = raw_shape[1][core_idx]
        shape = [rank_1, mode_size_n, mode_size_m, rank_2]
      else:
        mode_size = raw_shape[0][core_idx]
        shape = [rank_1, mode_size, rank_2]
      if output_is_batch:
        shape = [output_batch_size] + shape
      zeros = tf.zeros(shape, dtype)
      upper = tf.concat((extended_right_tang_core, zeros), axis=right_rank_dim)
      lower = tf.concat((proj_core, extended_left_tang_core),
                        axis=right_rank_dim)
      res_core = tf.concat((upper, lower), axis=left_rank_dim)
    res_cores_list.append(res_core)
  # TODO: TT-ranks.
  if output_is_batch:
    res = TensorTrainBatch(res_cores_list, where.get_raw_shape(),
                           batch_size=output_batch_size)
  else:
    res = TensorTrain(res_cores_list, where.get_raw_shape())
  # Remember the tangent-space base; pairwise_flat_inner_projected checks it.
  res.projection_on = where
  return res
def project_matmul(what, where, matrix):
  """Project `matrix` * `what` TTs on the tangent space of `where` TT.

  project(what, x) = P_x(what)
  project(batch_what, x) = batch(P_x(batch_what[0]), ..., P_x(batch_what[N]))

  This function implements the algorithm from the paper [1], theorem 3.1.

  [1] C. Lubich, I. Oseledets and B. Vandereycken, Time integration of
    Tensor Trains.

  Args:
    what: TensorTrain or TensorTrainBatch. In the case of batch returns
      batch with projection of each individual tensor.
    where: TensorTrain, TT-tensor or TT-matrix on which tangent space to project
    matrix: TensorTrain, TT-matrix to multiply by what

  Returns:
    a TensorTrain with the TT-ranks equal 2 * tangent_space_tens.get_tt_ranks()

  Complexity:
    O(d r_where^3 m) for orthogonalizing the TT-cores of where
    +O(batch_size d R r_what r_where (n r_what + n m R + m r_where))
    d is the number of TT-cores (what.ndims());
    r_what is the largest TT-rank of what max(what.get_tt_rank())
    r_where is the largest TT-rank of where
    matrix is of TT-rank R and of raw-shape (m, m, ..., m) x (n, n, ..., n).
  """
  if not isinstance(where, TensorTrain):
    raise ValueError('The first argument should be a TensorTrain object, got '
                     '"%s".' % where)
  if where.get_raw_shape() != what.get_raw_shape():
    raise ValueError('The shapes of the tensor we want to project and of the '
                     'tensor on which tangent space we want to project should '
                     'match, got %s and %s.' %
                     (where.get_raw_shape(),
                      what.get_raw_shape()))
  dtypes_compatible = (where.dtype.is_compatible_with(what.dtype) or
                       what.dtype.is_compatible_with(where.dtype))
  if not dtypes_compatible:
    raise ValueError('Dtypes of the arguments should coincide, got %s and %s.' %
                     (where.dtype,
                      what.dtype))
  # Left- and right-orthogonalized copies of `where`; their cores serve as the
  # left/right frames of the tangent space (cf. reference [1] above).
  left_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      where)
  right_tangent_space_tens = decompositions.orthogonalize_tt_cores(
      left_tangent_space_tens, left_to_right=False)
  ndims = where.ndims()
  dtype = where.dtype
  raw_shape = shapes.lazy_raw_shape(where)
  # NOTE(review): unlike project()/project_sum(), batch_size is read *before*
  # expand_batch_dim below; presumably callers always pass a TensorTrainBatch
  # as `what` here -- verify before relying on plain TensorTrain input.
  batch_size = shapes.lazy_batch_size(what)
  right_tangent_tt_ranks = shapes.lazy_tt_ranks(right_tangent_space_tens)
  left_tangent_tt_ranks = shapes.lazy_tt_ranks(left_tangent_space_tens)
  # For einsum notation.
  right_rank_dim = what.right_tt_rank_dim
  left_rank_dim = what.left_tt_rank_dim
  output_is_batch = isinstance(what, TensorTrainBatch)
  if output_is_batch:
    output_batch_size = what.batch_size
  # Always work with batch of TT objects for simplicity.
  what = shapes.expand_batch_dim(what)
  # Prepare rhs vectors.
  # rhs[core_idx] is of size
  # batch_size x tensor_tt_ranks[core_idx] x matrix_tt_ranks[core_idx] x tangent_tt_ranks[core_idx]
  # (right-to-left partial contractions; the matrix core is folded in so the
  # projection and the matmul happen in a single pass).
  rhs = [None] * (ndims + 1)
  rhs[ndims] = tf.ones((batch_size, 1, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1, 0, -1):
    tens_core = what.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
    matrix_core = matrix.tt_cores[core_idx]
    rhs[core_idx] = tf.einsum('bije,cikf,sdef,sajkd->sabc', matrix_core,
                              right_tang_core, rhs[core_idx + 1], tens_core)
  # Prepare lhs vectors.
  # lhs[core_idx] is of size
  # batch_size x tangent_tt_ranks[core_idx] x matrix_tt_ranks[core_idx] x tensor_tt_ranks[core_idx]
  lhs = [None] * (ndims + 1)
  lhs[0] = tf.ones((batch_size, 1, 1, 1), dtype=dtype)
  for core_idx in range(ndims - 1):
    tens_core = what.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    matrix_core = matrix.tt_cores[core_idx]
    # TODO: brutforce order of indices in lhs??
    lhs[core_idx + 1] = tf.einsum('bije,aikd,sabc,scjkf->sdef', matrix_core,
                                  left_tang_core, lhs[core_idx], tens_core)
  # Left to right sweep.
  res_cores_list = []
  for core_idx in range(ndims):
    tens_core = what.tt_cores[core_idx]
    matrix_core = matrix.tt_cores[core_idx]
    left_tang_core = left_tangent_space_tens.tt_cores[core_idx]
    right_tang_core = right_tangent_space_tens.tt_cores[core_idx]
    if core_idx < ndims - 1:
      proj_core = tf.einsum('scjke,sabc,bijd->saikde', tens_core,
                            lhs[core_idx], matrix_core)
      proj_core -= tf.einsum('aikb,sbcd->saikcd', left_tang_core,
                             lhs[core_idx + 1])
      proj_core = tf.einsum('saikcb,sbcd->saikd', proj_core, rhs[core_idx + 1])
    if core_idx == ndims - 1:
      # d and e dimensions take 1 value, since its the last rank.
      # To make the result shape (?, ?, ?, 1), we are summing d and leaving e,
      # but we could have done the opposite -- sum e and leave d.
      proj_core = tf.einsum('sabc,bijd,scjke->saike', lhs[core_idx], matrix_core,
                            tens_core)
    if output_is_batch:
      # Add batch dimension of size output_batch_size to left_tang_core and
      # right_tang_core
      extended_left_tang_core = tf.expand_dims(left_tang_core, 0)
      extended_right_tang_core = tf.expand_dims(right_tang_core, 0)
      extended_left_tang_core = tf.tile(extended_left_tang_core,
                                        [output_batch_size, 1, 1, 1, 1])
      extended_right_tang_core = tf.tile(extended_right_tang_core,
                                         [output_batch_size, 1, 1, 1, 1])
    else:
      extended_left_tang_core = left_tang_core
      extended_right_tang_core = right_tang_core
    # Assemble this core of the result: interior cores get the 2x2 block
    # structure [[right_tang_core, 0], [proj_core, left_tang_core]] along the
    # rank dims; the first/last cores keep a single block row/column. This is
    # why the result's TT-ranks are doubled relative to `where`.
    if core_idx == 0:
      res_core = tf.concat((proj_core, extended_left_tang_core),
                           axis=right_rank_dim)
    elif core_idx == ndims - 1:
      res_core = tf.concat((extended_right_tang_core, proj_core),
                           axis=left_rank_dim)
    else:
      rank_1 = right_tangent_tt_ranks[core_idx]
      rank_2 = left_tangent_tt_ranks[core_idx + 1]
      mode_size_n = raw_shape[0][core_idx]
      mode_size_m = raw_shape[1][core_idx]
      shape = [rank_1, mode_size_n, mode_size_m, rank_2]
      if output_is_batch:
        shape = [output_batch_size] + shape
      zeros = tf.zeros(shape, dtype)
      upper = tf.concat((extended_right_tang_core, zeros),
                        axis=right_rank_dim)
      lower = tf.concat((proj_core, extended_left_tang_core),
                        axis=right_rank_dim)
      res_core = tf.concat((upper, lower), axis=left_rank_dim)
    res_cores_list.append(res_core)
  # TODO: TT-ranks.
  if output_is_batch:
    res = TensorTrainBatch(res_cores_list, where.get_raw_shape(),
                           batch_size=output_batch_size)
  else:
    res = TensorTrain(res_cores_list, where.get_raw_shape())
  # Remember the tangent-space base; pairwise_flat_inner_projected checks it.
  res.projection_on = where
  return res
def pairwise_flat_inner_projected(projected_tt_vectors_1,
                                  projected_tt_vectors_2):
  """Scalar products between two batches of TTs from the same tangent space.

    res[i, j] = t3f.flat_inner(projected_tt_vectors_1[i],
                               projected_tt_vectors_2[j]).

  pairwise_flat_inner_projected(projected_tt_vectors_1, projected_tt_vectors_2)
  is equivalent to
  pairwise_flat_inner(projected_tt_vectors_1, projected_tt_vectors_2)
  , but works only on objects from the same tangent space and is much faster
  than general pairwise_flat_inner.

  Args:
    projected_tt_vectors_1: TensorTrainBatch of tensors projected on the same
      tangent space as projected_tt_vectors_2.
    projected_tt_vectors_2: TensorTrainBatch.

  Returns:
    tf.tensor with the scalar product matrix.

  Raises:
    ValueError: if the arguments are not projections, or are projections on
      the tangent spaces of different TT-objects.

  Complexity:
    O(batch_size^2 d r^2 n), where
    d is the number of TT-cores (projected_tt_vectors_1.ndims());
    r is the largest TT-rank max(projected_tt_vectors_1.get_tt_rank())
    (i.e. 2 * {the TT-rank of the object we projected vectors onto}.
    and n is the size of the axis dimension, e.g.
    for a tensor of size 4 x 4 x 4, n is 4;
    for a 9 x 64 matrix of raw shape (3, 3, 3) x (4, 4, 4) n is 12.
  """
  # Projections carry a .projection_on attribute (set by the project*
  # functions); without it the fast tangent-space algorithm is not applicable.
  if not hasattr(projected_tt_vectors_1, 'projection_on') or \
      not hasattr(projected_tt_vectors_2, 'projection_on'):
    raise ValueError('Both arguments should be projections on the tangent '
                     'space of some other TT-object. All projection* functions '
                     'leave .projection_on field in the resulting TT-object '
                     'which is not present in the arguments you\'ve provided')
  if projected_tt_vectors_1.projection_on != projected_tt_vectors_2.projection_on:
    raise ValueError('Both arguments should be projections on the tangent '
                     'space of the same TT-object. The provided arguments are '
                     'projections on different TT-objects (%s and %s). Or at '
                     'least the pointers are different.' %
                     (projected_tt_vectors_1.projection_on,
                      projected_tt_vectors_2.projection_on))
  # Always work with batches of objects for simplicity.
  projected_tt_vectors_1 = shapes.expand_batch_dim(projected_tt_vectors_1)
  projected_tt_vectors_2 = shapes.expand_batch_dim(projected_tt_vectors_2)

  # Every core of a tangent-space element has the block structure
  #   [[V, 0], [dP, U]]
  # where U / V are shared between all elements of the same tangent space and
  # only the delta block dP is element specific (see deltas_to_tangent_space).
  # Thanks to the gauge conditions the scalar product of two tangent-space
  # elements reduces to the sum over cores of the scalar products of their dP
  # blocks, i.e. rows [left_size:] and columns [:right_size] of each core
  # (full range along the boundary rank axes of the first / last core).
  ndims = projected_tt_vectors_1.ndims()
  tt_ranks = shapes.lazy_tt_ranks(projected_tt_vectors_1)

  if projected_tt_vectors_1.is_tt_matrix():
    # TT-matrix cores are 5D: (batch, left rank, row mode, col mode, right rank).
    right_size = tt_ranks[1] // 2
    curr_core_1 = projected_tt_vectors_1.tt_cores[0]
    curr_core_2 = projected_tt_vectors_2.tt_cores[0]
    curr_du_1 = curr_core_1[:, :, :, :, :right_size]
    curr_du_2 = curr_core_2[:, :, :, :, :right_size]
    res = tf.einsum('paijb,qaijb->pq', curr_du_1, curr_du_2)
    # Middle cores only. (The original looped up to ndims, which made the last
    # iteration slice with right_size = tt_ranks[ndims] // 2 == 0 -- the
    # boundary rank is 1 -- and add an all-zero term; the last core is handled
    # explicitly below.)
    for core_idx in range(1, ndims - 1):
      left_size = tt_ranks[core_idx] // 2
      right_size = tt_ranks[core_idx + 1] // 2
      curr_core_1 = projected_tt_vectors_1.tt_cores[core_idx]
      curr_core_2 = projected_tt_vectors_2.tt_cores[core_idx]
      curr_du_1 = curr_core_1[:, left_size:, :, :, :right_size]
      curr_du_2 = curr_core_2[:, left_size:, :, :, :right_size]
      res += tf.einsum('paijb,qaijb->pq', curr_du_1, curr_du_2)

    left_size = tt_ranks[-2] // 2
    curr_core_1 = projected_tt_vectors_1.tt_cores[-1]
    curr_core_2 = projected_tt_vectors_2.tt_cores[-1]
    curr_du_1 = curr_core_1[:, left_size:, :, :, :]
    curr_du_2 = curr_core_2[:, left_size:, :, :, :]
    res += tf.einsum('paijb,qaijb->pq', curr_du_1, curr_du_2)
  else:
    # Working with TT-tensor, not TT-matrix: cores are 4D
    # (batch, left rank, mode, right rank).
    right_size = tt_ranks[1] // 2
    curr_core_1 = projected_tt_vectors_1.tt_cores[0]
    curr_core_2 = projected_tt_vectors_2.tt_cores[0]
    curr_du_1 = curr_core_1[:, :, :, :right_size]
    curr_du_2 = curr_core_2[:, :, :, :right_size]
    res = tf.einsum('paib,qaib->pq', curr_du_1, curr_du_2)
    # Middle cores only; see the comment in the TT-matrix branch.
    for core_idx in range(1, ndims - 1):
      left_size = tt_ranks[core_idx] // 2
      right_size = tt_ranks[core_idx + 1] // 2
      curr_core_1 = projected_tt_vectors_1.tt_cores[core_idx]
      curr_core_2 = projected_tt_vectors_2.tt_cores[core_idx]
      curr_du_1 = curr_core_1[:, left_size:, :, :right_size]
      curr_du_2 = curr_core_2[:, left_size:, :, :right_size]
      res += tf.einsum('paib,qaib->pq', curr_du_1, curr_du_2)

    left_size = tt_ranks[-2] // 2
    curr_core_1 = projected_tt_vectors_1.tt_cores[-1]
    curr_core_2 = projected_tt_vectors_2.tt_cores[-1]
    curr_du_1 = curr_core_1[:, left_size:, :, :]
    curr_du_2 = curr_core_2[:, left_size:, :, :]
    res += tf.einsum('paib,qaib->pq', curr_du_1, curr_du_2)
  return res
def add_n_projected(tt_objects, coef=None):
  """Adds all input TT-objects that are projections on the same tangent space.

  add_projected((a, b)) is equivalent to add(a, b) for a and b that are from
  the same tangent space, but doesn't increase the TT-ranks.

  Args:
    tt_objects: a list of TT-objects that are projections on the same tangent
      space.
    coef: a list of numbers or anything else convertible to tf.Tensor.
      If provided, computes weighted sum. The size of this array should be
      len(tt_objects) x tt_objects[0].batch_size

  Returns:
    TT-objects representing the sum of the tt_objects (weighted sum if coef is
    provided). The TT-rank of the result equals to the TT-ranks of the
    arguments.

  Raises:
    ValueError: if the arguments are not all projections on the tangent space
      of the same TT-object, or are of an unsupported type.
  """
  for tt in tt_objects:
    if not hasattr(tt, 'projection_on'):
      raise ValueError('Both arguments should be projections on the tangent '
                       'space of some other TT-object. All projection* functions '
                       'leave .projection_on field in the resulting TT-object '
                       'which is not present in the argument you\'ve provided.')
  projection_on = tt_objects[0].projection_on
  for tt in tt_objects[1:]:
    if tt.projection_on != projection_on:
      raise ValueError('All tt_objects should be projections on the tangent '
                       'space of the same TT-object. The provided arguments are '
                       'projections on different TT-objects (%s and %s). Or at '
                       'least the pointers are different.' % (tt.projection_on,
                                                              projection_on))
  if coef is not None:
    coef = tf.convert_to_tensor(coef, dtype=tt_objects[0].dtype)
    if coef.get_shape().ndims > 1:
      # In the batch case we need to multiply each core by its coefficient
      # along the first axis, so reshape the coefs to broadcast against the
      # TT-cores (which gain one extra leading dim per object index).
      some_core = tt_objects[0].tt_cores[0]
      dim_array = [1] * (some_core.get_shape().ndims + 1)
      dim_array[0] = coef.get_shape()[0].value
      dim_array[1] = coef.get_shape()[1].value
      coef = tf.reshape(coef, dim_array)

  ndims = tt_objects[0].ndims()
  tt_ranks = shapes.lazy_tt_ranks(tt_objects[0])
  left_rank_dim = tt_objects[0].left_tt_rank_dim
  right_rank_dim = tt_objects[0].right_tt_rank_dim
  res_cores = []

  # Every core of a tangent-space element has the block structure
  #   [[V, 0], [dP, U]]
  # where U / V depend only on the point the objects are projected on (hence
  # are identical for all tt_objects) and only the delta block dP is object
  # specific. Summing the elements therefore amounts to summing the dP blocks
  # and copying the shared blocks from tt_objects[0].

  # First core: dP occupies the first half of the right TT-rank axis.
  right_half_rank = tt_ranks[1] // 2
  left_chunks = []
  for obj_idx, tt in enumerate(tt_objects):
    curr_core = slice_tt_core(tt.tt_cores[0], slice(None),
                              slice(0, right_half_rank))
    if coef is not None:
      curr_core *= coef[obj_idx]
    left_chunks.append(curr_core)
  left_part = tf.add_n(left_chunks)
  first_obj_core = tt_objects[0].tt_cores[0]
  right_part = slice_tt_core(first_obj_core, slice(None),
                             slice(right_half_rank, None))
  first_core = tf.concat((left_part, right_part), axis=right_rank_dim)
  res_cores.append(first_core)

  for core_idx in range(1, ndims - 1):
    first_obj_core = tt_objects[0].tt_cores[core_idx]
    left_half_rank = tt_ranks[core_idx] // 2
    right_half_rank = tt_ranks[core_idx + 1] // 2
    # The upper part [V, 0] and the lower-right part U are shared between all
    # objects, so take them from the first one. (The original sliced the
    # leaked loop variable `tt` -- i.e. the last object -- here, which is only
    # equivalent by the shared-tangent-space invariant and left the computed
    # first_obj_core unused.)
    upper_part = slice_tt_core(first_obj_core, slice(0, left_half_rank),
                               slice(None))
    lower_right_part = slice_tt_core(first_obj_core,
                                     slice(left_half_rank, None),
                                     slice(right_half_rank, None))
    # The lower-left block is the object-specific delta: sum those up.
    lower_left_chunks = []
    for obj_idx, tt in enumerate(tt_objects):
      curr_core = slice_tt_core(tt.tt_cores[core_idx],
                                slice(left_half_rank, None),
                                slice(0, right_half_rank))
      if coef is not None:
        curr_core *= coef[obj_idx]
      lower_left_chunks.append(curr_core)
    lower_left_part = tf.add_n(lower_left_chunks)
    lower_part = tf.concat((lower_left_part, lower_right_part),
                           axis=right_rank_dim)
    curr_core = tf.concat((upper_part, lower_part), axis=left_rank_dim)
    res_cores.append(curr_core)

  # Last core: dP occupies the second half of the left TT-rank axis.
  left_half_rank = tt_ranks[ndims - 1] // 2
  upper_part = slice_tt_core(tt_objects[0].tt_cores[-1],
                             slice(0, left_half_rank), slice(None))
  lower_chunks = []
  for obj_idx, tt in enumerate(tt_objects):
    curr_core = slice_tt_core(tt.tt_cores[-1], slice(left_half_rank, None),
                              slice(None))
    if coef is not None:
      curr_core *= coef[obj_idx]
    lower_chunks.append(curr_core)
  lower_part = tf.add_n(lower_chunks)
  last_core = tf.concat((upper_part, lower_part), axis=left_rank_dim)
  res_cores.append(last_core)

  raw_shape = tt_objects[0].get_raw_shape()
  static_tt_ranks = tt_objects[0].get_tt_ranks()
  if isinstance(tt_objects[0], TensorTrain):
    res = TensorTrain(res_cores, raw_shape, static_tt_ranks)
  elif isinstance(tt_objects[0], TensorTrainBatch):
    res = TensorTrainBatch(res_cores, raw_shape, static_tt_ranks,
                           tt_objects[0].batch_size)
  else:
    # Previously `res` was silently left unbound here, yielding a confusing
    # NameError; fail with an explicit message instead.
    raise ValueError('Unsupported TT-object type: %s' % type(tt_objects[0]))
  # Maintain the projection_on property.
  res.projection_on = tt_objects[0].projection_on
  return res
def tangent_space_to_deltas(tt, name='t3f_tangent_space_to_deltas'):
  """Convert an element of the tangent space to deltas representation.

  Tangent space elements (outputs of t3f.project) look like:
    dP1 V2 ... Vd + U1 dP2 V3 ... Vd + ... + U1 ... Ud-1 dPd.

  This function takes as input an element of the tangent space and converts
  it to the list of deltas [dP1, ..., dPd].

  Args:
    tt: `TensorTrain` or `TensorTrainBatch` that is a result of t3f.project,
      t3f.project_matmul, or other similar functions.
    name: string, name of the Op.

  Returns:
    A list of delta-cores (tf.Tensors).

  Raises:
    ValueError: if `tt` lacks the `projection_on` attribute (i.e. is not the
      result of a projection), or if any of its internal TT-ranks is odd
      (projections always have even internal ranks).
  """
  if not hasattr(tt, 'projection_on') or tt.projection_on is None:
    raise ValueError('tt argument is supposed to be a projection, but it '
                     'lacks projection_on field')
  num_dims = tt.ndims()
  left_tt_rank_dim = tt.left_tt_rank_dim
  right_tt_rank_dim = tt.right_tt_rank_dim
  deltas = [None] * num_dims
  tt_ranks = shapes.lazy_tt_ranks(tt)
  # All internal ranks (indices 1 .. num_dims - 1) must be even, because every
  # core of a projection is split into equal halves along the rank axes.
  # (The original check stopped at num_dims - 2 and missed the last internal
  # rank, which is sliced via tt_ranks[-2] below.)
  for i in range(1, num_dims):
    if tt_ranks[i] % 2 != 0:
      raise ValueError('tt argument is supposed to be a projection, but its '
                       'ranks are not even.')
  with tf.compat.v1.name_scope(name, values=tt.tt_cores):
    # Middle cores have the block structure [[V, 0], [dP, U]]: dP is the
    # second half of the left rank axis and the first half of the right one.
    for i in range(1, num_dims - 1):
      r1, r2 = tt_ranks[i], tt_ranks[i + 1]
      curr_core = tt.tt_cores[i]
      slc = [slice(None)] * len(curr_core.shape)
      slc[left_tt_rank_dim] = slice(r1 // 2, None)
      slc[right_tt_rank_dim] = slice(0, r2 // 2)
      deltas[i] = curr_core[slc]
    # First core: dP is the first half along the right rank axis.
    slc = [slice(None)] * len(tt.tt_cores[0].shape)
    slc[right_tt_rank_dim] = slice(0, tt_ranks[1] // 2)
    deltas[0] = tt.tt_cores[0][slc]
    # Last core: dP is the second half along the left rank axis.
    slc = [slice(None)] * len(tt.tt_cores[0].shape)
    slc[left_tt_rank_dim] = slice(tt_ranks[-2] // 2, None)
    deltas[num_dims - 1] = tt.tt_cores[num_dims - 1][slc]
  return deltas
def deltas_to_tangent_space(deltas, tt, left=None, right=None,
                            name='t3f_deltas_to_tangent_space'):
  """Converts deltas representation of tangent space vector to TT object.

  Takes as input a list of [dP1, ..., dPd] and returns
    dP1 V2 ... Vd + U1 dP2 V3 ... Vd + ... + U1 ... Ud-1 dPd.

  This function is hard to use correctly because deltas should obey the
  so called gauge conditions. If they don't, the function will silently
  return an incorrect result. This is why this function is not imported in
  __init__.

  Args:
    deltas: a list of deltas (essentially TT-cores) obeying the gauge
      conditions.
    tt: `TensorTrain` object on which the tangent space tensor represented by
      delta is projected.
    left: t3f.orthogonalize_tt_cores(tt). If you have it already computed, you
      may pass it as an argument to avoid recomputing.
    right: t3f.orthogonalize_tt_cores(left, left_to_right=False). If you have
      it already computed, you may pass it as an argument to avoid recomputing.
    name: string, name of the Op.

  Returns:
    `TensorTrain` object constructed from deltas, that is from the tangent
    space at point `tt`.
  """
  cores = []
  dtype = tt.dtype
  num_dims = tt.ndims()
  # TODO: add cache instead of manually passing precomputed stuff?
  input_tensors = list(tt.tt_cores) + list(deltas)
  if left is not None:
    input_tensors += list(left.tt_cores)
  if right is not None:
    input_tensors += list(right.tt_cores)
  with tf.compat.v1.name_scope(name, values=input_tensors):
    # U cores come from the left-orthogonalization of tt, V cores from the
    # right-orthogonalization.
    if left is None:
      left = decompositions.orthogonalize_tt_cores(tt)
    if right is None:
      right = decompositions.orthogonalize_tt_cores(left, left_to_right=False)
    left_tangent_tt_ranks = shapes.lazy_tt_ranks(left)
    # NOTE(review): the right ranks are also taken from `left`; this should
    # coincide with lazy_tt_ranks(right) since orthogonalization preserves the
    # TT-ranks -- confirm, otherwise this should read
    # shapes.lazy_tt_ranks(right).
    right_tangent_tt_ranks = shapes.lazy_tt_ranks(left)
    raw_shape = shapes.lazy_raw_shape(left)
    right_rank_dim = left.right_tt_rank_dim
    left_rank_dim = left.left_tt_rank_dim
    # A batch of deltas (one extra leading dimension) produces a
    # TensorTrainBatch; then the rank axes shift right by one.
    is_batch_case = len(deltas[0].shape) > len(tt.tt_cores[0].shape)
    if is_batch_case:
      right_rank_dim += 1
      left_rank_dim += 1
      batch_size = deltas[0].shape.as_list()[0]
    for i in range(num_dims):
      left_tt_core = left.tt_cores[i]
      right_tt_core = right.tt_cores[i]
      if is_batch_case:
        # Replicate the shared U / V cores along the batch dimension so they
        # can be concatenated with the batched deltas.
        tile = [1] * len(left_tt_core.shape)
        tile = [batch_size] + tile
        left_tt_core = tf.tile(left_tt_core[None, ...], tile)
        right_tt_core = tf.tile(right_tt_core[None, ...], tile)
      if i == 0:
        # First core: [dP1, U1] along the right rank axis.
        tangent_core = tf.concat((deltas[i], left_tt_core),
                                 axis=right_rank_dim)
      elif i == num_dims - 1:
        # Last core: [Vd; dPd] stacked along the left rank axis.
        tangent_core = tf.concat((right_tt_core, deltas[i]),
                                 axis=left_rank_dim)
      else:
        # Middle cores get the 2x2 block structure [[Vi, 0], [dPi, Ui]].
        rank_1 = right_tangent_tt_ranks[i]
        rank_2 = left_tangent_tt_ranks[i + 1]
        if tt.is_tt_matrix():
          mode_size_n = raw_shape[0][i]
          mode_size_m = raw_shape[1][i]
          shape = [rank_1, mode_size_n, mode_size_m, rank_2]
        else:
          mode_size_n = raw_shape[0][i]
          shape = [rank_1, mode_size_n, rank_2]
        if is_batch_case:
          shape = [batch_size] + shape
        zeros = tf.zeros(shape, dtype=dtype)
        upper = tf.concat((right_tt_core, zeros), axis=right_rank_dim)
        lower = tf.concat((deltas[i], left_tt_core), axis=right_rank_dim)
        tangent_core = tf.concat((upper, lower), axis=left_rank_dim)
      cores.append(tangent_core)
    if is_batch_case:
      tangent = TensorTrainBatch(cores, batch_size=batch_size)
    else:
      tangent = TensorTrain(cores)
    # Mark the result as an element of the tangent space at `tt` so the fast
    # tangent-space operations (e.g. add_n_projected) accept it.
    tangent.projection_on = tt
    return tangent
| 42.90991 | 101 | 0.664943 |
8184c1d8dc29034b686437e80c0929c8f140a87c | 262 | py | Python | dpauth/admin.py | askmeaboutlo0m/website | 3df97d061a425e7fbb3f173c78ff01d831575aa0 | [
"MIT"
] | 9 | 2017-06-04T15:46:05.000Z | 2021-09-04T23:28:03.000Z | dpauth/admin.py | askmeaboutlo0m/website | 3df97d061a425e7fbb3f173c78ff01d831575aa0 | [
"MIT"
] | 24 | 2018-02-10T04:29:00.000Z | 2021-10-01T16:01:04.000Z | dpauth/admin.py | askmeaboutlo0m/website | 3df97d061a425e7fbb3f173c78ff01d831575aa0 | [
"MIT"
] | 4 | 2020-03-23T03:42:32.000Z | 2022-03-16T17:01:09.000Z | from django.contrib import admin
from . import models
| 23.818182 | 45 | 0.717557 |
8188e19b101be322e95cf844a7e3d5f16f246e15 | 346 | py | Python | iptv_proxy/providers/beast/json_api.py | sfanous/IPTVProxy | 23047be01a229ef8f69ea6ca55185eae93adc56e | [
"MIT"
] | 9 | 2018-11-02T02:51:50.000Z | 2022-01-12T06:22:33.000Z | iptv_proxy/providers/beast/json_api.py | sfanous/IPTVProxy | 23047be01a229ef8f69ea6ca55185eae93adc56e | [
"MIT"
] | 3 | 2019-05-11T21:28:32.000Z | 2020-04-27T00:58:46.000Z | iptv_proxy/providers/beast/json_api.py | sfanous/IPTVProxy | 23047be01a229ef8f69ea6ca55185eae93adc56e | [
"MIT"
] | 7 | 2019-01-03T20:31:30.000Z | 2022-01-29T04:09:24.000Z | import logging
from iptv_proxy.providers.beast.constants import BeastConstants
from iptv_proxy.providers.iptv_provider.json_api import ProviderConfigurationJSONAPI
logger = logging.getLogger(__name__)
| 26.615385 | 84 | 0.84104 |
818971d06d80952ffaa6249ad6fd01e66412f03b | 30,912 | py | Python | tests/random/random_testing.py | jkeiren/mCRL2 | 3a60939f4e6dbc131e8ce87b1929bea9dbf90dd2 | [
"BSL-1.0"
] | 61 | 2018-05-24T13:14:05.000Z | 2022-03-29T11:35:03.000Z | tests/random/random_testing.py | jkeiren/mCRL2 | 3a60939f4e6dbc131e8ce87b1929bea9dbf90dd2 | [
"BSL-1.0"
] | 229 | 2018-05-28T08:31:09.000Z | 2022-03-21T11:02:41.000Z | tests/random/random_testing.py | jkeiren/mCRL2 | 3a60939f4e6dbc131e8ce87b1929bea9dbf90dd2 | [
"BSL-1.0"
] | 28 | 2018-04-11T14:09:39.000Z | 2022-02-25T15:57:39.000Z | #!/usr/bin/env python
# Copyright 2015 Wieger Wesselink.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import os
import os.path
import random
import re
import sys
import traceback
sys.path += [os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'python'))]
import random_state_formula_generator
from random_bes_generator import make_bes
from random_pbes_generator import make_pbes
import random_process_expression
from testing import YmlTest
from text_utility import write_text
MCRL2_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
MCRL2_INSTALL_DIR = os.path.join(MCRL2_ROOT, 'install', 'bin')
# generates stochastic random processes
# generates random process with higher probability of tau transitions
# N.B. does not work yet due to unusable abstraction map
# N.B. This test has been disabled, since the tool has been deprecated.
# N.B does not work due to unknown expressions (F_or)
available_tests = {
'alphabet-reduce' : lambda name, settings: AlphabetReduceTest(name, settings) ,
'lpssuminst' : lambda name, settings: LpsSuminstTest(name, settings) ,
'lpssumelm' : lambda name, settings: LpsSumelmTest(name, settings) ,
'lpsparelm' : lambda name, settings: LpsParelmTest(name, settings) ,
'lps-quantifier-one-point' : lambda name, settings: LpsOnePointRuleRewriteTest(name, settings) ,
'lpsconfcheck-commutative' : lambda name, settings: LpsConfcheckTest(name, 'commutative', settings) ,
'lpsconfcheck-commutative-disjoint' : lambda name, settings: LpsConfcheckTest(name, 'commutative-disjoint', settings) ,
'lpsconfcheck-disjoint' : lambda name, settings: LpsConfcheckTest(name, 'disjoint', settings) ,
'lpsconfcheck-triangular' : lambda name, settings: LpsConfcheckTest(name, 'triangular', settings) ,
'lpsconfcheck-trivial' : lambda name, settings: LpsConfcheckTest(name, 'trivial', settings) ,
'lpsconstelm' : lambda name, settings: LpsConstelmTest(name, settings) ,
'lpsbinary' : lambda name, settings: LpsBinaryTest(name, settings) ,
'lps2lts-algorithms' : lambda name, settings: Lps2ltsAlgorithmsTest(name, settings) ,
'lps2pbes' : lambda name, settings: Lps2pbesTest(name, settings) ,
'lpsstategraph' : lambda name, settings: LpsstategraphTest(name, settings) ,
'lts2pbes' : lambda name, settings: Lts2pbesTest(name, settings) ,
'ltscompare-bisim' : lambda name, settings: LtscompareTest(name, 'bisim', settings) ,
'ltscompare-bisim-gv' : lambda name, settings: LtscompareTest(name, 'bisim-gv', settings) ,
'ltscompare-bisim-gjkw' : lambda name, settings: LtscompareTest(name, 'bisim-gjkw', settings) ,
'ltscompare-branching-bisim' : lambda name, settings: LtscompareTest(name, 'branching-bisim', settings) ,
'ltscompare-branching-bisim-gv' : lambda name, settings: LtscompareTest(name, 'branching-bisim-gv', settings) ,
'ltscompare-branching-bisim-gjkw' : lambda name, settings: LtscompareTest(name, 'branching-bisim-gjkw', settings) ,
'ltscompare-dpbranching-bisim' : lambda name, settings: LtscompareTest(name, 'dpbranching-bisim', settings) ,
'ltscompare-dpbranching-bisim-gv' : lambda name, settings: LtscompareTest(name, 'dpbranching-bisim-gv', settings) ,
'ltscompare-dpbranching-bisim-gjkw' : lambda name, settings: LtscompareTest(name, 'dpbranching-bisim-gjkw', settings) ,
'ltscompare-weak-bisim' : lambda name, settings: LtscompareTest(name, 'weak-bisim', settings) ,
'ltscompare-dpweak-bisim' : lambda name, settings: LtscompareTest(name, 'dpweak-bisim', settings) ,
'ltscompare-sim' : lambda name, settings: LtscompareTest(name, 'sim', settings) ,
'ltscompare-ready-sim' : lambda name, settings: LtscompareTest(name, 'ready-sim', settings) ,
'ltscompare-trace' : lambda name, settings: LtscompareTest(name, 'trace', settings) ,
'ltscompare-weak-trace' : lambda name, settings: LtscompareTest(name, 'weak-trace', settings) ,
'bisimulation-bisim' : lambda name, settings: BisimulationTest(name, 'bisim', settings) ,
'bisimulation-bisim-gv' : lambda name, settings: BisimulationTest(name, 'bisim-gv', settings) ,
'bisimulation-bisim-gjkw' : lambda name, settings: BisimulationTest(name, 'bisim-gjkw', settings) ,
'bisimulation-branching-bisim' : lambda name, settings: BisimulationTest(name, 'branching-bisim', settings) ,
'bisimulation-branching-bisim-gv' : lambda name, settings: BisimulationTest(name, 'branching-bisim-gv', settings) ,
'bisimulation-branching-bisim-gjkw' : lambda name, settings: BisimulationTest(name, 'branching-bisim-gjkw', settings) ,
'bisimulation-weak-bisim' : lambda name, settings: BisimulationTest(name, 'weak-bisim', settings) ,
'pbesconstelm' : lambda name, settings: PbesconstelmTest(name, settings) ,
'pbesparelm' : lambda name, settings: PbesparelmTest(name, settings) ,
'pbespareqelm' : lambda name, settings: PbespareqelmTest(name, settings) ,
'pbespor2' : lambda name, settings: Pbespor2Test(name, settings) ,
'pbesrewr-simplify' : lambda name, settings: PbesrewrTest(name, 'simplify', settings) ,
'pbesrewr-pfnf' : lambda name, settings: PbesrewrTest(name, 'pfnf', settings) ,
'pbesrewr-quantifier-all' : lambda name, settings: PbesrewrTest(name, 'quantifier-all', settings) ,
'pbesrewr-quantifier-finite' : lambda name, settings: PbesrewrTest(name, 'quantifier-finite', settings) ,
'pbesrewr-quantifier-inside' : lambda name, settings: PbesrewrTest(name, 'quantifier-inside', settings) ,
'pbesrewr-quantifier-one-point' : lambda name, settings: PbesrewrTest(name, 'quantifier-one-point', settings) ,
'pbesrewr-data-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-data-rewriter', settings) ,
'pbesrewr-simplify-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-simplify-rewriter', settings) ,
'pbesrewr-simplify-data-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-simplify-data-rewriter', settings) ,
'pbesrewr-simplify-quantifiers-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-simplify-quantifiers-rewriter', settings) ,
'pbesrewr-simplify-quantifiers-data-rewriter' : lambda name, settings: PbestransformTest(name, 'pbes-simplify-quantifiers-data-rewriter', settings),
'pbesinst-lazy' : lambda name, settings: PbesinstTest(name, ['-slazy'], settings) ,
'pbesinst-alternative_lazy' : lambda name, settings: PbesinstTest(name, ['-salternative-lazy'], settings) ,
'pbesinst-finite' : lambda name, settings: PbesinstTest(name, ['-sfinite', '-f*(*:Bool)'], settings) ,
'pbespgsolve' : lambda name, settings: PbespgsolveTest(name, settings) ,
'pbessolve' : lambda name, settings: Pbes2boolTest(name, settings) ,
'pbessolve-depth-first' : lambda name, settings: Pbes2boolDepthFirstTest(name, settings) ,
'pbessolve-counter-example-optimization-0' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 0, settings) ,
'pbessolve-counter-example-optimization-1' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 1, settings) ,
'pbessolve-counter-example-optimization-2' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 2, settings) ,
'pbessolve-counter-example-optimization-3' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 3, settings) ,
'pbessolve-counter-example-optimization-4' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 4, settings) ,
'pbessolve-counter-example-optimization-5' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 5, settings) ,
'pbessolve-counter-example-optimization-6' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 6, settings) ,
'pbessolve-counter-example-optimization-7' : lambda name, settings: Pbes2bool_counter_exampleTest(name, 7, settings) ,
'pbesstategraph' : lambda name, settings: PbesstategraphTest(name, settings) ,
'pbes-unify-parameters' : lambda name, settings: Pbes_unify_parametersTest(name, settings) ,
'pbes-srf' : lambda name, settings: Pbes_srfTest(name, settings) ,
# 'pbessymbolicbisim' : lambda name, settings: PbessymbolicbisimTest(name, settings) , # excluded from the tests because of Z3 dependency
'bessolve' : lambda name, settings: BessolveTest(name, settings) ,
#'stochastic-ltscompare' : lambda name, settings: StochasticLtscompareTest(name, settings) ,
}
# These test do not work on Windows due to dependencies.
if os.name != 'nt':
available_tests.update({'pbessolvesymbolic' : lambda name, settings: PbessolvesymbolicTest(name, settings) })
# available_tests.update({ 'pbesbddsolve' : lambda name, settings: PbesbddsolveTest(name, settings) })
# Return all tests that match with pattern. In case of an exact match, only this exact match is returned.
if __name__ == '__main__':
main(available_tests)
| 63.085714 | 280 | 0.606593 |
8189efb35e8c25b88203a01795c7461668948d95 | 969 | py | Python | src/download.py | stanislawbartkowski/webhdfsdirectory | 8f7110eb573487c845ab0126eb71f038edb5ed41 | [
"Apache-2.0"
] | null | null | null | src/download.py | stanislawbartkowski/webhdfsdirectory | 8f7110eb573487c845ab0126eb71f038edb5ed41 | [
"Apache-2.0"
] | null | null | null | src/download.py | stanislawbartkowski/webhdfsdirectory | 8f7110eb573487c845ab0126eb71f038edb5ed41 | [
"Apache-2.0"
] | null | null | null | """ Main program to launch proc/hdfs.py
"""
import argparse
import logging
from pars import addargs
import sys
import logging
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
from proc.hdfs import DIRHDFS
if __name__ == "__main__":
# execute only if run as a script
main()
| 25.5 | 103 | 0.700722 |
818a52c58e57385fa71bcd403825bd6c6ac08eb9 | 15,748 | py | Python | desktop/core/ext-py/josepy-1.1.0/src/josepy/json_util.py | kokosing/hue | 2307f5379a35aae9be871e836432e6f45138b3d9 | [
"Apache-2.0"
] | 5,079 | 2015-01-01T03:39:46.000Z | 2022-03-31T07:38:22.000Z | desktop/core/ext-py/josepy-1.1.0/src/josepy/json_util.py | zks888/hue | 93a8c370713e70b216c428caa2f75185ef809deb | [
"Apache-2.0"
] | 1,623 | 2015-01-01T08:06:24.000Z | 2022-03-30T19:48:52.000Z | desktop/core/ext-py/josepy-1.1.0/src/josepy/json_util.py | zks888/hue | 93a8c370713e70b216c428caa2f75185ef809deb | [
"Apache-2.0"
] | 2,033 | 2015-01-04T07:18:02.000Z | 2022-03-28T19:55:47.000Z | """JSON (de)serialization framework.
The framework presented here is somewhat based on `Go's "json" package`_
(especially the ``omitempty`` functionality).
.. _`Go's "json" package`: http://golang.org/pkg/encoding/json/
"""
import abc
import binascii
import logging
import OpenSSL
import six
from josepy import b64, errors, interfaces, util
logger = logging.getLogger(__name__)
def encode_b64jose(data):
"""Encode JOSE Base-64 field.
:param bytes data:
:rtype: `unicode`
"""
# b64encode produces ASCII characters only
return b64.b64encode(data).decode('ascii')
def decode_b64jose(data, size=None, minimum=False):
"""Decode JOSE Base-64 field.
:param unicode data:
:param int size: Required length (after decoding).
:param bool minimum: If ``True``, then `size` will be treated as
minimum required length, as opposed to exact equality.
:rtype: bytes
"""
error_cls = TypeError if six.PY2 else binascii.Error
try:
decoded = b64.b64decode(data.encode())
except error_cls as error:
raise errors.DeserializationError(error)
if size is not None and ((not minimum and len(decoded) != size) or
(minimum and len(decoded) < size)):
raise errors.DeserializationError(
"Expected at least or exactly {0} bytes".format(size))
return decoded
def encode_hex16(value):
"""Hexlify.
:param bytes value:
:rtype: unicode
"""
return binascii.hexlify(value).decode()
def decode_hex16(value, size=None, minimum=False):
"""Decode hexlified field.
:param unicode value:
:param int size: Required length (after decoding).
:param bool minimum: If ``True``, then `size` will be treated as
minimum required length, as opposed to exact equality.
:rtype: bytes
"""
value = value.encode()
if size is not None and ((not minimum and len(value) != size * 2) or
(minimum and len(value) < size * 2)):
raise errors.DeserializationError()
error_cls = TypeError if six.PY2 else binascii.Error
try:
return binascii.unhexlify(value)
except error_cls as error:
raise errors.DeserializationError(error)
def encode_cert(cert):
"""Encode certificate as JOSE Base-64 DER.
:type cert: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
:rtype: unicode
"""
return encode_b64jose(OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_ASN1, cert.wrapped))
def decode_cert(b64der):
"""Decode JOSE Base-64 DER-encoded certificate.
:param unicode b64der:
:rtype: `OpenSSL.crypto.X509` wrapped in `.ComparableX509`
"""
try:
return util.ComparableX509(OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_ASN1, decode_b64jose(b64der)))
except OpenSSL.crypto.Error as error:
raise errors.DeserializationError(error)
def encode_csr(csr):
"""Encode CSR as JOSE Base-64 DER.
:type csr: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
:rtype: unicode
"""
return encode_b64jose(OpenSSL.crypto.dump_certificate_request(
OpenSSL.crypto.FILETYPE_ASN1, csr.wrapped))
def decode_csr(b64der):
"""Decode JOSE Base-64 DER-encoded CSR.
:param unicode b64der:
:rtype: `OpenSSL.crypto.X509Req` wrapped in `.ComparableX509`
"""
try:
return util.ComparableX509(OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_ASN1, decode_b64jose(b64der)))
except OpenSSL.crypto.Error as error:
raise errors.DeserializationError(error)
| 32.672199 | 78 | 0.625921 |
818c9c67cc31addec97fe43bfd97c54843bc6cf4 | 6,463 | py | Python | norm/executable/schema/variable.py | reasoned-ai/norm | 5e45d5917ce8745c9a757a0c6b5e689ea0cac19f | [
"Apache-2.0"
] | 8 | 2019-07-22T08:57:20.000Z | 2021-03-26T13:51:02.000Z | norm/executable/schema/variable.py | xumiao/norm | 5e45d5917ce8745c9a757a0c6b5e689ea0cac19f | [
"Apache-2.0"
] | null | null | null | norm/executable/schema/variable.py | xumiao/norm | 5e45d5917ce8745c9a757a0c6b5e689ea0cac19f | [
"Apache-2.0"
] | 1 | 2019-11-16T13:37:35.000Z | 2019-11-16T13:37:35.000Z | from norm.models.norm import Status, Lambda
from norm.executable import NormExecutable
from typing import Union, List
import logging
logger = logging.getLogger(__name__)
| 34.195767 | 116 | 0.597865 |
818cc866ba9ab18156dde4bf30880a18007fbc03 | 3,318 | py | Python | src/parserindexer/tika_parser.py | yyzhuang1991/parser-indexer-py | 66847a6373d1fbc771e8ed9172a068731f9cd4da | [
"Apache-2.0"
] | 8 | 2016-07-20T19:50:00.000Z | 2020-09-15T01:56:51.000Z | src/parserindexer/tika_parser.py | yyzhuang1991/parser-indexer-py | 66847a6373d1fbc771e8ed9172a068731f9cd4da | [
"Apache-2.0"
] | 27 | 2016-08-01T13:39:25.000Z | 2021-08-18T17:47:23.000Z | src/parserindexer/tika_parser.py | yyzhuang1991/parser-indexer-py | 66847a6373d1fbc771e8ed9172a068731f9cd4da | [
"Apache-2.0"
] | 3 | 2016-07-20T19:55:49.000Z | 2021-08-06T16:19:48.000Z | from __future__ import print_function
import os
import sys
import json
import tika
from tqdm import tqdm
from utils import LogUtil
from parser import Parser
from ioutils import read_lines
from tika import parser as tk_parser
if __name__ == '__main__':
main()
| 31.6 | 80 | 0.611814 |
818d2b5226021a3473fd95143600b3a63ac484e1 | 869 | py | Python | checkov/cloudformation/checks/resource/aws/DocDBAuditLogs.py | niradler/checkov | 2628c6f28a5604efe3877d6eacc3044d2b66b7b1 | [
"Apache-2.0"
] | null | null | null | checkov/cloudformation/checks/resource/aws/DocDBAuditLogs.py | niradler/checkov | 2628c6f28a5604efe3877d6eacc3044d2b66b7b1 | [
"Apache-2.0"
] | 2 | 2022-03-07T07:15:32.000Z | 2022-03-21T07:21:17.000Z | checkov/cloudformation/checks/resource/aws/DocDBAuditLogs.py | niradler/checkov | 2628c6f28a5604efe3877d6eacc3044d2b66b7b1 | [
"Apache-2.0"
] | null | null | null | from checkov.cloudformation.checks.resource.base_resource_check import BaseResourceCheck
from checkov.common.parsers.node import DictNode
from checkov.common.models.enums import CheckResult, CheckCategories
check = DocDBAuditLogs()
| 36.208333 | 106 | 0.721519 |
818de49075c87063860cf616c4fbba1c27c95106 | 584 | py | Python | py/server/deephaven/server/plugin/__init__.py | lbooker42/deephaven-core | 2d04563f18ae914754b28041475c02770e57af15 | [
"MIT"
] | null | null | null | py/server/deephaven/server/plugin/__init__.py | lbooker42/deephaven-core | 2d04563f18ae914754b28041475c02770e57af15 | [
"MIT"
] | null | null | null | py/server/deephaven/server/plugin/__init__.py | lbooker42/deephaven-core | 2d04563f18ae914754b28041475c02770e57af15 | [
"MIT"
] | null | null | null | #
# Copyright (c) 2016-2022 Deephaven Data Labs and Patent Pending
#
import jpy
_JCallbackAdapter = jpy.get_type('io.deephaven.server.plugin.python.CallbackAdapter')
| 30.736842 | 116 | 0.717466 |
818e56826eb6b882afcd422dcb192ea6b72a334b | 3,133 | py | Python | mypy/test/testoutput.py | TimSimpsonR/mypy | 5e6fd6335e0662b0477e1d678269f33e6f4194ba | [
"PSF-2.0"
] | 1 | 2019-06-27T11:34:27.000Z | 2019-06-27T11:34:27.000Z | mypy/test/testoutput.py | silky/mypy | de6a8d3710df9f49109cb682f2092e4967bfb92c | [
"PSF-2.0"
] | null | null | null | mypy/test/testoutput.py | silky/mypy | de6a8d3710df9f49109cb682f2092e4967bfb92c | [
"PSF-2.0"
] | null | null | null | """Tests for parse tree pretty printing that preserves formatting
Test case descriptions are in file test/data/output.test.
"""
import os.path
import re
from typing import Undefined, Any
from mypy import build
from mypy.myunit import Suite, run_test
from mypy.test.helpers import assert_string_arrays_equal
from mypy.test.data import parse_test_cases
from mypy.test.config import test_data_prefix, test_temp_dir
from mypy.parse import parse
from mypy.output import OutputVisitor
from mypy.errors import CompileError
# Files which contain test case descriptions.
output_files = ['output.test']
def test_output(testcase):
    """Perform an identity source code transformation test case.

    Pretty-prints the test case's input program (optionally after semantic
    analysis) and asserts that the printed form matches the expected output
    -- i.e. that pretty printing preserves the original formatting.
    """
    expected = testcase.output
    # An empty expected-output section means the transformation must be an
    # exact identity: output == input.
    if expected == []:
        expected = testcase.input
    try:
        src = '\n'.join(testcase.input)
        # Parse and semantically analyze the source program.
        # Test case names with a special suffix get semantically analyzed. This
        # lets us test that semantic analysis does not break source code pretty
        # printing.
        if testcase.name.endswith('_SemanticAnalyzer'):
            result = build.build('main',
                                 target=build.SEMANTIC_ANALYSIS,
                                 program_text=src,
                                 flags=[build.TEST_BUILTINS],
                                 alt_lib_path=test_temp_dir)
            files = result.files
        else:
            files = {'main': parse(src, 'main')}
        a = []
        first = True
        # Produce an output containing the pretty-printed forms (with original
        # formatting) of all the relevant source files.
        for fnam in sorted(files.keys()):
            f = files[fnam]
            # Omit the builtins and files marked for omission.
            if (not f.path.endswith(os.sep + 'builtins.py') and
                    '-skip.' not in f.path):
                # Add file name + colon for files other than the first.
                if not first:
                    # NOTE(review): fix_path/remove_prefix are defined
                    # elsewhere in this module (not visible in this chunk).
                    a.append('{}:'.format(fix_path(remove_prefix(
                        f.path, test_temp_dir))))
                v = OutputVisitor()
                f.accept(v)
                s = v.output()
                if s != '':
                    a += s.split('\n')
                first = False
    except CompileError as e:
        # On a compile error, compare the reported messages instead of the
        # pretty-printed source.
        a = e.messages
    assert_string_arrays_equal(
        expected, a, 'Invalid source code output ({}, line {})'.format(
            testcase.file, testcase.line))


if __name__ == '__main__':
    import sys
    # NOTE(review): OutputSuite is defined elsewhere in this module.
    run_test(OutputSuite(), sys.argv[1:])
| 31.969388 | 79 | 0.578998 |
81909aed2c8da07e00d68c36e16753bcc2a5f66d | 2,395 | py | Python | tests/test_SklearnDecisionTreeConverters.py | c-bata/sklearn-onnx | ff3e2eb204991b5799fc606c265b2c283dbfc25c | [
"MIT"
] | 1 | 2019-05-06T20:54:02.000Z | 2019-05-06T20:54:02.000Z | tests/test_SklearnDecisionTreeConverters.py | PossieMP/https-github.com-onnx-sklearn-onnx | 48c60398d38e6937897d7c1506a8dcfcf28830a2 | [
"MIT"
] | null | null | null | tests/test_SklearnDecisionTreeConverters.py | PossieMP/https-github.com-onnx-sklearn-onnx | 48c60398d38e6937897d7c1506a8dcfcf28830a2 | [
"MIT"
] | 1 | 2020-04-09T07:46:52.000Z | 2020-04-09T07:46:52.000Z | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import unittest
from sklearn.tree import DecisionTreeClassifier
from sklearn.tree import DecisionTreeRegressor
from skl2onnx.common.data_types import onnx_built_with_ml
from test_utils import (
dump_one_class_classification,
dump_binary_classification,
dump_multiple_classification,
)
from test_utils import dump_multiple_regression, dump_single_regression
if __name__ == "__main__":
unittest.main()
| 38.015873 | 76 | 0.562004 |
8190b06cbd6a99f76c275c7c5d6181dfe355ab0d | 7,005 | py | Python | tests/test_inference.py | MihailSalnikov/microscopeimagequality | 22d1f7c7f6793a0ba6f64c2aea2bf3270c32301b | [
"Apache-2.0"
] | 77 | 2017-10-30T19:34:06.000Z | 2022-01-20T17:15:10.000Z | tests/test_inference.py | MihailSalnikov/microscopeimagequality | 22d1f7c7f6793a0ba6f64c2aea2bf3270c32301b | [
"Apache-2.0"
] | 3 | 2020-07-02T22:20:46.000Z | 2021-08-25T14:39:43.000Z | tests/test_inference.py | MihailSalnikov/microscopeimagequality | 22d1f7c7f6793a0ba6f64c2aea2bf3270c32301b | [
"Apache-2.0"
] | 40 | 2017-12-22T01:21:13.000Z | 2022-03-13T22:28:59.000Z | import logging
import os
import tempfile
import PIL.Image
import numpy
import tensorflow
import microscopeimagequality.constants
import microscopeimagequality.data_provider
import microscopeimagequality.evaluation
import microscopeimagequality.prediction
| 44.056604 | 125 | 0.63469 |
8190c488725fd5780c71f8986d5214f9a0371832 | 498 | py | Python | config.py | hiankun/qb_test | ab031d74d903cdb8845a033d290611b444a5abdb | [
"MIT"
] | 4 | 2015-03-07T19:15:28.000Z | 2021-01-24T15:13:30.000Z | config.py | hiankun/qb_test | ab031d74d903cdb8845a033d290611b444a5abdb | [
"MIT"
] | null | null | null | config.py | hiankun/qb_test | ab031d74d903cdb8845a033d290611b444a5abdb | [
"MIT"
] | 1 | 2017-07-03T06:55:24.000Z | 2017-07-03T06:55:24.000Z | """
QuickBot wiring config.
Specifies which pins are used for motor control, IR sensors and wheel encoders.
"""
# Motor pins: (dir1_pin, dir2_pin, pwd_pin)
RIGHT_MOTOR_PINS = 'P8_12', 'P8_10', 'P9_14'
LEFT_MOTOR_PINS = 'P8_14', 'P8_16', 'P9_16'
# IR sensors (clock-wise, starting with the rear left sensor):
# rear-left, front-left, front, front-right, rear-right
IR_PINS = ('P9_38', 'P9_40', 'P9_36', 'P9_35', 'P9_33')
# Wheel encoder sensors: (left, right)
ENC_PINS = ('P9_39', 'P9_37')
| 23.714286 | 79 | 0.690763 |
8191a9d3234f49c843978a8688358673f859017f | 8,912 | py | Python | tools/tests/skimage_self_test.py | yinquan529/platform-external-skia | 1adfb847fe565e53d2e26e35b04c8dc112b7513a | [
"BSD-3-Clause"
] | 1 | 2016-05-04T10:08:50.000Z | 2016-05-04T10:08:50.000Z | tools/tests/skimage_self_test.py | yinquan529/platform-external-skia | 1adfb847fe565e53d2e26e35b04c8dc112b7513a | [
"BSD-3-Clause"
] | null | null | null | tools/tests/skimage_self_test.py | yinquan529/platform-external-skia | 1adfb847fe565e53d2e26e35b04c8dc112b7513a | [
"BSD-3-Clause"
] | 1 | 2020-01-16T03:34:53.000Z | 2020-01-16T03:34:53.000Z | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Self-test for skimage.
import filecmp
import os
import subprocess
import sys
import tempfile
# Find a path to the binary to use. Iterates through a list of possible
# locations the binary may be.
def PickBinaryPath(base_dir):
  """Return the path of the first skimage binary found under base_dir.

  The known Debug/Release build-output locations are probed in a fixed
  order; BinaryNotFoundException is raised if none of them exists.
  """
  candidates = (
      'out/Debug/skimage',
      'out/Release/skimage',
      'xcodebuild/Debug/skimage',
      'xcodebuild/Release/skimage',
  )
  for relative_path in candidates:
    candidate = os.path.join(base_dir, relative_path)
    if os.path.exists(candidate):
      return candidate
  raise BinaryNotFoundException
# Quit early if two files have different content.
def test_invalid_file(file_dir, skimage_binary):
  """ Test the return value of skimage when an invalid file is decoded.
      If there is no expectation file, or the file expects a particular
      result, skimage should return nonzero indicating failure.
      If the file has no expectation, or ignore-failure is set to true,
      skimage should return zero indicating success. """
  invalid_file = os.path.join(file_dir, "skimage", "input", "bad-images",
                              "invalid.png")

  # No expectations file: decoding an invalid image must exit nonzero.
  args = [skimage_binary, "--readPath", invalid_file]
  result = subprocess.call(args)
  if 0 == result:
    print "'%s' should have reported failure!" % " ".join(args)
    exit(1)

  # Directory holding all expectations files
  expectations_dir = os.path.join(file_dir, "skimage", "input", "bad-images")

  # Expectations file expecting a valid decode: must still exit nonzero.
  incorrect_expectations = os.path.join(expectations_dir,
                                        "incorrect-results.json")
  args = [skimage_binary, "--readPath", invalid_file,
          "--readExpectationsPath", incorrect_expectations]
  result = subprocess.call(args)
  if 0 == result:
    print "'%s' should have reported failure!" % " ".join(args)
    exit(1)

  # Empty expectations: skimage succeeds, and its output must mention
  # the image as "Missing" from the expectations.
  empty_expectations = os.path.join(expectations_dir, "empty-results.json")
  output = subprocess.check_output([skimage_binary, "--readPath", invalid_file,
                                    "--readExpectationsPath",
                                    empty_expectations],
                                   stderr=subprocess.STDOUT)
  if not "Missing" in output:
    # Another test (in main()) tests to ensure that "Missing" does not appear
    # in the output. That test could be passed if the output changed so
    # "Missing" never appears. This ensures that an error is not missed if
    # that happens.
    print "skimage output changed! This may cause other self tests to fail!"
    exit(1)

  # Ignore failure: skimage succeeds, and its output must report the decode
  # under "failures".
  ignore_expectations = os.path.join(expectations_dir, "ignore-results.json")
  output = subprocess.check_output([skimage_binary, "--readPath", invalid_file,
                                    "--readExpectationsPath",
                                    ignore_expectations],
                                   stderr=subprocess.STDOUT)
  if not "failures" in output:
    # Another test (in main()) tests to ensure that "failures" does not
    # appear in the output. That test could be passed if the output changed
    # so "failures" never appears. This ensures that an error is not missed
    # if that happens.
    print "skimage output changed! This may cause other self tests to fail!"
    exit(1)
def test_incorrect_expectations(file_dir, skimage_binary):
""" Test that comparing to incorrect expectations fails, unless
ignore-failures is set to true. """
valid_file = os.path.join(file_dir, "skimage", "input",
"images-with-known-hashes",
"1209453360120438698.png")
expectations_dir = os.path.join(file_dir, "skimage", "input",
"images-with-known-hashes")
incorrect_results = os.path.join(expectations_dir,
"incorrect-results.json")
args = [skimage_binary, "--readPath", valid_file, "--readExpectationsPath",
incorrect_results]
result = subprocess.call(args)
if 0 == result:
print "'%s' should have reported failure!" % " ".join(args)
exit(1)
ignore_results = os.path.join(expectations_dir, "ignore-failures.json")
subprocess.check_call([skimage_binary, "--readPath", valid_file,
"--readExpectationsPath", ignore_results])
if __name__ == "__main__":
main()
| 44.78392 | 81 | 0.632518 |
8193d66190779e7816666311c0493c349ff06765 | 121 | py | Python | pymachine/condition.py | landrew31/pymachine | 117ad7aa3694a1ccb0be207cc931de8c2c345567 | [
"MIT"
] | 1 | 2018-02-21T08:57:08.000Z | 2018-02-21T08:57:08.000Z | pymachine/condition.py | landrew31/pymachine | 117ad7aa3694a1ccb0be207cc931de8c2c345567 | [
"MIT"
] | null | null | null | pymachine/condition.py | landrew31/pymachine | 117ad7aa3694a1ccb0be207cc931de8c2c345567 | [
"MIT"
] | null | null | null | from collections import namedtuple
# Immutable (state, symbol) pair keying a state machine's transition table.
Condition = namedtuple('Condition', ('current_state', 'input_character'))
| 17.285714 | 41 | 0.719008 |
8195c711df03d29790fdcc4e7f130ef66986f549 | 788 | py | Python | examples/simple_lakehouse/simple_lakehouse/assets.py | bitdotioinc/dagster | 4fe395a37b206b1a48b956fa5dd72bf698104cca | [
"Apache-2.0"
] | 2 | 2021-06-21T17:50:26.000Z | 2021-06-21T19:14:23.000Z | examples/simple_lakehouse/simple_lakehouse/assets.py | bitdotioinc/dagster | 4fe395a37b206b1a48b956fa5dd72bf698104cca | [
"Apache-2.0"
] | 7 | 2022-03-16T06:55:04.000Z | 2022-03-18T07:03:25.000Z | examples/simple_lakehouse/simple_lakehouse/assets.py | bitdotioinc/dagster | 4fe395a37b206b1a48b956fa5dd72bf698104cca | [
"Apache-2.0"
] | 1 | 2021-08-18T17:21:57.000Z | 2021-08-18T17:21:57.000Z | """Asset definitions for the simple_lakehouse example."""
import pandas as pd
from lakehouse import Column, computed_table, source_table
from pyarrow import date32, float64, string
sfo_q2_weather_sample_table = source_table(
path="data", columns=[Column("tmpf", float64()), Column("valid_date", string())],
)
| 41.473684 | 97 | 0.757614 |
8196db5a9a3e9b1ef0fc71ca07363d90aa3c3237 | 4,386 | py | Python | aindex/demo.py | ad3002/Lyrebird | 8c0a186e32d61189f073401152c52a89bfed46ed | [
"MIT"
] | null | null | null | aindex/demo.py | ad3002/Lyrebird | 8c0a186e32d61189f073401152c52a89bfed46ed | [
"MIT"
] | null | null | null | aindex/demo.py | ad3002/Lyrebird | 8c0a186e32d61189f073401152c52a89bfed46ed | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#@created: 07.01.2018
#@author: Aleksey Komissarov
#@contact: ad3002@gmail.com
from aindex import *
settings = {
"index_prefix": "tests/kmers.23",
"aindex_prefix": "tests/kmers.23",
"reads_file": "tests/reads.reads",
}
index = load_aindex(settings)
k = 23
sequence = "TAAGTTATTATTTAGTTAATACTTTTAACAATATTATTAAGGTATTTAAAAAATACTATTATAGTATTTAACATAGTTAAATACCTTCCTTAATACTGTTAAATTATATTCAATCAATACATATATAATATTATTAAAATACTTGATAAGTATTATTTAGATATTAGACAAATACTAATTTTATATTGCTTTAATACTTAATAAATACTACTTATGTATTAAGTAAATATTACTGTAATACTAATAACAATATTATTACAATATGCTAGAATAATATTGCTAGTATCAATAATTACTAATATAGTATTAGGAAAATACCATAATAATATTTCTACATAATACTAAGTTAATACTATGTGTAGAATAATAAATAATCAGATTAAAAAAATTTTATTTATCTGAAACATATTTAATCAATTGAACTGATTATTTTCAGCAGTAATAATTACATATGTACATAGTACATATGTAAAATATCATTAATTTCTGTTATATATAATAGTATCTATTTTAGAGAGTATTAATTATTACTATAATTAAGCATTTATGCTTAATTATAAGCTTTTTATGAACAAAATTATAGACATTTTAGTTCTTATAATAAATAATAGATATTAAAGAAAATAAAAAAATAGAAATAAATATCATAACCCTTGATAACCCAGAAATTAATACTTAATCAAAAATGAAAATATTAATTAATAAAAGTGAATTGAATAAAATTTTGAAAAAAATGAATAACGTTATTATTTCCAATAACAAAATAAAACCACATCATTCATATTTTTTAATAGAGGCAAAAGAAAAAGAAATAAACTTTTATGCTAACAATGAATACTTTTCTGTCAAATGTAATTTAAATAAAAATATTGATATTCTTGAACAAGGCTCCTTAATTGTTAAAGGAAAAATTTTTAACGATCTTATTAATGGCATAAAAGAAGAGATTATTACTATTCAAGAAAAAGATCAAACACTTTTGGTTAAAACAAAAAAAACAAGTATTAATTTAAACACAATTAATGTGAATGAATTTCCAAGAATAAGGTTTAATGAAAAAAACGATTTAAGTGAATTTAATCAATTCAAAATAAATTATTCACTTTTAGTAAAAGGCATTAAAAAAATTTTTCACTCAGTTTCAAATAATCGTGAAATATCTTCTAAATTTAATGGAGTAAATTTCAATGGATCCAATGGAAAAGAAATATTTTTAGAAGCTTCTGACACTTATAAACTATCTGTTTTTGAGATAAAGCAAGAAACAGAACCATTTGATTTCATTTTGGAGAGTAATTTACTTAGTTTCATTAATTCTTTTAATCCTGAAGAAGATAAATCTATTGTTTTTTATTACAGAAAAGATAATAAAGATAGCTTTAGTACAGAAATGTTGATTTCAATGGATAACTTTATGATTAGTTACACATCGGTTAATGAAAAATTTCCAGAGGTAAACTACTTTTTTGAATTTGAACCTGAAACTAAAATAGTTGTTCAAAAAAATGAATTAAAAGATGCACTTCAAAGAATTCAAACTTTGGCTCAAAATGAAAGAACTTTTTTATGCGATATGCAAATTAACAGTTCTGAATTAAAAATAAGAGCTATTGTTAATAATATCGGAAATTCTCTTGAGGAAATTTCTTGTCTTAAATTTGAAGGTTATAAACTTAATATTTCTTTTAACCCAAGTTCTCTATTAGATCACATAGAGTCTTTTGAATCAAATGAAATAAATTTTGATTTCCAAGGAAATAGTAAGTATTTTTTGATAACCTCTAAAAGTGAACCTGAACTTAAGCAAATATTGGTTCCTTCAAGATAATGAATCTTTACGATCTTTTAGAACTACCAACTACAGCATCAATAAAAGAAATAAAAATTGCTTATAAAAGATTAGCAAAGCGTTATCACCCTGATGTAAATAAATTAGGTTCGCAAACTTTTGTTGAAATTAATAATGCTTATTCAATATTAAGTGATCC
TAACCAAAAGGAAAAATATGATTCAATGCTGAAAGTTAATGATTTTCAAAATCGCATCAAAAATTTAGATATTAGTGTTAGATGACATGAAAATTTCATGGAAGAACTCGAACTTCGTAAGAACTGAGAATTTGATTTTTTTTCATCTGATGAAGATTTCTTTTATTCTCCATTTACAAAAA"
# Demo of the aindex API: k-mer frequency lookup, read iteration, distances
# and layouts. `sequence`, `k` and `index` are set up earlier in this script.
test_kmer = "TAAGTTATTATTTAGTTAATACT"
right_kmer = "AGTTAATACTTTTAACAATATTA"

print("Task 1. Get kmer frequency")
# raw_input("\nReady?")
for i in range(len(sequence)-k+1):
    kmer = sequence[i:i+k]
    print("Position %s kmer %s freq = %s" % (i, kmer, index[kmer]))

print("Task 2. Iter read by read, print the first 20 reads")
# raw_input("\nReady?")
for i, read in enumerate(index.iter_reads()):
    if i == 20:
        break
    print(i, read)

print("Task 3. Iter reads by kmer, returs (start, next_read_start, read, pos_if_uniq|None, all_poses)")
# raw_input("\nReady?")
for read in iter_reads_by_kmer(test_kmer, index):
    print(read)

print("Task 4. Get distances in reads for two kmers, returns a list of (rid, left_kmer_pos, right_kmer_pos) tuples.")
# raw_input("\nReady?")
print(get_left_right_distances(test_kmer, right_kmer, index))

print("Task 5. Get layout for kmer, returns (max_pos, reads, lefts, rights, rids, starts), for details see source code")
# raw_input("\nReady?")
max_pos, reads, lefts, rights, rids, starts = get_layout_for_kmer(right_kmer, index)
print("Central layout:")
for read in reads:
    print(read)
print("Left flanks:")
print(lefts)
print("Right flanks:")
print(rights)

print("Task 6. Iter reads by sequence, returs (start, next_read_start, read, pos_if_uniq|None, all_poses)")
# raw_input("\nReady?")
sequence = "AATATTATTAAGGTATTTAAAAAATACTATTATAGTATTTAACATA"
for read in iter_reads_by_sequence(sequence, index):
    print(read)

print("Task 7. Iter reads by kmer with reads as SE, returns (start, next_read_start, subread, kmere_pos, -1|0|1 for spring_pos, was_reversed, poses_in_read)")
# raw_input("\nReady?")
user_reads = set()
sequence = "AATATTATTAAGGTATTTAAAAAATACTATTATAGTATTTAACATA"
# NOTE(review): `kmer` here is the stale loop variable left over from the
# Task 1 loop (the last k-mer of the long demo sequence) -- presumably
# `test_kmer` was intended; confirm against the aindex examples.
for rid, nextrid, read, pos, spring_pos, was_reversed, poses in get_reads_se_by_kmer(kmer, index, user_reads, k=23):
    print(rid, read, pos)
| 58.48 | 2,183 | 0.858413 |
81972bcd61b6a76b4f3ac7d2990ee8f85d6af35f | 1,013 | py | Python | tests/testresources/pytest_resource_path_ini.py | yukihiko-shinoda/pytest-resource-path | bc56c4b5f2c8f3138baeac7f145717f6a70af7b6 | [
"MIT"
] | 5 | 2020-09-06T01:54:28.000Z | 2021-06-14T11:10:09.000Z | tests/testresources/pytest_resource_path_ini.py | yukihiko-shinoda/pytest-resource-path | bc56c4b5f2c8f3138baeac7f145717f6a70af7b6 | [
"MIT"
] | 5 | 2020-05-23T08:30:01.000Z | 2021-05-01T04:58:06.000Z | tests/testresources/pytest_resource_path_ini.py | yukihiko-shinoda/pytest-resource-path | bc56c4b5f2c8f3138baeac7f145717f6a70af7b6 | [
"MIT"
] | null | null | null | """Implements test for pytest-resource-path Fixtures with pytest.ini."""
from pathlib import Path
import pytest
def test_resource_path_ini(resource_path, request):
"""Fixture resource_path should be following absolute path."""
assert resource_path == Path(str(request.fspath)).parents[1] / Path(
"data/test_package/test_module_something/test_resource_path_ini"
)
def test_resource_path_root_ini(resource_path_root, request):
"""Fixture resource_path_root should be following absolute path."""
assert resource_path_root == Path(str(request.fspath)).parents[1] / Path("data")
# Reason: To define fixture in same module. pylint: disable=redefined-outer-name
def test_resource_path_root_scope_package_ini(resource_path_root_scope_package_ini, request):
assert resource_path_root_scope_package_ini == Path(str(request.fspath)).parents[1] / Path("data")
| 37.518519 | 102 | 0.785785 |
8197395414f35f5a57891af7ddfab20969d9cd9f | 301 | py | Python | 17-files/read-file-with-try-block.py | johnehunt/Python3Intro | 2a41ce488aac11bb3928ea81e57be1c2c8acdac2 | [
"Apache-2.0"
] | 1 | 2020-11-03T19:46:25.000Z | 2020-11-03T19:46:25.000Z | 14-files/read-file-with-try-block.py | johnehunt/PythonIntroDS | 7e9d5c5494191cd68bc71e140df5fb30290a8da6 | [
"Apache-2.0"
] | null | null | null | 14-files/read-file-with-try-block.py | johnehunt/PythonIntroDS | 7e9d5c5494191cd68bc71e140df5fb30290a8da6 | [
"Apache-2.0"
] | 1 | 2019-09-21T08:24:46.000Z | 2019-09-21T08:24:46.000Z | # Illustrates combining exception / error handling
# with file access
print('Start')
try:
with open('myfile2.txt', 'r') as f:
lines = f.readlines()
for line in lines:
print(line, end='')
except FileNotFoundError as err:
print('oops')
print(err)
print('Done')
| 20.066667 | 50 | 0.61794 |
81977d254cadb7ee5093cb2ff32e221394f8fe36 | 8,455 | py | Python | Miscellaneous/test_script_pymc3/multinominal.py | junpenglao/Planet_Sakaar_Data_Science | 73d9605b91b774a56d18c193538691521f679f16 | [
"MIT"
] | 51 | 2018-04-08T19:53:15.000Z | 2021-11-24T21:08:25.000Z | Miscellaneous/test_script_pymc3/multinominal.py | junpenglao/Planet_Sakaar_Data_Science | 73d9605b91b774a56d18c193538691521f679f16 | [
"MIT"
] | 2 | 2018-05-29T20:50:37.000Z | 2020-09-12T07:14:08.000Z | Miscellaneous/test_script_pymc3/multinominal.py | junpenglao/Planet_Sakaar_Data_Science | 73d9605b91b774a56d18c193538691521f679f16 | [
"MIT"
] | 13 | 2018-07-21T09:53:10.000Z | 2021-06-07T19:06:26.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 11 13:30:53 2017
@author: laoj
"""
import numpy as np
import pymc3 as pm
import theano.tensor as tt
from pymc3.distributions.distribution import Discrete, draw_values, generate_samples, infer_shape
from pymc3.distributions.dist_math import bound, logpow, factln, Cholesky
from pymc3.math import tround
#%% n scaler, p 1D
#n = 183
n = np.array([[106],
[143],
[102],
[116],
[183],
[150]])
p = np.array([[ 0.21245365, 0.41223126, 0.37531509],
[ 0.13221011, 0.50537169, 0.3624182 ],
[ 0.08813779, 0.54447146, 0.36739075],
[ 0.18932804, 0.4630365, 0.34763546],
[ 0.11006472, 0.49227755, 0.39765773],
[ 0.17886852, 0.41098834, 0.41014314]])
# p = np.array([ 0.21245365, 0.41223126, 0.37531509])
n = tt.as_tensor_variable(n)
p = tt.as_tensor_variable(p)
n = np.squeeze(n)
n = tt.shape_padright(n) if n.ndim == 1 else tt.as_tensor_variable(n)
n.ndim
n * p
#%%
n = np.array([[106],
[143],
[102],
[116],
[183],
[150]])
#n = 183
p = np.array([[ 0.21245365, 0.41223126, 0.37531509],
[ 0.13221011, 0.50537169, 0.3624182 ],
[ 0.08813779, 0.54447146, 0.36739075],
[ 0.18932804, 0.4630365, 0.34763546],
[ 0.11006472, 0.49227755, 0.39765773],
[ 0.17886852, 0.41098834, 0.41014314]])
#p = np.array([[ 0.21245365, 0.41223126, 0.37531509]])
#n = tt.as_tensor_variable(n)
p = tt.as_tensor_variable(p)
#%%
Multinomial.dist(1,np.ones(3)/3,shape=(6, 3)).mode.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).p.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).n.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).mean.eval()
#%%
Multinomial.dist(n,p,shape=(6, 3)).random()
#%%
counts =np.asarray([[19, 50, 37],
[21, 67, 55],
[11, 53, 38],
[17, 54, 45],
[24, 93, 66],
[27, 53, 70]])
Multinomial.dist(n,p,shape=(6, 3)).logp(x=counts).eval()
#%%
with pm.Model() as model:
like = Multinomial('obs_ABC', n, p, observed=counts, shape=counts.shape)
#%%
paramall = (
[[.25, .25, .25, .25], 4, 2],
[[.25, .25, .25, .25], (1, 4), 3],
# 3: expect to fail
# [[.25, .25, .25, .25], (10, 4)],
[[.25, .25, .25, .25], (10, 1, 4), 5],
# 5: expect to fail
# [[[.25, .25, .25, .25]], (2, 4), [7, 11]],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (2, 4), 13],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (2, 4), [17, 19]],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (1, 2, 4), [23, 29]],
[[[.25, .25, .25, .25],
[.25, .25, .25, .25]], (10, 2, 4), [31, 37]],
)
for p, shape, n in paramall:
with pm.Model() as model:
m = Multinomial('m', n=n, p=np.asarray(p), shape=shape)
print(m.random().shape)
#%%
counts =np.asarray([[19, 50, 37],
[21, 67, 55],
[11, 53, 38],
[17, 54, 45],
[24, 93, 66],
[27, 53, 70]])
n = np.array([[106],
[143],
[102],
[116],
[183],
[150]])
sparsity=1 #not zero
beta=np.ones(counts.shape) #input for dirichlet
with pm.Model() as model:
theta=pm.Dirichlet('theta',beta/sparsity, shape = counts.shape)
transition=pm.Multinomial('transition',n,theta,observed=counts)
trace=pm.sample(1000)
#%%
import numpy as np
import pymc3 as pm
import theano.tensor as tt
def norm_simplex(p):
    """Normalize each row of *p* onto the probability simplex.

    Divides every row by its own sum so each row sums to one. (The
    previous docstring said "sum-to-zero transformation", but the code
    plainly rescales rows to unit sum.) Only uses ``.T``, ``.sum`` and
    elementwise division, so it works for numpy arrays and theano tensors
    alike.
    """
    row_totals = p.sum(axis=-1)
    return (p.T / row_totals).T
def ccmodel(beta, x):
    """Community composition model.

    Computes exp(dot(x, log(beta))) and normalizes each row to sum to one.
    """
    log_beta = tt.log(beta)
    mixed = tt.dot(x, log_beta)
    return norm_simplex(tt.exp(mixed))
n, k, r = 25, 10, 2
x = np.random.randint(0, 1000, size=(n, k))
y = np.random.randint(0, 1000, size=n)
design = np.vstack((np.ones(25), np.random.randint(2, size=n))).T
with pm.Model() as model:
# Community composition
pi = pm.Dirichlet('pi', np.ones(k), shape=(r, k))
comp = pm.Deterministic('comp', ccmodel(pi, design))
# Inferred population density of observed taxa (hierarchical model)
rho = pm.Normal('rho', shape=r)
tau = pm.Lognormal('tau')
dens = pm.Lognormal('dens', tt.dot(design, rho), tau=tau, shape=n)
# Community composition *with* the spike
expected_recovery = as_col(1 / dens)
_comp = norm_simplex(tt.concatenate((comp, expected_recovery), axis=1))
# Variability
mu = pm.Lognormal('mu')
# Data
obs = DirichletMultinomial('obs', _comp * mu,
observed=tt.concatenate((x, as_col(y)), axis=1))
pm.sample(1000) | 30.970696 | 135 | 0.543465 |
819a475b581f4721e5c8b8ee781500a5749d808c | 8,054 | py | Python | transformation_fnc.py | usrmaia/transformation-fnc | 37ef77708892417ac985bb6f1cf62285834560d8 | [
"MIT"
] | null | null | null | transformation_fnc.py | usrmaia/transformation-fnc | 37ef77708892417ac985bb6f1cf62285834560d8 | [
"MIT"
] | null | null | null | transformation_fnc.py | usrmaia/transformation-fnc | 37ef77708892417ac985bb6f1cf62285834560d8 | [
"MIT"
] | null | null | null | from useful import *
from os import system
if __name__ == "__main__":
system("cls")
#system("clear")
while(True):
formula = input("Frmula: ")
if formula == 'q': break
print(formula)
print("Removendo implicaes: ")
A1 = remove_implication(formula)
print(A1)
print("Aplicando Lei de Morgan: ")
A2 = morgan_law(A1)
print(A2)
print("Removendo dupla negao: ")
A3 = remove_double_negation(A2)
print(A3)
print("Aplicando distributividade: ")
A4 = distributivity(A3)
print(A4)
print("Aplicando distributividade com novo tomo: ")
A5 = distributivity_new_aton(A3)
print(A5)
system("pause") | 47.099415 | 220 | 0.661286 |
819a9225919d23f1c377fd7749da7d7ea0d1e851 | 2,896 | py | Python | src/implant/commands/__init__.py | diefans/debellator | 44203174ef8e0702be577a9e08dedde40e3ce1fe | [
"Apache-2.0"
] | 2 | 2018-09-08T00:04:43.000Z | 2021-07-20T01:31:09.000Z | src/implant/commands/__init__.py | diefans/implant | 44203174ef8e0702be577a9e08dedde40e3ce1fe | [
"Apache-2.0"
] | null | null | null | src/implant/commands/__init__.py | diefans/implant | 44203174ef8e0702be577a9e08dedde40e3ce1fe | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Oliver Berger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Core features
"""
import asyncio
import concurrent
import logging
import os
import time
from implant import core
log = logging.getLogger(__name__)
| 28.96 | 85 | 0.648481 |
819b2d29e2f76cc772587cf27c82ea7f7e151615 | 73 | py | Python | trainer/utils/__init__.py | chriszhou0916/czai4art | 2fecc9b808fc16cb74c4c29be8a8770ffe247b15 | [
"MIT"
] | null | null | null | trainer/utils/__init__.py | chriszhou0916/czai4art | 2fecc9b808fc16cb74c4c29be8a8770ffe247b15 | [
"MIT"
] | null | null | null | trainer/utils/__init__.py | chriszhou0916/czai4art | 2fecc9b808fc16cb74c4c29be8a8770ffe247b15 | [
"MIT"
] | null | null | null | from trainer.utils.losses import *
from trainer.utils import custom_ssim
| 24.333333 | 37 | 0.835616 |
819bd18a4722e9a3211561882e51cf2324399bde | 1,693 | py | Python | src/Testing/ZopeTestCase/__init__.py | tseaver/Zope-RFA | 08634f39b0f8b56403a2a9daaa6ee4479ef0c625 | [
"ZPL-2.1"
] | 2 | 2015-12-21T10:34:56.000Z | 2017-09-24T11:07:58.000Z | src/Testing/ZopeTestCase/__init__.py | MatthewWilkes/Zope | 740f934fc9409ae0062e8f0cd6dcfd8b2df00376 | [
"ZPL-2.1"
] | null | null | null | src/Testing/ZopeTestCase/__init__.py | MatthewWilkes/Zope | 740f934fc9409ae0062e8f0cd6dcfd8b2df00376 | [
"ZPL-2.1"
] | null | null | null | ##############################################################################
#
# Copyright (c) 2005 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Names exported by the ZopeTestCase package
"""
import ZopeLite as Zope2
import utils
import layer
from ZopeLite import hasProduct
from ZopeLite import installProduct
from ZopeLite import hasPackage
from ZopeLite import installPackage
from ZopeLite import _print
from ZopeTestCase import folder_name
from ZopeTestCase import user_name
from ZopeTestCase import user_password
from ZopeTestCase import user_role
from ZopeTestCase import standard_permissions
from ZopeTestCase import ZopeTestCase
from ZopeTestCase import FunctionalTestCase
from PortalTestCase import portal_name
from PortalTestCase import PortalTestCase
from sandbox import Sandboxed
from functional import Functional
from base import TestCase
from base import app
from base import close
from warnhook import WarningsHook
from unittest import main
from zopedoctest import ZopeDocTestSuite
from zopedoctest import ZopeDocFileSuite
from zopedoctest import FunctionalDocTestSuite
from zopedoctest import FunctionalDocFileSuite
import zopedoctest as doctest
import transaction
import placeless
Zope = Zope2
| 29.189655 | 78 | 0.759598 |
819c97d3aababb49d55ab8197540259628b81475 | 3,132 | py | Python | CIM14/CPSM/Equipment/LoadModel/SubLoadArea.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
] | 58 | 2015-04-22T10:41:03.000Z | 2022-03-29T16:04:34.000Z | CIM14/CPSM/Equipment/LoadModel/SubLoadArea.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
] | 12 | 2015-08-26T03:57:23.000Z | 2020-12-11T20:14:42.000Z | CIM14/CPSM/Equipment/LoadModel/SubLoadArea.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
] | 35 | 2015-01-10T12:21:03.000Z | 2020-09-09T08:18:16.000Z | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.CPSM.Equipment.LoadModel.EnergyArea import EnergyArea
| 36 | 129 | 0.685185 |
819e28662a5c7fe45f90d593ee3b3db086815aa5 | 369 | py | Python | bin/clean_pdb.py | caixiuhong/Stable-MCCE | 186bdafdf1d631994b2cdd6ec6a548383f559929 | [
"MIT"
] | null | null | null | bin/clean_pdb.py | caixiuhong/Stable-MCCE | 186bdafdf1d631994b2cdd6ec6a548383f559929 | [
"MIT"
] | null | null | null | bin/clean_pdb.py | caixiuhong/Stable-MCCE | 186bdafdf1d631994b2cdd6ec6a548383f559929 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import sys
HATOMS = ["HG", "HD", "HE", "HH"]
lines = open(sys.argv[1]).readlines()
for line in lines:
if line[:6] == "ATOM " or line[:6] == "HETATM":
if line[17:20] == "WAT":
continue
if line[13] == "H":
continue
if line[12:14] in HATOMS:
continue
print(line.strip("\n"))
| 23.0625 | 52 | 0.490515 |
819e7c6d0a67bfff73dacc1d420e6cd83f55ee9f | 7,693 | py | Python | old_logen/pylogen/OutputBook.py | leuschel/logen | 0ea806f54628162615e25177c3ed98f6b2c27935 | [
"Apache-2.0"
] | 14 | 2015-10-16T11:35:30.000Z | 2021-05-12T15:31:16.000Z | old_logen/pylogen/OutputBook.py | leuschel/logen | 0ea806f54628162615e25177c3ed98f6b2c27935 | [
"Apache-2.0"
] | null | null | null | old_logen/pylogen/OutputBook.py | leuschel/logen | 0ea806f54628162615e25177c3ed98f6b2c27935 | [
"Apache-2.0"
] | 5 | 2015-10-16T12:44:41.000Z | 2019-10-02T02:45:38.000Z | import Pmw
import os
import re
from FastIndex import FastIndex, timer
from PrologFrame import PrologFrame
from TerminalFrame import TerminalFrame
regexp = re.compile('[a-zA-z0-9_]+__[0-9]+')
| 34.191111 | 100 | 0.573508 |
819ee9aceebbd56d2a4d4ed207f6ae47bb68ff70 | 5,468 | py | Python | back/db.py | belshoff/Agenda | a9d3d1a80d6b3c00e4d0055847d5ed2bb6c6d7d1 | [
"Apache-2.0"
] | null | null | null | back/db.py | belshoff/Agenda | a9d3d1a80d6b3c00e4d0055847d5ed2bb6c6d7d1 | [
"Apache-2.0"
] | null | null | null | back/db.py | belshoff/Agenda | a9d3d1a80d6b3c00e4d0055847d5ed2bb6c6d7d1 | [
"Apache-2.0"
] | null | null | null | import sqlite3
with sqlite3.connect('storage.db') as conn:
cursor = conn.cursor()
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS Produtos (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
price REAL,
compra_id INTEGER,
FOREIGN KEY (compra_id) REFERENCES Compras(id)
);
"""
)
cursor.execute(
"""
CREATE TABLE IF NOT EXISTS Compras (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
date TEXT NOT NULL
);
"""
) | 35.277419 | 126 | 0.492502 |
819ef83975adf31b6a1082bbb314f1833657807e | 246 | bzl | Python | tools/build_rules/gtk_dependent.bzl | Ewpratten/frc_971_mirror | 3a8a0c4359f284d29547962c2b4c43d290d8065c | [
"BSD-2-Clause"
] | 39 | 2021-06-18T03:22:30.000Z | 2022-03-21T15:23:43.000Z | tools/build_rules/gtk_dependent.bzl | Ewpratten/frc_971_mirror | 3a8a0c4359f284d29547962c2b4c43d290d8065c | [
"BSD-2-Clause"
] | 10 | 2021-06-18T03:22:19.000Z | 2022-03-18T22:14:15.000Z | tools/build_rules/gtk_dependent.bzl | Ewpratten/frc_971_mirror | 3a8a0c4359f284d29547962c2b4c43d290d8065c | [
"BSD-2-Clause"
] | 4 | 2021-08-19T19:20:04.000Z | 2022-03-08T07:33:18.000Z | disable_gtk_binaries = True
| 24.6 | 39 | 0.739837 |
819fb9161fe72a4b226194ee2bfb3c7088844885 | 743 | py | Python | vectorize.py | tomohiroando/recipe_gan | 00f7418dfc5ee732e6d2de33d9c397e24f304864 | [
"MIT"
] | null | null | null | vectorize.py | tomohiroando/recipe_gan | 00f7418dfc5ee732e6d2de33d9c397e24f304864 | [
"MIT"
] | null | null | null | vectorize.py | tomohiroando/recipe_gan | 00f7418dfc5ee732e6d2de33d9c397e24f304864 | [
"MIT"
] | null | null | null | import sys
from gensim import models
from gensim.models.doc2vec import LabeledSentence
import pickle
with open('corpus_text', 'rb') as f:
corpus = pickle.load(f)
sentences = corpus_to_sentences(corpus)
model = models.Doc2Vec(vector_size=400, window=15, alpha=.025, min_alpha=.025, min_count=1, sample=1e-6)
model.build_vocab(sentences)
print(len(corpus))
model.train(sentences, total_examples=len(corpus), epochs=20)
model.save('doc2vec.model') | 28.576923 | 104 | 0.722746 |
81a0eab42248c18facd3ce51c6e6f97ebfbcd166 | 6,206 | py | Python | lxman/registry.py | stuxcrystal/lxman | ea0b44a8b9424b3489e393591f5384a986f583a3 | [
"MIT"
] | 1 | 2017-12-04T18:48:21.000Z | 2017-12-04T18:48:21.000Z | lxman/registry.py | stuxcrystal/lxman | ea0b44a8b9424b3489e393591f5384a986f583a3 | [
"MIT"
] | null | null | null | lxman/registry.py | stuxcrystal/lxman | ea0b44a8b9424b3489e393591f5384a986f583a3 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
from collections import UserDict
from itertools import count
import shutil
import winreg
import uuid
PATH = "Software\\Microsoft\\Windows\\CurrentVersion\\Lxss"
KEY = winreg.HKEY_CURRENT_USER
def __exit__(self, *exc):
self._state = 1
return False
class _Lxss(object):
default_distribution = RegistryDescriptor("DefaultDistribution")
def __iter__(self):
for i in count():
with self._key() as k:
try:
name = winreg.EnumKey(k, i)
except OSError as e:
if e.winerror != 259:
raise
break
yield Distribution(name)
Lxss = _Lxss()
| 30.875622 | 108 | 0.563165 |
81a34a9a29a2bcf516df9f355478686bebdaa96b | 25,235 | py | Python | virtual/lib/python3.6/site-packages/debian/changelog.py | marknesh/pitches | 0a480d9bc2beafaefa0121393b1502cc05edab89 | [
"MIT"
] | null | null | null | virtual/lib/python3.6/site-packages/debian/changelog.py | marknesh/pitches | 0a480d9bc2beafaefa0121393b1502cc05edab89 | [
"MIT"
] | 10 | 2020-03-08T21:13:29.000Z | 2021-04-08T19:41:14.000Z | flask/lib/python3.6/site-packages/debian/changelog.py | JOFLIX/grapevines | 34576e01184570d79cc140b42ffb71d322132da6 | [
"MIT",
"Unlicense"
] | null | null | null | # changelog.py -- Python module for Debian changelogs
# Copyright (C) 2006-7 James Westby <jw+debian@jameswestby.net>
# Copyright (C) 2008 Canonical Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# The parsing code is based on that from dpkg which is:
# Copyright 1996 Ian Jackson
# Copyright 2005 Frank Lichtenheld <frank@lichtenheld.de>
# and licensed under the same license as above.
"""This module implements facilities to deal with Debian changelogs."""
from __future__ import absolute_import
import os
import pwd
import re
import socket
import warnings
import sys
import six
from debian import debian_support
# Python 3 doesn't have StandardError, but let's avoid changing our
# exception inheritance hierarchy for Python 2.
try:
_base_exception_class = StandardError
except NameError:
_base_exception_class = Exception
# TODO(jsw): Remove this in favor of using debian_support.Version directly. I
# don't think we gain anything by using this empty subclass.
if sys.version >= '3':
__str__ = _format
else:
__unicode__ = _format
topline = re.compile(r'^(\w%(name_chars)s*) \(([^\(\) \t]+)\)'
r'((\s+%(name_chars)s+)+)\;'
% {'name_chars': '[-+0-9a-z.]'},
re.IGNORECASE)
blankline = re.compile(r'^\s*$')
change = re.compile(r'^\s\s+.*$')
endline = re.compile(r'^ -- (.*) <(.*)>( ?)((\w+\,\s*)?\d{1,2}\s+\w+\s+'
r'\d{4}\s+\d{1,2}:\d\d:\d\d\s+[-+]\d{4}\s*)$')
endline_nodetails = re.compile(r'^ --(?: (.*) <(.*)>( ?)((\w+\,\s*)?\d{1,2}'
r'\s+\w+\s+\d{4}\s+\d{1,2}:\d\d:\d\d\s+[-+]\d{4}'
r'))?\s*$')
keyvalue= re.compile(r'^([-0-9a-z]+)=\s*(.*\S)$', re.IGNORECASE)
value_re = re.compile(r'^([-0-9a-z]+)((\s+.*)?)$', re.IGNORECASE)
xbcs_re = re.compile('^X[BCS]+-', re.IGNORECASE)
emacs_variables = re.compile(r'^(;;\s*)?Local variables:', re.IGNORECASE)
vim_variables = re.compile('^vim:', re.IGNORECASE)
cvs_keyword = re.compile(r'^\$\w+:.*\$')
comments = re.compile(r'^\# ')
more_comments = re.compile(r'^/\*.*\*/')
closes = re.compile(r'closes:\s*(?:bug)?\#?\s?\d+(?:,\s*(?:bug)?\#?\s?\d+)*',
re.IGNORECASE)
closeslp = re.compile(r'lp:\s+\#\d+(?:,\s*\#\d+)*', re.IGNORECASE)
old_format_re1 = re.compile(r'^(\w+\s+\w+\s+\d{1,2} \d{1,2}:\d{1,2}:\d{1,2}'
r'\s+[\w\s]*\d{4})\s+(.*)\s+(<|\()(.*)(\)|>)')
old_format_re2 = re.compile(r'^(\w+\s+\w+\s+\d{1,2},?\s*\d{4})\s+(.*)'
r'\s+(<|\()(.*)(\)|>)')
old_format_re3 = re.compile(r'^(\w[-+0-9a-z.]*) \(([^\(\) \t]+)\)\;?',
re.IGNORECASE)
old_format_re4 = re.compile(r'^([\w.+-]+)(-| )(\S+) Debian (\S+)',
re.IGNORECASE)
old_format_re5 = re.compile('^Changes from version (.*) to (.*):',
re.IGNORECASE)
old_format_re6 = re.compile(r'^Changes for [\w.+-]+-[\w.+-]+:?\s*$',
re.IGNORECASE)
old_format_re7 = re.compile(r'^Old Changelog:\s*$', re.IGNORECASE)
old_format_re8 = re.compile(r'^(?:\d+:)?\w[\w.+~-]*:?\s*$')
def get_maintainer():
"""Get the maintainer information in the same manner as dch.
This function gets the information about the current user for
the maintainer field using environment variables of gecos
informations as approriate.
It uses the same methods as dch to get the information, namely
DEBEMAIL, DEBFULLNAME, EMAIL, NAME, /etc/mailname and gecos.
:returns: a tuple of the full name, email pair as strings.
Either of the pair may be None if that value couldn't
be determined.
"""
env = os.environ
regex = re.compile(r"^(.*)\s+<(.*)>$")
# Split email and name
if 'DEBEMAIL' in env:
match_obj = regex.match(env['DEBEMAIL'])
if match_obj:
if not 'DEBFULLNAME' in env:
env['DEBFULLNAME'] = match_obj.group(1)
env['DEBEMAIL'] = match_obj.group(2)
if 'DEBEMAIL' not in env or 'DEBFULLNAME' not in env:
if 'EMAIL' in env:
match_obj = regex.match(env['EMAIL'])
if match_obj:
if not 'DEBFULLNAME' in env:
env['DEBFULLNAME'] = match_obj.group(1)
env['EMAIL'] = match_obj.group(2)
# Get maintainer's name
if 'DEBFULLNAME' in env:
maintainer = env['DEBFULLNAME']
elif 'NAME' in env:
maintainer = env['NAME']
else:
# Use password database if no data in environment variables
try:
maintainer = re.sub(r',.*', '', pwd.getpwuid(os.getuid()).pw_gecos)
except (KeyError, AttributeError):
maintainer = None
# Get maintainer's mail address
if 'DEBEMAIL' in env:
email = env['DEBEMAIL']
elif 'EMAIL' in env:
email = env['EMAIL']
else:
addr = None
if os.path.exists('/etc/mailname'):
f = open('/etc/mailname')
try:
addr = f.readline().strip()
finally:
f.close()
if not addr:
addr = socket.getfqdn()
if addr:
user = pwd.getpwuid(os.getuid()).pw_name
if not user:
addr = None
else:
addr = "%s@%s" % (user, addr)
if addr:
email = addr
else:
email = None
return (maintainer, email)
| 39.553292 | 80 | 0.559263 |
81a35f7c896207540f74045284e195d4e4fb7b21 | 667 | py | Python | Median.py | fatih-iver/Intro-to-Computer-Science-with-Python | 7b8127681415dfd100a0e70fe8a672cec696bbb7 | [
"MIT"
] | null | null | null | Median.py | fatih-iver/Intro-to-Computer-Science-with-Python | 7b8127681415dfd100a0e70fe8a672cec696bbb7 | [
"MIT"
] | null | null | null | Median.py | fatih-iver/Intro-to-Computer-Science-with-Python | 7b8127681415dfd100a0e70fe8a672cec696bbb7 | [
"MIT"
] | null | null | null | # Define a procedure, median, that takes three
# numbers as its inputs, and returns the median
# of the three numbers.
# Make sure your procedure has a return statement.
print(median(1,2,3))
#>>> 2
print(median(9,3,6))
#>>> 6
print(median(7,8,7))
#>>> 7 | 20.212121 | 51 | 0.493253 |
81a49f3c33e13f260a839b1cda7b2ffc5e26f768 | 5,089 | py | Python | miss_islington/util.py | webknjaz/miss-islington | 51c38793b9ff95ecf4a3e98755a291120e7240cc | [
"Apache-2.0"
] | null | null | null | miss_islington/util.py | webknjaz/miss-islington | 51c38793b9ff95ecf4a3e98755a291120e7240cc | [
"Apache-2.0"
] | 1 | 2021-02-27T14:15:59.000Z | 2021-02-27T14:15:59.000Z | miss_islington/util.py | webknjaz/miss-islington | 51c38793b9ff95ecf4a3e98755a291120e7240cc | [
"Apache-2.0"
] | null | null | null | import requests
import os
import subprocess
import gidgethub
from gidgethub import sansio
AUTOMERGE_LABEL = ":robot: automerge"
def comment_on_pr(issue_number, message):
"""
Leave a comment on a PR/Issue
"""
request_headers = sansio.create_headers(
"miss-islington", oauth_token=os.getenv("GH_AUTH")
)
issue_comment_url = (
f"https://api.github.com/repos/python/cpython/issues/{issue_number}/comments"
)
data = {"body": message}
response = requests.post(issue_comment_url, headers=request_headers, json=data)
if response.status_code == requests.codes.created:
print(f"Commented at {response.json()['html_url']}, message: {message}")
else:
print(response.status_code)
print(response.text)
return response
def assign_pr_to_core_dev(issue_number, coredev_login):
"""
Assign the PR to a core dev. Should be done when miss-islington failed
to backport.
"""
request_headers = sansio.create_headers(
"miss-islington", oauth_token=os.getenv("GH_AUTH")
)
edit_issue_url = (
f"https://api.github.com/repos/python/cpython/issues/{issue_number}"
)
data = {"assignees": [coredev_login]}
response = requests.patch(edit_issue_url, headers=request_headers, json=data)
if response.status_code == requests.codes.created:
print(f"Assigned PR {issue_number} to {coredev_login}")
else:
print(response.status_code)
print(response.text)
return response
def normalize_title(title, body):
"""Normalize the title if it spills over into the PR's body."""
if not (title.endswith("") and body.startswith("")):
return title
else:
# Being paranoid in case \r\n is used.
return title[:-1] + body[1:].partition("\r\n")[0]
def normalize_message(body):
"""Normalize the message body to make it commit-worthy.
Mostly this just means removing HTML comments, but also removes unwanted
leading or trailing whitespace.
Returns the normalized body.
"""
while "<!--" in body:
body = body[: body.index("<!--")] + body[body.index("-->") + 3 :]
return "\n\n" + body.strip()
# Copied over from https://github.com/python/bedevere
| 29.760234 | 86 | 0.660051 |
81a62c5963fc9b6615ab457b4e524e86f8ffa6af | 1,918 | py | Python | src/plugins/database.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
] | 4 | 2022-01-24T05:33:34.000Z | 2022-03-25T06:29:19.000Z | src/plugins/database.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
] | null | null | null | src/plugins/database.py | Blitz-Raynor/Kiba | a73b5b6212a5446d218a80f1a6aba108e0a1912b | [
"MIT"
] | 3 | 2022-02-08T13:24:59.000Z | 2022-03-13T06:42:40.000Z | import aiosqlite
import sqlite3
import asyncio
import nonebot
from nonebot.log import logger
driver: nonebot.Driver = nonebot.get_driver()
config: nonebot.config.Config = driver.config
| 53.277778 | 153 | 0.696038 |
81a67c33ee26043b2ddf70e19e7da7c69207a707 | 5,492 | py | Python | dexp/processing/utils/scatter_gather_i2i.py | haesleinhuepf/dexp | 2ea84f3db323724588fac565fae56f0d522bc5ca | [
"BSD-3-Clause"
] | 16 | 2021-04-21T14:09:19.000Z | 2022-03-22T02:30:59.000Z | dexp/processing/utils/scatter_gather_i2i.py | haesleinhuepf/dexp | 2ea84f3db323724588fac565fae56f0d522bc5ca | [
"BSD-3-Clause"
] | 28 | 2021-04-15T17:43:08.000Z | 2022-03-29T16:08:35.000Z | dexp/processing/utils/scatter_gather_i2i.py | haesleinhuepf/dexp | 2ea84f3db323724588fac565fae56f0d522bc5ca | [
"BSD-3-Clause"
] | 3 | 2022-02-08T17:41:30.000Z | 2022-03-18T15:32:27.000Z | from typing import Callable, Optional, Sequence, Tuple, Union
import numpy
from dexp.processing.utils.nd_slice import nd_split_slices, remove_margin_slice
from dexp.processing.utils.normalise import Normalise
from dexp.utils import xpArray
from dexp.utils.backends import Backend
def scatter_gather_i2i(
function: Callable,
image: xpArray,
tiles: Union[int, Tuple[int, ...]],
margins: Optional[Union[int, Tuple[int, ...]]] = None,
normalise: bool = False,
clip: bool = False,
to_numpy: bool = True,
internal_dtype: Optional[numpy.dtype] = None,
) -> xpArray:
"""
Image-2-image scatter-gather.
'Scatters' computation of a given unary function by splitting the input array into tiles,
computing using a given backend, and reassembling the tiles into a single array of same
shape as the inpout that is either backed by the same backend than that of the input image,
or that is backed by numpy -- usefull when the compute backend cannot hold the whole input and output
images in memory.
Parameters
----------
function : unary function
image : input image (can be any backend, numpy )
tiles : tile sizes to cut input image into, can be a single integer or a tuple of integers.
margins : margins to add to each tile, can be a single integer or a tuple of integers.
if None, no margins are added.
normalise : normalises the input image.
clip : clip after normalisation/denormalisation
to_numpy : should the result be a numpy array? Very usefull when the compute backend
cannot hold the whole input and output images in memory.
internal_dtype : internal dtype for computation
Returns
-------
Result of applying the unary function to the input image, if to_numpy==True then the image is
"""
if internal_dtype is None:
internal_dtype = image.dtype
if type(tiles) == int:
tiles = (tiles,) * image.ndim
# If None is passed for a tile that means that we don't tile along that axis, we als clip the tile size:
tiles = tuple((length if tile is None else min(length, tile)) for tile, length in zip(tiles, image.shape))
if margins is None:
margins = (0,) * image.ndim
if type(margins) == int:
margins = (margins,) * image.ndim
if to_numpy:
result = numpy.empty(shape=image.shape, dtype=internal_dtype)
else:
result = Backend.get_xp_module(image).empty_like(image, dtype=internal_dtype)
# Normalise:
norm = Normalise(Backend.to_backend(image), do_normalise=normalise, clip=clip, quantile=0.005)
# image shape:
shape = image.shape
# We compute the slices objects to cut the input and target images into batches:
tile_slices = list(nd_split_slices(shape, chunks=tiles, margins=margins))
tile_slices_no_margins = list(nd_split_slices(shape, chunks=tiles))
# Zipping together slices with and without margins:
slices = zip(tile_slices, tile_slices_no_margins)
# Number of tiles:
number_of_tiles = len(tile_slices)
if number_of_tiles == 1:
# If there is only one tile, let's not be complicated about it:
result = norm.backward(function(norm.forward(image)))
if to_numpy:
result = Backend.to_numpy(result, dtype=internal_dtype)
else:
result = Backend.to_backend(result, dtype=internal_dtype)
else:
_scatter_gather_loop(
norm.backward, function, image, internal_dtype, norm.forward, result, shape, slices, to_numpy
)
return result
# Dask turned out not too work great here, HUGE overhead compared to the light approach above.
# def scatter_gather_dask(backend: Backend,
# function,
# image,
# chunks,
# margins=None):
# boundary=None
# trim=True
# align_arrays=True
#
# image_d = from_array(image, chunks=chunks, asarray=False)
#
# def function_numpy(_image):
# print(_image.shape)
# return backend.to_numpy(function(_image))
#
# #func, *args, depth=None, boundary=None, trim=True, align_arrays=True, **kwargs
# computation= map_overlap(function_numpy,
# image_d,
# depth=margins,
# boundary=boundary,
# trim=trim,
# align_arrays=align_arrays,
# dtype=image.dtype
# )
#
# #computation.visualize(filename='transpose.png')
# result = computation.compute()
#
# return result
| 35.205128 | 110 | 0.667334 |
81a7268b47b548089b30e84d12ff883fa4b80a6d | 58 | py | Python | http_shadow/__init__.py | abador/http-shadow | 040935b0715f983714f38005f8ae97c255dae3e0 | [
"MIT"
] | null | null | null | http_shadow/__init__.py | abador/http-shadow | 040935b0715f983714f38005f8ae97c255dae3e0 | [
"MIT"
] | null | null | null | http_shadow/__init__.py | abador/http-shadow | 040935b0715f983714f38005f8ae97c255dae3e0 | [
"MIT"
] | 2 | 2018-09-27T15:20:35.000Z | 2020-10-02T08:38:31.000Z | from .backend import Backend
from .thread import HttpPool
| 19.333333 | 28 | 0.827586 |
81a85ad97e42bc868e9ae369e44cafe2a61e536a | 107 | py | Python | Problems/sample.py | HKuz/Test_Code | 798efc9fc668ef021736a6d9699ef4713cf8b718 | [
"MIT"
] | 1 | 2020-06-14T20:10:04.000Z | 2020-06-14T20:10:04.000Z | Problems/sample.py | makramjandar/Test_Code | 798efc9fc668ef021736a6d9699ef4713cf8b718 | [
"MIT"
] | null | null | null | Problems/sample.py | makramjandar/Test_Code | 798efc9fc668ef021736a6d9699ef4713cf8b718 | [
"MIT"
] | 1 | 2019-12-09T12:48:05.000Z | 2019-12-09T12:48:05.000Z | #!/usr/local/bin/python3
if __name__ == '__main__':
main()
| 9.727273 | 26 | 0.579439 |
81a8ec6fd77ecb2b6c41666fc9f2b8378760daa6 | 3,759 | py | Python | setup.py | wrmsr/omnibus | 3c4ef5eb17b0fff8593fa6a2284337bf193c18d3 | [
"BSD-3-Clause"
] | 2 | 2020-06-17T19:54:09.000Z | 2020-06-18T20:10:26.000Z | setup.py | wrmsr/omnibus | 3c4ef5eb17b0fff8593fa6a2284337bf193c18d3 | [
"BSD-3-Clause"
] | null | null | null | setup.py | wrmsr/omnibus | 3c4ef5eb17b0fff8593fa6a2284337bf193c18d3 | [
"BSD-3-Clause"
] | null | null | null | import fnmatch
import glob
import os
import sys
import setuptools.command.build_ext
APPLE = sys.platform == 'darwin'
BASE_DIR = os.path.dirname(__file__)
ABOUT = {}
_read_about()
EXCLUDED_STATIC_FILE_PATHS = [
'*.py',
'*/__pycache__/*',
'*/tests/*',
'*/_ext/cc/*',
'*/_ext/cy/*',
'*/_ext/rs/*',
]
PACKAGE_DATA = [
'.revision',
] + _get_static_files('omnibus')
INSTALL_REQUIRES = [
'toolz>=0.9.0',
]
EXTRAS_REQUIRE = {
'bintrees': ['bintrees>=0.2.7'],
'cytoolz': ['cytoolz>=0.9.0'],
'docker': ['docker>=3.7.0'],
'sortedcontainers': ['sortedcontainers>=2.1.0'],
}
DEBUG = 'DEBUG' in os.environ
EXT_MODULES = []
try:
import Cython
except ImportError:
pass
else:
import Cython.Build
import Cython.Compiler.Options
EXT_MODULES.extend([
*[
setuptools.Extension(
'omnibus._ext.cc.' + os.path.basename(fpath).rpartition('.')[0],
sources=[fpath]
)
for fpath in glob.glob('omnibus/_ext/cc/*.cc')
],
*Cython.Build.cythonize(
[
setuptools.Extension(
'omnibus._ext.cy.' + os.path.basename(fpath).rpartition('.')[0],
sources=[fpath],
language='c++',
)
for fpath in glob.glob('omnibus/_ext/cy/**/*.pyx', recursive=True)
],
language_level=3,
gdb_debug=DEBUG,
compiler_directives={
**Cython.Compiler.Options.get_directive_defaults(),
'embedsignature': True,
'binding': True,
},
),
])
if APPLE:
EXT_MODULES.extend([
setuptools.Extension(
'omnibus._ext.m.' + os.path.basename(fpath).rpartition('.')[0],
sources=[fpath],
extra_link_args=[
'-framework', 'AppKit',
'-framework', 'CoreFoundation',
]
)
for fpath in glob.glob('omnibus/_ext/m/*.m')
])
if __name__ == '__main__':
setuptools.setup(
name=ABOUT['__title__'],
version=ABOUT['__version__'],
description=ABOUT['__description__'],
author=ABOUT['__author__'],
url=ABOUT['__url__'],
python_requires='>=3.7',
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: ' + '.'.join(map(str, sys.version_info[:2])),
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python',
],
# zip_safe=True,
setup_requires=['setuptools'],
packages=setuptools.find_packages(
include=['omnibus', 'omnibus.*'],
exclude=['tests', '*.tests', '*.tests.*'],
),
py_modules=['omnibus'],
package_data={'omnibus': PACKAGE_DATA},
include_package_data=True,
entry_points={},
install_requires=INSTALL_REQUIRES,
extras_require=EXTRAS_REQUIRE,
ext_modules=EXT_MODULES,
)
| 24.096154 | 93 | 0.536313 |
81aab8159848ee0e48d169bcc15f7002773f809e | 689 | py | Python | day1/test_day1.py | Sundin/advent-of-code-2019 | 7ba5971ab5deeec61c60e6acbe1ac223876e77fe | [
"MIT"
] | null | null | null | day1/test_day1.py | Sundin/advent-of-code-2019 | 7ba5971ab5deeec61c60e6acbe1ac223876e77fe | [
"MIT"
] | null | null | null | day1/test_day1.py | Sundin/advent-of-code-2019 | 7ba5971ab5deeec61c60e6acbe1ac223876e77fe | [
"MIT"
] | null | null | null | from day1 import *
import unittest
| 40.529412 | 93 | 0.835994 |
81ac62bd68434ff2ce3767d63cce77c07cbf51c7 | 305 | py | Python | PyObjCTest/test_nspdfinfo.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
] | null | null | null | PyObjCTest/test_nspdfinfo.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
] | null | null | null | PyObjCTest/test_nspdfinfo.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
] | null | null | null | import AppKit
from PyObjCTools.TestSupport import TestCase, min_os_level
| 30.5 | 73 | 0.783607 |
81acfe851d89593a12e5f0cfee315b25fd2a0d5f | 1,636 | py | Python | gap/src/util/data_iterator.py | cosmozhang/autoencoding_parsing | 2e8f4811ca6032f4f89195cd019a4fce4b399dcc | [
"BSD-3-Clause"
] | null | null | null | gap/src/util/data_iterator.py | cosmozhang/autoencoding_parsing | 2e8f4811ca6032f4f89195cd019a4fce4b399dcc | [
"BSD-3-Clause"
] | null | null | null | gap/src/util/data_iterator.py | cosmozhang/autoencoding_parsing | 2e8f4811ca6032f4f89195cd019a4fce4b399dcc | [
"BSD-3-Clause"
] | null | null | null | from collections import OrderedDict, defaultdict
import numpy as np
'''
generate a id to length dic
'''
| 31.461538 | 88 | 0.665037 |
81ad78394864e547b6c74d97fef7b7beb9ca5228 | 1,793 | py | Python | recipe_parser/recipes/thehappyfoodie.py | tyler-a-cox/recipe-parsing | fa883f66a39063cf72912527628b082cda455e76 | [
"MIT"
] | null | null | null | recipe_parser/recipes/thehappyfoodie.py | tyler-a-cox/recipe-parsing | fa883f66a39063cf72912527628b082cda455e76 | [
"MIT"
] | null | null | null | recipe_parser/recipes/thehappyfoodie.py | tyler-a-cox/recipe-parsing | fa883f66a39063cf72912527628b082cda455e76 | [
"MIT"
] | null | null | null | from ._schema import DefaultSchema
from ._utils import get_minutes, get_yields, normalize_string
| 28.919355 | 88 | 0.516453 |
81ae2fd5d9a2f3d3be912f6bccda2599695dd505 | 2,866 | py | Python | Scopuli/WEB/DebugToolbar/Toolbar.py | MaxOnNet/scopuli-core-web | 66a2c31b36d7fc05be36ba5d5b141644459b4aba | [
"Apache-2.0"
] | null | null | null | Scopuli/WEB/DebugToolbar/Toolbar.py | MaxOnNet/scopuli-core-web | 66a2c31b36d7fc05be36ba5d5b141644459b4aba | [
"Apache-2.0"
] | null | null | null | Scopuli/WEB/DebugToolbar/Toolbar.py | MaxOnNet/scopuli-core-web | 66a2c31b36d7fc05be36ba5d5b141644459b4aba | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright [2017] Tatarnikov Viktor [viktor@tatarnikov.org]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from urllib.parse import unquote
except ImportError:
from urllib import unquote
from flask import url_for, current_app
from werkzeug.utils import import_string
| 29.854167 | 97 | 0.63887 |
81ae89733812f04310f2f41f0ec35c6af756fb98 | 8,372 | py | Python | CalculateLods.py | makeling/AGSSmartVectorTileTools | 009d925f883729f98c0d0744e1d466062dc260e6 | [
"Apache-2.0"
] | 3 | 2019-02-19T06:14:03.000Z | 2020-01-06T07:57:12.000Z | CalculateLods.py | makeling/AGSSmartVectorTileTools | 009d925f883729f98c0d0744e1d466062dc260e6 | [
"Apache-2.0"
] | null | null | null | CalculateLods.py | makeling/AGSSmartVectorTileTools | 009d925f883729f98c0d0744e1d466062dc260e6 | [
"Apache-2.0"
] | 2 | 2019-03-25T09:43:30.000Z | 2019-11-28T03:52:56.000Z | # -*- coding: utf-8 -*-
# !/usr/bin/python
__author__ = 'ma_keling'
# Version : 1.0.0
# Start Time : 2018-11-29
# Update Time :
# Change Log :
## 1.
## 2.
## 3.
import time
import arcpy
import math
# Description: Loop layers and calculate lod for every feature in the layer.
# Description: Compute the total records for a featureclass
# Description: get the start level based on layer extent
# Description: Add a new field with name lod to a table
# Description: Compute get area and length per pixel based on dpi and scale
# Description: Compute get length per pixel based on dpi and scale
# Description: Calculate lod for every polygon
# Description: Calculate lod for every point
# Description: Calculate lod for every polyline
# execute()
| 36.086207 | 121 | 0.568562 |
81aeea522ff7190fbf314844be2ef81f5a72b4cb | 3,894 | py | Python | python/one-liner/cluster_of_non_0.py | Hamng/python-sources | 0cc5a5d9e576440d95f496edcfd921ae37fcd05a | [
"Unlicense"
] | null | null | null | python/one-liner/cluster_of_non_0.py | Hamng/python-sources | 0cc5a5d9e576440d95f496edcfd921ae37fcd05a | [
"Unlicense"
] | 1 | 2019-02-23T18:30:51.000Z | 2019-02-23T18:30:51.000Z | python/one-liner/cluster_of_non_0.py | Hamng/python-sources | 0cc5a5d9e576440d95f496edcfd921ae37fcd05a | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sat Feb 8 07:38:05 2020
@author: Ham
Self Challenge: Count Cluster of Non-0s
Given a 1-dimension array of integers,
determine how many 'clusters' of non-0 in the array.
A 'cluster' is a group of consecutive non-0 values.
Scoring: a solution needs to be a 1-liner;
i.e. NO point if implementing with a traditional 'for' loop!
Sample Input (see STDIN_SIO)
A : [
9, 0, 0, 22, 0, 0, 39, 11, 3, 0, \
0, 24, 1, 0, 50, 23, 3, 44, 0, 23, \
25, 6, 36, 19, 10, 23, 0, 37, 4, 1, \
7, 12, 0, 0, 49
]
Expected Output:
8
"""
import itertools
STDIN_SIO = """
9, 0, 0, 22, 0, 0, 39, 11, 3, 0, \
0, 24, 1, 0, 50, 23, 3, 44, 0, 23, \
2, 8, 20, 35, 0, 40, 34, 26, 36, 0, \
35, 19, 20, 18, 11, 43, 19, 21, 40, 0, \
14, 0, 14, 0, 0, 25, 35, 24, 49, 15, \
13, 3, 0, 10, 31, 25, 27, 37, 27, 43, \
44, 27, 8, 43, 0, 0, 33, 25, 19, 47, \
0, 29, 5, 2, 12, 8, 7, 0, 16, 36, \
0, 6, 17, 35, 36, 21, 0, 9, 1, 0, \
43, 29, 39, 15, 18, 0, 34, 26, 48, 0, \
34, 35, 7, 10, 0, 0, 15, 5, 12, 26, \
0, 37, 30, 33, 27, 34, 9, 37, 22, 0, \
0, 24, 30, 0, 0, 38, 23, 25, 0, 30, \
39, 24, 31, 0, 6, 19, 25, 0, 28, 15, \
8, 0, 48, 0, 35, 41, 0, 24, 1, 41, \
31, 0, 35, 21, 15, 26, 15, 27, 4, 0, \
8, 4, 0, 0, 2, 42, 18, 0, 28, 18, \
49, 34, 5, 10, 41, 48, 26, 14, 45, 44, \
9, 0, 49, 50, 24, 0, 0, 0, 23, 0, \
17, 0, 47, 31, 0, 42, 0, 0, 0, 40, \
46, 22, 50, 32, 20, 3, 44, 22, 0, 37, \
25, 0, 19, 26, 14, 23, 27, 41, 0, 1, \
13, 0, 48, 20, 37, 8, 0, 18, 0, 26, \
12, 19, 32, 19, 22, 0, 0, 0, 0, 0, \
16, 0, 0, 43, 0, 10, 5, 0, 6, 26, \
0, 24, 40, 29, 0, 43, 18, 27, 0, 0, \
37, 0, 46, 35, 17, 0, 20, 44, 29, 29, \
40, 33, 22, 27, 0, 0, 38, 21, 4, 0, \
0, 15, 31, 48, 36, 10, 0, 41, 0, 45, \
39, 0, 11, 9, 3, 38, 16, 0, 11, 22, \
37, 0, 3, 44, 10, 12, 47, 22, 32, 7, \
24, 1, 0, 22, 25, 0, 14, 0, 0, 0, \
23, 0, 36, 1, 42, 46, 0, 48, 0, 33, \
5, 27, 45, 0, 15, 29, 0, 50, 2, 31, \
25, 6, 36, 19, 10, 23, 0, 37, 4, 1, \
7, 12, 0, 0, 49
""".strip()
def count_non_0_clusters_1(arr):
"""Translate each non-0 to an 'A' char, and 0 to a space.
Then join together to become a string.
Then split(), then return number of tokens.
"""
return len("".join(["A" if e else " " for e in arr]).split())
def count_non_0_clusters_2(arr):
"""groupby() partitions into groups as:
[[True , [list of non-0]],
[False, [list of 0s]],
[True , [list of non-0]],
[False, [list of 0s]],
...
[True , [list of non-0]]]
(Old) Next, the list comprenhension iterates thru each tuple,
then collects the 1st element in each tuple if True.
Finally, return the len/count of Trues:
return len([t[0] for t in itertools.groupby(...) if t[0]])
Next, the list comprenhension iterates thru each tuple,
then collects the 1st element in each tuple.
Then return the count() of True elements.
"""
return [t[0] for t in itertools.groupby(arr, lambda e: bool(e))].count(True)
if __name__ == '__main__':
a = list(map(int, STDIN_SIO.split(",")))
# Nicely print it, 10 entries per line, with continuation
# so can copy-n-paste back into STDIN_SIO
#print(len(a))
#for i in range(0, (len(a) // 10) * 10, 10):
# print("%3u," * 10 % tuple(a[i:i+10]), end=" \\\n")
#j = a[(len(a) // 10) * 10:]
#print("%3u," * (len(j) - 1) % tuple(j[:-1]), end="")
#print("%3u" % j[-1])
print("count_*_1() returns", count_non_0_clusters_1(a), "clusters of non-0")
print("count_*_2() returns", count_non_0_clusters_2(a), "clusters of non-0")
| 34.157895 | 81 | 0.48793 |
81aef189550b2b54e321b9991b292a9ac7b3bfcb | 5,133 | py | Python | cardano-node-tests/cardano_node_tests/tests/test_configuration.py | MitchellTesla/Cardano-SCK | f394506eb0875622093805c009951f6905261778 | [
"Apache-2.0"
] | 6 | 2021-08-30T00:49:12.000Z | 2022-01-27T07:07:53.000Z | cardano-node-tests/cardano_node_tests/tests/test_configuration.py | c-spider/Cardano-SCK | 1accb0426289489e371eb67422ccb19ffaab5f3c | [
"Apache-2.0"
] | 17 | 2021-08-31T23:27:44.000Z | 2022-03-25T20:35:16.000Z | cardano-node-tests/cardano_node_tests/tests/test_configuration.py | c-spider/Cardano-SCK | 1accb0426289489e371eb67422ccb19ffaab5f3c | [
"Apache-2.0"
] | 3 | 2021-05-20T08:26:00.000Z | 2022-03-27T22:31:36.000Z | """Tests for node configuration."""
import json
import logging
import time
from pathlib import Path
import allure
import pytest
from _pytest.tmpdir import TempdirFactory
from cardano_clusterlib import clusterlib
from cardano_node_tests.utils import cluster_management
from cardano_node_tests.utils import cluster_nodes
from cardano_node_tests.utils import configuration
from cardano_node_tests.utils import helpers
LOGGER = logging.getLogger(__name__)
# use the "temp_dir" fixture for all tests automatically
pytestmark = pytest.mark.usefixtures("temp_dir")
def check_epoch_length(cluster_obj: clusterlib.ClusterLib) -> None:
    """Check that an epoch lasts `epoch_length_sec`.

    The epoch number must still be unchanged shortly before the expected epoch
    boundary and must have advanced by exactly one shortly after it.
    """
    margin_sec = 15
    # extra padding so the queried tip has time to reflect the new epoch
    padded_margin_sec = margin_sec + 15

    cluster_obj.wait_for_new_epoch()
    epoch_at_start = cluster_obj.get_epoch()

    # sleep until just before the expected end of the current epoch
    time.sleep(cluster_obj.epoch_length_sec - margin_sec)
    assert epoch_at_start == cluster_obj.get_epoch()

    # cross the epoch boundary and verify the epoch number advanced by one
    time.sleep(padded_margin_sec)
    assert epoch_at_start + 1 == cluster_obj.get_epoch()
| 33.116129 | 100 | 0.728034 |
81afed5d2a7be68d968744aa55c07d3f1c78d48b | 241,016 | py | Python | output/myresults.py | jacobseiler/rsage | b3b0a3fa3c676eab188991e37d06894396bfc74f | [
"MIT"
] | 1 | 2019-05-23T04:11:32.000Z | 2019-05-23T04:11:32.000Z | output/myresults.py | jacobseiler/rsage | b3b0a3fa3c676eab188991e37d06894396bfc74f | [
"MIT"
] | 7 | 2018-08-17T05:04:57.000Z | 2019-01-16T05:40:16.000Z | output/myresults.py | jacobseiler/rsage | b3b0a3fa3c676eab188991e37d06894396bfc74f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
from __future__ import print_function
import matplotlib
matplotlib.use('Agg')
import os
import heapq
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.colors as colors
import matplotlib.cm as cm
from numpy import *
from random import sample, seed, randint
from os.path import getsize as getFileSize
import math
import random
import csv
from cycler import cycler
from io import StringIO
#np.set_printoptions(threshold=np.nan)
from collections import Counter
from matplotlib.colors import LogNorm
from mpl_toolkits.axes_grid1 import AxesGrid
from astropy import units as u
from astropy import cosmology
import matplotlib.ticker as mtick
import PlotScripts
import ReadScripts
import AllVars
import GalaxyPhotoion as photo
import ObservationalData as Obs
import gnedin_analytic as ga
from mpi4py import MPI
import sys
# MPI communicator handles: every rank runs the analysis; rank 0 does the plotting.
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
# Configure the simulation parameters (Kali) and plotting defaults at import time.
AllVars.Set_Params_Kali()
AllVars.Set_Constants()
PlotScripts.Set_Params_Plot()
output_format = ".png"
# For the Tiamat extended results there is a weird hump when calculating the escape fraction.
# This hump occurs at a halo mass of approximately 10.3.
# The calculation of fesc skips this hump range (defined from kink_low to kink_high)
kink_low = 10.3
kink_high = 10.30000001
m_low = 7.0 # We only sum the photons coming from halos within the mass range m_low < Halo Mass < m_high
m_high = 15.0
# Galaxy stellar mass binning range, in log10(Msun).
m_gal_low = 3.0
m_gal_high = 12.0
# Same halo mass bounds converted to SAGE internal units (1.0e10 Msun/h).
m_low_SAGE = pow(10, m_low)/1.0e10 * AllVars.Hubble_h
m_high_SAGE = pow(10, m_high)/1.0e10 * AllVars.Hubble_h
# Bin width (dex) and resulting number of halo/galaxy mass bins.
bin_width = 0.2
NB = int((m_high - m_low) / bin_width)
NB_gal = int((m_gal_high - m_gal_low) / bin_width)
# Binning for the ejected baryon fraction (a dimensionless fraction in [0, 1]).
fej_low = 0.0
fej_high = 1.0
fej_bin_width = 0.05
NB_fej = int((fej_high - fej_low) / fej_bin_width)
def calculate_beta(MUV, z):
    '''
    Calculation of the dust attenuation parameter Beta. Fit values are from Bouwens (2015) ApJ 793, 115.
    For z = 5 and 6, Bouwens uses a piece-wise linear relationship and a linear relationship for higher redshift.

    Parameters
    ----------
    MUV : `float'
        A value of the absolute magnitude in the UV (generally M1600) in the AB magnitude system.

    z : `float'
        Redshift the attenuation is calculated at.  Must satisfy 4.5 <= z < 10.5.

    Returns
    ------
    beta : `float'
        Value of the UV continuum paramaeter beta.

    Raises
    ------
    ValueError
        If `z` is outside the fitted range [4.5, 10.5).  Previously such input
        fell through all branches and raised an obscure UnboundLocalError.
    '''
    if (z >= 4.5 and z < 5.5): # z = 5 fits (piece-wise in MUV).
        dB = -0.08 if MUV > -18.8 else -0.17
        B = -2.05
        offset = 18.8
    elif (z >= 5.5 and z < 6.5): # z = 6 fits (piece-wise in MUV).
        dB = -0.08 if MUV > -18.8 else -0.24
        B = -2.22
        offset = 18.8
    elif (z >= 6.5 and z < 7.5): # z = 7 fits.
        dB = -0.20
        B = -2.05
        offset = 19.5
    elif (z >= 7.5 and z < 8.5): # z = 8 fits.
        dB = -0.15
        B = -2.13
        offset = 19.5
    elif (z >= 8.5 and z < 9.5): # z = 9 fits.
        dB = -0.16
        B = -2.19
        offset = 19.5
    elif (z >= 9.5 and z < 10.5): # z = 10 fits.
        dB = -0.16
        B = -2.16
        offset = 19.5
    else:
        # Fail loudly instead of the silent fall-through of the original code.
        raise ValueError("calculate_beta: no Bouwens (2015) fit available for z = {0}".format(z))

    beta = dB * (MUV + offset) + B
    return beta
def multiply(array):
    '''
    Performs element wise multiplication.

    Parameters
    ----------
    array : `~numpy.darray'
        The array to be multiplied.

    Returns
    -------
    total : `float'
        Total of the elements multiplied together.
        The empty product (1) is returned for an empty input.
    '''
    total = 1
    # Iterate over the values directly instead of indexing via range(len()).
    for value in array:
        total *= value
    return total
##
def Sum_Log(array):
    '''
    Performs an element wise sum of an array who's elements are in log-space.

    Parameters
    ----------
    array : array
        Array with elements in log10-space.

    Returns
    ------
    sum_total : float
        Value of the elements taken to the power of 10 and summed.
        0.0 for an empty input.

    Units
    -----
    All units are kept the same as the inputs.
    '''
    # Convert each element out of log10-space and sum in linear space.
    # The explicit 0.0 start value keeps the empty-input return a float.
    return sum((10.0**val for val in array), 0.0)
##
def Std_Log(array, mean):
    '''
    Calculates the standard deviation of an array with elements in log-space.

    Parameters
    ----------
    array : array
        Array with elements in log10-space.  Must be non-empty.
    mean : float
        Mean of the array (not in log).

    Returns
    ------
    std : float
        Standard deviation of the input array taken to the power of 10.

    Raises
    ------
    ZeroDivisionError
        If `array` is empty (unchanged from the original behaviour).

    Units
    -----
    All units are kept the same as the inputs.
    '''
    # Population variance of the linear-space values about `mean`,
    # accumulated with a generator instead of an explicit index loop.
    variance = sum((10.0**val - mean)**2 for val in array) / len(array)
    return np.sqrt(variance)
###
def collect_across_tasks(mean_per_task, std_per_task, N_per_task, SnapList,
                         BinSnapList=[], binned=False, m_bin_low=0.0,
                         m_bin_high=0.0, my_bin_width=bin_width):
    """
    Reduces arrays that are unique to each task onto the master task.

    The dimensions of the input arrays will change slightly if we are collecting a statistics
    that is binned across e.g., halo mass or galaxy stellar mass.

    Parameters
    ----------

    mean_per_task, std_per_task, N_per_task: Nested 2D (or 3D if binned == True) arrays of floats.
                                             Outer length is equal to the number of models.
                                             Inner length is equal to the number of snapshots the data has been calculated for.
                                             Most inner length is equal to the number of bins.
        Contains the mean/standard deviation/number of objects unique for each task.

    SnapList: Nested 2D arrays of integers.  Outer length is equal to the number of models.
        Contains the snapshot numbers the data has been calculated for each model.

    BinSnapList: Nested 2D arrays of integers. Outer length is equal to the number of models.
        Often statistics are calculated for ALL snapshots but we only wish to plot for a subset of snapshots.
        This variable allows the binned data to be collected for only a subset of the snapshots.

    binned: Boolean.
        Dictates whether the collected data is a 2D or 3D array with the inner-most array being binned across e.g., halo mass.

    Returns
    ----------

    master_mean, master_std, master_N: Nested 2D (or 3D if binned == True) arrays of floats.
                                       Shape is identical to the input mean_per_task etc.
        If rank == 0 these contain the collected statistics.
        Otherwise these will be none.

    master_bin_middle: Array of floats.
        Contains the location of the middle of the bins for the data.
    """
    # NOTE(review): BinSnapList=[] is a mutable default argument; it is only
    # indexed (never mutated) here, so it is harmless, but a None sentinel
    # would be safer.  my_bin_width binds the module-level bin_width at def time.
    master_mean = []
    master_std = []
    master_N = []
    master_bin_middle = []
    for model_number in range(0, len(SnapList)):
        master_mean.append([])
        master_std.append([])
        master_N.append([])
        master_bin_middle.append([])
        # If we're collecting a binned statistic (e.g., binned across halo mass), then we need to perform the collecting per snapshot.
        if binned:
            count = 0
            for snapshot_idx in range(len(SnapList[model_number])):
                # Only collect snapshots that also appear in BinSnapList; both
                # lists must be in the same (ascending) order for this to work.
                if SnapList[model_number][snapshot_idx] == BinSnapList[model_number][count]:
                    master_mean[model_number], master_std[model_number], master_N[model_number] = calculate_pooled_stats(master_mean[model_number], master_std[model_number], master_N[model_number], mean_per_task[model_number][snapshot_idx], std_per_task[model_number][snapshot_idx], N_per_task[model_number][snapshot_idx])
                    master_bin_middle[model_number].append(np.arange(m_bin_low,
                                                                     m_bin_high+my_bin_width,
                                                                     my_bin_width)[:-1]
                                                           + my_bin_width* 0.5)
                    count += 1
                    if count == len(BinSnapList[model_number]):
                        break
        else:
            master_mean[model_number], master_std[model_number], master_N[model_number] = calculate_pooled_stats(master_mean[model_number], master_std[model_number], master_N[model_number],
                                                                                                                 mean_per_task[model_number], std_per_task[model_number],
                                                                                                                 N_per_task[model_number])
            # In the unbinned case calculate_pooled_stats appends a single
            # entry on rank 0; unwrap it so the shape matches the input.
            if rank == 0:
                master_mean[model_number] = master_mean[model_number][0]
                master_std[model_number] = master_std[model_number][0]
                master_N[model_number] = master_N[model_number][0]
    return master_mean, master_std, master_N, master_bin_middle
###
def calculate_pooled_stats(mean_pool, std_pool, N_pool, mean_local, std_local, N_local):
    '''
    Calculates the pooled mean and standard deviation from multiple processors and appends it to an input array.
    Formulae taken from https://en.wikipedia.org/wiki/Pooled_variance
    As we only care about these stats on the rank 0 process, we make use of junk inputs/outputs for other ranks.

    NOTE: Since the input data may be an array (e.g. pooling the mean/std for a stellar mass function).

    Parameters
    ----------
    mean_pool, std_pool, N_pool : array of floats.
        Arrays that contain the current pooled means/standard deviation/number of data points (for rank 0) or just a junk input (for other ranks).
    mean_local, mean_std : float or array of floats.
        The non-pooled mean and standard deviation unique for each process.
    N_local : floating point number or array of floating point numbers.
        Number of data points used to calculate the mean/standard deviation that is going to be added to the pool.
        NOTE: Use floating point here so we can use MPI.DOUBLE for all MPI functions.

    Returns
    -------
    mean_pool, std_pool : array of floats.
        Original array with the new pooled mean/standard deviation appended (for rank 0) or the new pooled mean/standard deviation only (for other ranks).

    Units
    -----
    All units are the same as the input.
    All inputs MUST BE real-space (not log-space).
    '''
    if isinstance(mean_local, list) == True:
        if len(mean_local) != len(std_local):
            print("len(mean_local) = {0} \t len(std_local) = {1}".format(len(mean_local), len(std_local)))
            raise ValueError("Lengths of mean_local and std_local should be equal")
    # Array path: uses the capital-R collective comm.Reduce on numpy buffers.
    if ((type(mean_local).__module__ == np.__name__) == True or (isinstance(mean_local, list) == True)): # Checks to see if we are dealing with arrays.
        N_times_mean_local = np.multiply(N_local, mean_local)
        N_times_var_local = np.multiply(N_local, np.multiply(std_local, std_local))
        N_local = np.array(N_local).astype(float)
        N_times_mean_local = np.array(N_times_mean_local).astype(np.float32)
        if rank == 0: # Only rank 0 holds the final arrays so only it requires proper definitions.
            N_times_mean_pool = np.zeros_like(N_times_mean_local)
            N_pool_function = np.zeros_like(N_local)
            N_times_var_pool = np.zeros_like(N_times_var_local)
            N_times_mean_pool = N_times_mean_pool.astype(np.float64) # Recast everything to double precision then use MPI.DOUBLE.
            N_pool_function = N_pool_function.astype(np.float64)
            N_times_var_pool = N_times_var_pool.astype(np.float64)
        else:
            N_times_mean_pool = None
            N_pool_function = None
            N_times_var_pool = None
        comm.Barrier()
        N_times_mean_local = N_times_mean_local.astype(np.float64)
        N_local = N_local.astype(np.float64)
        N_times_var_local = N_times_var_local.astype(np.float64)
        comm.Reduce([N_times_mean_local, MPI.DOUBLE], [N_times_mean_pool, MPI.DOUBLE], op = MPI.SUM, root = 0) # Sum the arrays across processors.
        comm.Reduce([N_local, MPI.DOUBLE],[N_pool_function, MPI.DOUBLE], op = MPI.SUM, root = 0)
        comm.Reduce([N_times_var_local, MPI.DOUBLE], [N_times_var_pool, MPI.DOUBLE], op = MPI.SUM, root = 0)
    # Scalar path: lower-case comm.reduce on plain Python numbers.
    else:
        N_times_mean_local = N_local * mean_local
        N_times_var_local = N_local * std_local * std_local
        N_times_mean_pool = comm.reduce(N_times_mean_local, op = MPI.SUM, root = 0)
        N_pool_function = comm.reduce(N_local, op = MPI.SUM, root = 0)
        N_times_var_pool = comm.reduce(N_times_var_local, op = MPI.SUM, root = 0)
    if rank == 0:
        # NOTE(review): len(N_pool_function) assumes the array path was taken;
        # in the scalar path N_pool_function is a plain number — confirm the
        # scalar path is never combined with this rank-0 block in practice.
        mean_pool_function = np.zeros((len(N_pool_function)))
        std_pool_function = np.zeros((len(N_pool_function)))
        for i in range(0, len(N_pool_function)):
            if N_pool_function[i] == 0:
                mean_pool_function[i] = 0.0
            else:
                mean_pool_function[i] = np.divide(N_times_mean_pool[i], N_pool_function[i])
            # Require at least 3 data points for a meaningful standard deviation.
            if N_pool_function[i] < 3:
                std_pool_function[i] = 0.0
            else:
                std_pool_function[i] = np.sqrt(np.divide(N_times_var_pool[i], N_pool_function[i]))
        mean_pool.append(mean_pool_function)
        std_pool.append(std_pool_function)
        N_pool.append(N_pool_function)
        return mean_pool, std_pool, N_pool
    else:
        return mean_pool, std_pool, N_pool_function # Junk return because non-rank 0 doesn't care.
##
def StellarMassFunction(SnapList, SMF, simulation_norm, FirstFile, LastFile, NumFile, ResolutionLimit_mean, model_tags, observations, paper_plot, output_tag):
    '''
    Calculates the stellar mass function for given galaxies with the option to overplot observations by Song et al. (2013) at z = 6, 7, 8 and/or Baldry et al. (2008) at z = 0.1.
    Parallel compatible.
    NOTE: The plotting assumes the redshifts we are plotting at are (roughly) the same for each model.
    Parameters
    ---------
    SnapList : Nested 'array-like`, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
        Snapshots that we plot the stellar mass function at for each model.
    SMF : Nested 2-dimensional array, SMF[model_number0][snapshot0] = [bin0galaxies, ..., binNgalaxies], with length equal to the number of bins (NB_gal).
        The count of galaxies within each stellar mass bin. Bounds are given by 'm_gal_low' and 'm_gal_high' in bins given by 'bin_width'.
    simulation_norm : array with length equal to the number of models.
        Denotes which simulation each model uses.
        0 : MySim
        1 : Mini-Millennium
        2 : Tiamat (down to z = 5)
        3 : Extended Tiamat (down to z = 1.6ish).
        4 : Britton's Simulation
        5 : Kali
    FirstFile, LastFile, NumFile : array of integers with length equal to the number of models.
        The file numbers for each model that were read in (defined by the range between [FirstFile, LastFile] inclusive) and the TOTAL number of files for this model (we may only be plotting a subset of the volume).
    ResolutionLimit_mean : array of floats with the same shape as SMF.
        This is the mean stellar mass for a halo with len (number of N-body simulation particles) between 'stellar_mass_halolen_lower' and 'stellar_mass_halolen_upper'.
    model_tags : array of strings with length equal to the number of models.
        Strings that contain the tag for each model. Will be placed on the plot.
    observations : int
        Denotes whether we want to overplot observational results.
        0 : Don't plot anything.
        1 : Plot Song et al. (2016) at z = 6, 7, 8.
        2 : Plot Baldry et al. (2008) at z = 0.1.
        3 : Plot both of these.
    paper_plot : int
        Denotes whether we want to split the plotting over three panels (z = 6, 7, 8) for the paper or keep it all to one figure.
    output_tag : string
        Name of the file that will be generated. File will be saved in the current directory with the output format defined by the 'output_format' variable at the beginning of the file.
    Returns
    -------
    No returns.
    Generates and saves the plot (named via output_tag).
    Units
    -----
    Stellar Mass is in units of log10(Msun).
    '''
    ## Empty array initialization ##
    title = []
    normalization_array = []
    redshift_labels = []
    counts_array = []
    bin_middle_array = []
    for model_number in range(0, len(SnapList)):
        counts_array.append([])
        bin_middle_array.append([])
        redshift_labels.append([])
    ####
    for model_number in range(0, len(SnapList)): # Does this for each of the models.
        ## Normalization for each model. ##
        # NOTE: these setters mutate module-level simulation parameters
        # (AllVars.BoxSize, AllVars.Hubble_h, ...) used below.
        if (simulation_norm[model_number] == 0):
            AllVars.Set_Params_Mysim()
        elif (simulation_norm[model_number] == 1):
            AllVars.Set_Params_MiniMill()
        elif (simulation_norm[model_number] == 2):
            AllVars.Set_Params_Tiamat()
        elif (simulation_norm[model_number] == 3):
            AllVars.Set_Params_Tiamat_extended()
        elif (simulation_norm[model_number] == 4):
            AllVars.Set_Params_Britton()
        elif(simulation_norm[model_number] == 5):
            AllVars.Set_Params_Kali()
        box_factor = (LastFile[model_number] - FirstFile[model_number] + 1.0)/(NumFile[model_number]) # This factor allows us to take a sub-volume of the box and scale the results to represent the entire box.
        print("We are creating the stellar mass function using {0:.4f} of the box's volume.".format(box_factor))
        norm = pow(AllVars.BoxSize,3) / pow(AllVars.Hubble_h, 3) * bin_width * box_factor
        normalization_array.append(norm)
        ####
        for snapshot_idx in range(0, len(SnapList[model_number])): # Loops for each snapshot in each model.
            tmp = 'z = %.2f' %(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]) # Assigns a redshift label.
            redshift_labels[model_number].append(tmp)
            ## We perform the plotting on Rank 0 so only this rank requires the final counts array. ##
            if rank == 0:
                counts_total = np.zeros_like(SMF[model_number][snapshot_idx])
            else:
                counts_total = None
            comm.Reduce([SMF[model_number][snapshot_idx], MPI.FLOAT], [counts_total, MPI.FLOAT], op = MPI.SUM, root = 0) # Sum all the stellar mass and pass to Rank 0.
            if rank == 0:
                counts_array[model_number].append(counts_total)
                bin_middle_array[model_number].append(np.arange(m_gal_low, m_gal_high+bin_width, bin_width)[:-1] + bin_width * 0.5)
            ####
    ## Plotting ##
    if rank == 0: # Plot only on rank 0.
        # Single-panel figure with all redshifts/models overlaid.
        if paper_plot == 0:
            f = plt.figure()
            ax = plt.subplot(111)
            for model_number in range(0, len(SnapList)):
                for snapshot_idx in range(0, len(SnapList[model_number])):
                    if model_number == 0: # We assume the redshifts for each model are the same, we only want to put a legend label for each redshift once.
                        title = redshift_labels[model_number][snapshot_idx]
                    else:
                        title = ''
                    plt.plot(bin_middle_array[model_number][snapshot_idx], counts_array[model_number][snapshot_idx] / normalization_array[model_number], color = PlotScripts.colors[snapshot_idx], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = title, linewidth = PlotScripts.global_linewidth)
            #print(np.min(np.log10(ResolutionLimit_mean)))
            #ax.axvline(np.max(np.log10(ResolutionLimit_mean)), color = 'k', linewidth = PlotScripts.global_linewidth, linestyle = '--')
            #ax.text(np.max(np.log10(ResolutionLimit_mean)) + 0.1, 1e-3, "Resolution Limit", color = 'k')
            for model_number in range(0, len(SnapList)): # Place legend labels for each of the models. NOTE: Placed after previous loop for proper formatting of labels.
                plt.plot(1e100, 1e100, color = 'k', linestyle = PlotScripts.linestyles[model_number], label = model_tags[model_number], rasterized=True, linewidth = PlotScripts.global_linewidth)
            ## Adjusting axis labels/limits. ##
            # NOTE(review): 'nonposy' was removed in newer matplotlib (use 'nonpositive') — confirm pinned version.
            plt.yscale('log', nonposy='clip')
            plt.axis([6, 11.5, 1e-6, 1e-0])
            ax.set_xlabel(r'$\log_{10}\ m_{\mathrm{*}} \:[M_{\odot}]$', fontsize = PlotScripts.global_fontsize)
            ax.set_ylabel(r'$\Phi\ [\mathrm{Mpc}^{-3}\: \mathrm{dex}^{-1}]$', fontsize = PlotScripts.global_fontsize)
            ax.xaxis.set_minor_locator(plt.MultipleLocator(0.25))
            ax.set_xticks(np.arange(6.0, 12.0))
            if (observations == 1 or observations == 3): # If we wanted to plot Song.
                Obs.Get_Data_SMF()
                delta = 0.05
                caps = 5
                ## Song (2016) Plotting ##
                plt.errorbar(Obs.Song_SMF_z6[:,0], 10**Obs.Song_SMF_z6[:,1], yerr= (10**Obs.Song_SMF_z6[:,1] - 10**Obs.Song_SMF_z6[:,3], 10**Obs.Song_SMF_z6[:,2] - 10**Obs.Song_SMF_z6[:,1]), xerr = 0.25, capsize = caps, elinewidth = PlotScripts.global_errorwidth, alpha = 1.0, lw=2.0, marker='o', ls='none', label = 'Song 2015, z = 6', color = PlotScripts.colors[0], rasterized=True)
                plt.errorbar(Obs.Song_SMF_z7[:,0], 10**Obs.Song_SMF_z7[:,1], yerr= (10**Obs.Song_SMF_z7[:,1] - 10**Obs.Song_SMF_z7[:,3], 10**Obs.Song_SMF_z7[:,2] - 10**Obs.Song_SMF_z7[:,1]), xerr = 0.25, capsize = caps, alpha=0.75, elinewidth = PlotScripts.global_errorwidth, lw=1.0, marker='o', ls='none', label = 'Song 2015, z = 7', color = PlotScripts.colors[1], rasterized=True)
                plt.errorbar(Obs.Song_SMF_z8[:,0], 10**Obs.Song_SMF_z8[:,1], yerr= (10**Obs.Song_SMF_z8[:,1] - 10**Obs.Song_SMF_z8[:,3], 10**Obs.Song_SMF_z8[:,2] - 10**Obs.Song_SMF_z8[:,1]), xerr = 0.25, capsize = caps, alpha=0.75, elinewidth = PlotScripts.global_errorwidth, lw=1.0, marker='o', ls='none', label = 'Song 2015, z = 8', color = PlotScripts.colors[2], rasterized=True)
                ####
            if ((observations == 2 or observations == 3) and rank == 0): # If we wanted to plot Baldry.
                Baldry_xval = np.log10(10 ** Obs.Baldry_SMF_z0[:, 0] /AllVars.Hubble_h/AllVars.Hubble_h)
                Baldry_xval = Baldry_xval - 0.26 # convert back to Chabrier IMF
                Baldry_yvalU = (Obs.Baldry_SMF_z0[:, 1]+Obs.Baldry_SMF_z0[:, 2]) * AllVars.Hubble_h*AllVars.Hubble_h*AllVars.Hubble_h
                Baldry_yvalL = (Obs.Baldry_SMF_z0[:, 1]-Obs.Baldry_SMF_z0[:, 2]) * AllVars.Hubble_h*AllVars.Hubble_h*AllVars.Hubble_h
                plt.fill_between(Baldry_xval, Baldry_yvalU, Baldry_yvalL,
                    facecolor='purple', alpha=0.25, label='Baldry et al. 2008 (z=0.1)')
                ####
            leg = plt.legend(loc='lower left', numpoints=1, labelspacing=0.1)
            leg.draw_frame(False) # Don't want a box frame
            for t in leg.get_texts(): # Reduce the size of the text
                t.set_fontsize(PlotScripts.global_legendsize)
            outputFile = './%s%s' %(output_tag, output_format)
            plt.savefig(outputFile, bbox_inches='tight') # Save the figure
            print('Saved file to {0}'.format(outputFile))
            plt.close()
        # Three-panel (z = 6, 7, 8) figure for the paper.
        if (paper_plot == 1):
            fig, ax = plt.subplots(nrows=1, ncols=3, sharex=False, sharey=True, figsize=(16, 6))
            delta_fontsize = 0
            caps = 5
            ewidth = 1.5
            for model_number in range(0, len(SnapList)):
                for count in range(len(SnapList[model_number])):
                    w = np.where((counts_array[model_number][count] > 0))[0]
                    ax[count].plot(bin_middle_array[model_number][count][w], counts_array[model_number][count][w]
                                   / normalization_array[model_number], color = PlotScripts.colors[model_number],
                                   linestyle = PlotScripts.linestyles[model_number], rasterized = True,
                                   label = r"$\mathbf{SAGE}$", linewidth = PlotScripts.global_linewidth)
                    tick_locs = np.arange(6.0, 12.0)
                    ax[count].set_xticklabels([r"$\mathbf{%d}$" % x for x in tick_locs], fontsize = PlotScripts.global_fontsize)
                    ax[count].set_xlim([6.8, 10.3])
                    ax[count].tick_params(which = 'both', direction='in',
                                          width = PlotScripts.global_tickwidth)
                    ax[count].tick_params(which = 'major', length = PlotScripts.global_ticklength)
                    ax[count].tick_params(which = 'minor', length = PlotScripts.global_ticklength-2)
                    ax[count].set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$',
                                         fontsize = PlotScripts.global_labelsize - delta_fontsize)
                    ax[count].xaxis.set_minor_locator(plt.MultipleLocator(0.25))
                    #ax[count].set_xticks(np.arange(6.0, 12.0))
                    for axis in ['top','bottom','left','right']: # Adjust axis thickness.
                        ax[count].spines[axis].set_linewidth(PlotScripts.global_axiswidth)
            # Since y-axis is shared, only need to do this once.
            ax[0].set_yscale('log', nonposy='clip')
            ax[0].set_yticklabels([r"$\mathbf{10^{-5}}$",r"$\mathbf{10^{-5}}$",r"$\mathbf{10^{-4}}$", r"$\mathbf{10^{-3}}$",
                                   r"$\mathbf{10^{-2}}$",r"$\mathbf{10^{-1}}$"])
            ax[0].set_ylim([1e-5, 1e-1])
            #ax[0].set_ylabel(r'\mathbf{$\log_{10} \Phi\ [\mathrm{Mpc}^{-3}\: \mathrm{dex}^{-1}]}$',
            ax[0].set_ylabel(r'$\mathbf{log_{10} \: \Phi\ [Mpc^{-3}\: dex^{-1}]}$',
                             fontsize = PlotScripts.global_labelsize - delta_fontsize)
            Obs.Get_Data_SMF()
            PlotScripts.Plot_SMF_z6(ax[0], errorwidth=ewidth, capsize=caps)
            PlotScripts.Plot_SMF_z7(ax[1], errorwidth=ewidth, capsize=caps)
            PlotScripts.Plot_SMF_z8(ax[2], errorwidth=ewidth, capsize=caps)
            ####
            ax[0].text(0.7, 0.9, r"$\mathbf{z = 6}$", transform = ax[0].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
            ax[1].text(0.7, 0.9, r"$\mathbf{z = 7}$", transform = ax[1].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
            ax[2].text(0.7, 0.9, r"$\mathbf{z = 8}$", transform = ax[2].transAxes, fontsize = PlotScripts.global_fontsize - delta_fontsize)
            #leg = ax[0,0].legend(loc=2, bbox_to_anchor = (0.2, -0.5), numpoints=1, labelspacing=0.1)
            leg = ax[0].legend(loc='lower left', numpoints=1, labelspacing=0.1)
            leg.draw_frame(False) # Don't want a box frame
            for t in leg.get_texts(): # Reduce the size of the text
                t.set_fontsize(PlotScripts.global_legendsize - 2)
            plt.tight_layout()
            outputFile = "{0}_paper{1}".format(output_tag, output_format)
            plt.savefig(outputFile, bbox_inches='tight') # Save the figure
            print('Saved file to {0}'.format(outputFile))
            plt.close()
##
def plot_fesc_galaxy(SnapList, PlotSnapList, simulation_norm,
                     mean_galaxy_fesc, std_galaxy_fesc, N_galaxy_fesc,
                     mean_halo_fesc, std_halo_fesc, N_halo_fesc,
                     ResolutionLimit_mean, model_tags, paper_plots,
                     mass_global, fesc_global, Ngamma_global, output_tag):
    """
    Plots the escape fraction as a function of stellar/halo mass.
    Parallel compatible.
    Accepts 3D arrays of the escape fraction binned into Stellar Mass bins to plot the escape fraction for multiple models.
    Mass units are log(Msun)
    Parameters
    ---------
    SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
        Snapshots for each model.
    simulation_norm : array with length equal to the number of models.
        Denotes which simulation each model uses.
        0 : MySim
        1 : Mini-Millennium
        2 : Tiamat (down to z = 5)
        3 : Extended Tiamat (down to z = 1.6ish).
        4 : Britton's Simulation
        5 : Kali
    mean_galaxy_fesc, std_galaxy_fesc, N_galaxy_fesc : Nested 3-dimensional array, mean_galaxy_fesc[model_number0][snapshot0] = [bin0_meanfesc, ..., binN_meanfesc], with length equal to the number of models.
        Mean/Standard deviation for fesc in each stellar mass bin, for each [model_number] and [snapshot_number]. N_galaxy_fesc is the number of galaxies placed into each mass bin.
    mean_halo_fesc, std_halo_fesc, N_halo_fesc Nested 3-dimensional array, mean_halo_fesc[model_number0][snapshot0] = [bin0_meanfesc, ..., binN_meanfesc], with length equal to the number of models.
        Identical to previous except using the halo virial mass for the binning rather than stellar mass.
    ResolutionLimit_mean : array of floats with the same shape as mean_galaxy_fesc.
        This is the mean stellar mass for a halo with len (number of N-body simulation particles) between 'stellar_mass_halolen_lower' and 'stellar_mass_halolen_upper'.
    model_tags : array of strings with length equal to the number of models.
        Strings that contain the tag for each model. Will be placed on the plot.
    paper_plots: Integer.
        Flag to denote whether we should plot a full, 4 panel plot for the
        RSAGE paper.
    output_tag : string
        Name of the file that will be generated.
    Returns
    -------
    No returns.
    Generates and saves the plot (named via output_tag).
    Units
    -----
    Mass units are log(Msun).
    """
    print("Plotting fesc as a function of stellar mass.")
    ## Array initialization ##
    # Pool the per-task binned statistics onto rank 0.
    master_mean_fesc_stellar, master_std_fesc_stellar, master_N_fesc_stellar, master_bin_middle_stellar = \
    collect_across_tasks(mean_galaxy_fesc, std_galaxy_fesc, N_galaxy_fesc,
                         SnapList, PlotSnapList, True, m_gal_low, m_gal_high)
    if rank == 0:
        if paper_plots == 0:
            fig = plt.figure()
            ax1 = fig.add_subplot(111)
        else:
            # 2x2 panel grid: one panel per model (up to 4 models).
            fig, ax = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))
            fig2, ax2 = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))
            delta_fontsize = 0
            caps = 5
            ewidth = 1.5
        count_x = 0
        for count, model_number in enumerate(range(0, len(SnapList))):
            # Move to the second panel row after the first two models.
            if count == 2:
                count_x += 1
            print("There were a total of {0} galaxies over the entire redshift range.".format(sum(N_halo_fesc[model_number])))
            ## Normalization for each model. ##
            if (simulation_norm[model_number] == 0):
                AllVars.Set_Params_Mysim()
            elif (simulation_norm[model_number] == 1):
                AllVars.Set_Params_MiniMill()
            elif (simulation_norm[model_number] == 2):
                AllVars.Set_Params_Tiamat()
            elif (simulation_norm[model_number] == 3):
                AllVars.Set_Params_Tiamat_extended()
            elif (simulation_norm[model_number] == 4):
                AllVars.Set_Params_Britton()
            elif(simulation_norm[model_number] == 5):
                AllVars.Set_Params_Kali()
            plot_count = 0
            for snapshot_idx in range(0, len(SnapList[model_number])):
                # Only plot the snapshots selected in PlotSnapList (assumed in the same order as SnapList).
                if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
                    if (model_number == 0):
                        label = r"$\mathbf{z = " + \
                                str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
                                "}$"
                    else:
                        label = ""
                    ## Plots as a function of stellar mass ##
                    w = np.where((master_N_fesc_stellar[model_number][snapshot_idx] < 4))[0] # If there are no galaxies in the bin we don't want to plot.
                    master_mean_fesc_stellar[model_number][snapshot_idx][w] = np.nan
                    if paper_plots == 0:
                        print(master_mean_fesc_stellar[model_number][snapshot_idx])
                        ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
                                 master_mean_fesc_stellar[model_number][snapshot_idx],
                                 color = PlotScripts.colors[plot_count],
                                 ls = PlotScripts.linestyles[model_number],
                                 rasterized = True, label = label,
                                 lw = PlotScripts.global_linewidth)
                    else:
                        ax[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
                                                  master_mean_fesc_stellar[model_number][snapshot_idx],
                                                  color = PlotScripts.colors[plot_count],
                                                  ls = PlotScripts.linestyles[0],
                                                  rasterized = True, label = label,
                                                  lw = PlotScripts.global_linewidth)
                        #w = np.random.randint(0,
                        #                      len(mass_global[model_number][snapshot_idx][0]),
                        #                      size=500)
                        #sc = ax2[count_x, count%2].scatter(mass_global[model_number][snapshot_idx][0][w],
                        #                                   fesc_global[model_number][snapshot_idx][0][w],
                        #                                   c=np.log10(Ngamma_global[model_number][snapshot_idx][0][w]*1.0e50),
                        #                                   alpha = 0.5,cmap='plasma')
                        #plt.colorbar(sc)
                        #ax2[count_x, count%2].hexbin(mass_global[model_number][snapshot_idx],
                        #                             fesc_global[model_number][snapshot_idx],
                        #                             C=Ngamma_global[model_number][snapshot_idx])
                    plot_count += 1
                    if (plot_count == len(PlotSnapList[model_number])):
                        break
        ## Stellar Mass plots ##
        if paper_plots == 0:
            adjust_stellarmass_plot(ax1)
        else:
            adjust_paper_plots(ax, model_tags)
            leg = ax[0,0].legend(loc="upper right", numpoints=1, labelspacing=0.1)
            leg.draw_frame(False) # Don't want a box frame
            for t in leg.get_texts(): # Reduce the size of the text
                t.set_fontsize('medium')
            plt.tight_layout()
            plt.subplots_adjust(wspace = 0.0, hspace = 0.0)
            #leg = ax2[0,0].legend(loc="upper right", numpoints=1, labelspacing=0.1)
            #leg.draw_frame(False) # Don't want a box frame
            #for t in leg.get_texts(): # Reduce the size of the text
            #    t.set_fontsize('medium')
            plt.tight_layout()
            plt.subplots_adjust(wspace = 0.0, hspace = 0.0)
        ## Output ##
        outputFile = './%s%s' %(output_tag, output_format)
        fig.savefig(outputFile, bbox_inches='tight') # Save the figure
        print('Saved file to {0}'.format(outputFile))
        plt.close(fig)
        if paper_plots == 1:
            outputFile = './%s_scatter%s' %(output_tag, output_format)
            fig2.savefig(outputFile, bbox_inches='tight') # Save the figure
            print('Saved file to {0}'.format(outputFile))
            plt.close(fig2)
##
def plot_reionmod_galaxy(SnapList, PlotSnapList, simulation_norm,
                         mean_galaxy_reionmod, std_galaxy_reionmod, N_galaxy_reionmod,
                         mean_galaxy_reionmod_gnedin, std_galaxy_reionmod_gnedin,
                         model_tags, paper_plots, output_tag):
    """
    Plots the reionization modifier as a function of galaxy stellar mass.

    Parallel compatible; the binned statistics are pooled across processors
    and only rank 0 draws and saves the figures.

    Two variants of the modifier are plotted: the fiducial one and the
    Gnedin-prescription one (suffix ``_gnedin``).

    Parameters
    ----------
    SnapList, PlotSnapList : Nested arrays, one list of snapshots per model.
        ``SnapList`` holds every snapshot read in; ``PlotSnapList`` the
        subset actually plotted.
    simulation_norm : array with length equal to the number of models.
        NOTE(review): accepted but never read inside this function.
    mean_galaxy_reionmod, std_galaxy_reionmod, N_galaxy_reionmod : Nested 3D
        arrays indexed ``[model_number][snapshot_idx][stellar_mass_bin]``.
        Mean/standard deviation of the reionization modifier per bin and the
        number of galaxies per bin.
    mean_galaxy_reionmod_gnedin, std_galaxy_reionmod_gnedin : Nested 3D arrays.
        As above, for the Gnedin modifier (shares ``N_galaxy_reionmod``).
    model_tags : array of strings with length equal to the number of models.
        Legend label for each model.
    paper_plots : Integer.
        0 for a single-panel figure; otherwise the 2x2 paper panel layout.
        NOTE(review): ``fig2``/``ax2`` only exist on the ``paper_plots != 0``
        path but ``fig2`` is saved unconditionally at the end -- confirm this
        function is only ever called with ``paper_plots == 1``.
    output_tag : string
        Base name of the generated files.

    Returns
    -------
    No returns. Saves ``<output_tag><output_format>`` and
    ``<output_tag>_redshiftpanels<output_format>``.
    """

    print("Reionization Modifier as a function of stellar mass.")

    ## Array initialization ##
    # Pool the per-processor binned statistics (bins span m_gal_low..m_gal_high).
    master_mean_reionmod_stellar, master_std_reionmod_stellar, master_N_reionmod_stellar, master_bin_middle_stellar = \
        collect_across_tasks(mean_galaxy_reionmod, std_galaxy_reionmod, N_galaxy_reionmod,
                             SnapList, PlotSnapList, True, m_gal_low, m_gal_high)

    master_mean_reionmod_gnedin_stellar, master_std_reionmod_gnedin_stellar, master_N_reionmod_gnedin_stellar, master_bin_middle_stellar = \
        collect_across_tasks(mean_galaxy_reionmod_gnedin, std_galaxy_reionmod_gnedin, N_galaxy_reionmod,
                             SnapList, PlotSnapList, True, m_gal_low, m_gal_high)

    if rank == 0:
        if paper_plots == 0:
            fig = plt.figure()
            ax1 = fig.add_subplot(111)
        else:
            # fig/ax: one panel per model, lines coloured by redshift.
            # fig2/ax2: one panel per redshift, lines coloured by model.
            fig, ax = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))
            fig2, ax2 = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))

        delta_fontsize = 0
        caps = 5
        ewidth = 1.5

        count_x = 0

        # First pass: draw onto ax (panels indexed by model).
        for count, model_number in enumerate(range(0, len(SnapList))):
            if count == 2:
                count_x += 1  # Advance to the second panel row after two models.
            plot_count = 0
            for snapshot_idx in range(0, len(SnapList[model_number])):
                if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
                    # Redshift labels only once (on the first model) to avoid
                    # duplicate legend entries.
                    if (model_number == 0):
                        label = r"$\mathbf{z = " + \
                                str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
                                "}$"
                    else:
                        label = ""

                    ## Plots as a function of stellar mass ##
                    w = np.where((master_N_reionmod_stellar[model_number][snapshot_idx] < 4))[0]  # If there are no galaxies in the bin we don't want to plot.
                    master_mean_reionmod_stellar[model_number][snapshot_idx][w] = np.nan
                    master_mean_reionmod_gnedin_stellar[model_number][snapshot_idx][w] = np.nan

                    if paper_plots == 0:
                        ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
                                 master_mean_reionmod_stellar[model_number][snapshot_idx],
                                 color = PlotScripts.colors[plot_count],
                                 ls = PlotScripts.linestyles[model_number],
                                 rasterized = True, label = label,
                                 lw = PlotScripts.global_linewidth)
                    else:
                        # Solid: fiducial modifier; second linestyle: Gnedin.
                        ax[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
                                                  master_mean_reionmod_stellar[model_number][snapshot_idx],
                                                  color = PlotScripts.colors[plot_count],
                                                  ls = PlotScripts.linestyles[0],
                                                  rasterized = True, label = label,
                                                  lw = PlotScripts.global_linewidth)

                        ax[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
                                                  master_mean_reionmod_gnedin_stellar[model_number][snapshot_idx],
                                                  color = PlotScripts.colors[plot_count],
                                                  ls = PlotScripts.linestyles[1],
                                                  rasterized = True, label = label,
                                                  lw = PlotScripts.global_linewidth)

                    plot_count += 1
                    if (plot_count == len(PlotSnapList[model_number])):
                        break

        z_labels = []
        # Second pass: draw onto ax2 (panels indexed by plotted snapshot,
        # lines coloured by model).  The NaN masking below repeats the first
        # pass's masking; it is idempotent.
        for model_number in range(0, len(SnapList)):
            count_x = 0
            plot_count = 0
            for count, snapshot_idx in enumerate(range(len(SnapList[model_number]))):
                if count == 2:
                    count_x += 1
                if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
                    label = model_tags[model_number]
                    # Collect the redshift strings once (first model) for the
                    # per-panel annotations applied by adjust_redshift_panels.
                    if (model_number == 0):
                        z_label = r"$\mathbf{z = " + \
                                  str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
                                  "}$"
                        z_labels.append(z_label)

                    ## Plots as a function of stellar mass ##
                    w = np.where((master_N_reionmod_stellar[model_number][snapshot_idx] < 4))[0]  # If there are no galaxies in the bin we don't want to plot.
                    master_mean_reionmod_stellar[model_number][snapshot_idx][w] = np.nan
                    master_mean_reionmod_gnedin_stellar[model_number][snapshot_idx][w] = np.nan

                    if (model_number == 0):
                        print(master_mean_reionmod_stellar[model_number][snapshot_idx])
                    ax2[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
                                               master_mean_reionmod_stellar[model_number][snapshot_idx],
                                               color = PlotScripts.colors[model_number],
                                               ls = PlotScripts.linestyles[model_number],
                                               rasterized = True, label = label,
                                               lw = PlotScripts.global_linewidth)
                    # The Gnedin reference curve is drawn once (black dashed).
                    if (model_number == 0):
                        ax2[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
                                                   master_mean_reionmod_gnedin_stellar[model_number][snapshot_idx],
                                                   color = 'k',
                                                   ls = '--',
                                                   rasterized = True, label = "Gnedin",
                                                   lw = PlotScripts.global_linewidth)

                    plot_count += 1
                    if (plot_count == len(PlotSnapList[model_number])):
                        break

        ## Stellar Mass plots ##
        if paper_plots == 0:
            adjust_stellarmass_plot(ax1)
        else:
            adjust_paper_plots(ax, model_tags)
            print(z_labels)
            adjust_redshift_panels(ax2, z_labels)

            leg = ax[0,0].legend(loc="upper right", numpoints=1, labelspacing=0.1)
            leg.draw_frame(False)  # Don't want a box frame
            for t in leg.get_texts():  # Reduce the size of the text
                t.set_fontsize('medium')

            leg = ax2[0,0].legend(loc="upper right", numpoints=1, labelspacing=0.1)
            leg.draw_frame(False)  # Don't want a box frame
            for t in leg.get_texts():  # Reduce the size of the text
                t.set_fontsize('medium')

            plt.tight_layout()
            plt.subplots_adjust(wspace = 0.0, hspace = 0.0)

            #leg = ax2[0,0].legend(loc="upper right", numpoints=1, labelspacing=0.1)
            #leg.draw_frame(False)  # Don't want a box frame
            #for t in leg.get_texts():  # Reduce the size of the text
            #    t.set_fontsize('medium')

            plt.tight_layout()
            plt.subplots_adjust(wspace = 0.0, hspace = 0.0)

        ## Output ##
        outputFile = "{0}{1}".format(output_tag, output_format)
        fig.savefig(outputFile, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile))
        plt.close(fig)

        # NOTE(review): fig2 is only defined on the paper_plots != 0 path;
        # this save would raise NameError for paper_plots == 0 -- confirm.
        outputFile2 = "{0}_redshiftpanels{1}".format(output_tag, output_format)
        fig2.savefig(outputFile2, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile2))
        plt.close(fig2)
##
def plot_nion_galaxy(SnapList, PlotSnapList, simulation_norm,
                     mean_Ngamma_galaxy, std_Ngamma_galaxy, N_Ngamma_galaxy,
                     model_tags, paper_plots, output_tag):
    """
    Plots the number of ionizing photons emitted (not necessarily escaped) as a
    function of galaxy stellar mass.

    Parallel compatible.
    Accepts 3D arrays binned into stellar mass bins to plot multiple models.
    Mass units are log(Msun).

    Parameters
    ----------
    SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
        Snapshots for each model.
    simulation_norm : array with length equal to the number of models.
        Denotes which simulation each model uses.
        0 : MySim
        1 : Mini-Millennium
        2 : Tiamat (down to z = 5)
        3 : Extended Tiamat (down to z = 1.6ish).
        4 : Britton's Simulation
        5 : Kali
    mean_Ngamma_galaxy, std_Ngamma_galaxy, N_Ngamma_galaxy : Nested
        3-dimensional arrays, mean_Ngamma_galaxy[model_number0][snapshot0] = [bin0_meanNgamma, ..., binN_meanNgamma], with length equal to the number of models.
        Mean/standard deviation for Ngamma in each stellar mass bin, for each
        [model_number] and [snapshot_number]. N_Ngamma_galaxy is the number
        of galaxies placed into each mass bin.
    model_tags : array of strings with length equal to the number of models.
        Strings that contain the tag for each model. Will be placed on the plot.
    paper_plots : Integer.
        Flag to denote whether we should plot a full, 4 panel plot for the
        RSAGE paper.
    output_tag : string
        Name of the file that will be generated.

    Returns
    -------
    No returns.
    Generates and saves the plot (named via output_tag).

    Units
    -----
    Mass units are log(Msun).
    Ngamma units are 1.0e50 photons/s.
    """

    print("Plotting Ngamma*fesc as a function of stellar mass.")

    ## Array initialization ##
    # Pool per-processor binned statistics (bins span m_gal_low..m_gal_high).
    master_mean_Ngamma_stellar, master_std_Ngamma_stellar, master_N_Ngamma_stellar, master_bin_middle_stellar = \
        collect_across_tasks(mean_Ngamma_galaxy, std_Ngamma_galaxy, N_Ngamma_galaxy,
                             SnapList, PlotSnapList, True, m_gal_low, m_gal_high)

    if rank == 0:
        if paper_plots == 0:
            fig = plt.figure()
            ax1 = fig.add_subplot(111)
        else:
            fig, ax = plt.subplots(nrows=2, ncols=2, sharex='col', sharey='row', figsize=(16, 6))

        delta_fontsize = 0
        caps = 5
        ewidth = 1.5

        # Redshift of each plotted snapshot; handed to adjust_paper_plots.
        # NOTE(review): sized like model_tags but indexed by the snapshot
        # counter `count` below -- confirm len(model_tags) covers the number
        # of plotted snapshots, else this can raise IndexError.
        z_tags = np.zeros_like(model_tags, dtype=np.float32)

        for model_number in range(0, len(SnapList)):
            count_x = 0

            ## Normalization for each model. ##
            # Loads the cosmology/units for this model's parent simulation.
            if (simulation_norm[model_number] == 0):
                AllVars.Set_Params_Mysim()
            elif (simulation_norm[model_number] == 1):
                AllVars.Set_Params_MiniMill()
            elif (simulation_norm[model_number] == 2):
                AllVars.Set_Params_Tiamat()
            elif (simulation_norm[model_number] == 3):
                AllVars.Set_Params_Tiamat_extended()
            elif (simulation_norm[model_number] == 4):
                AllVars.Set_Params_Britton()
            elif(simulation_norm[model_number] == 5):
                AllVars.Set_Params_Kali()

            plot_count = 0
            for count, snapshot_idx in enumerate(range(0, len(SnapList[model_number]))):
                if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
                    # NOTE(review): unlike the sibling routines, the panel-row
                    # advance happens inside the snapshot-match test here --
                    # confirm intended.
                    if count == 2:
                        count_x += 1
                    label = model_tags[model_number]
                    z_tags[count] = float(AllVars.SnapZ[SnapList[model_number][snapshot_idx]])

                    ## Plots as a function of stellar mass ##
                    w = np.where((master_N_Ngamma_stellar[model_number][snapshot_idx] < 4))[0]  # If there are no galaxies in the bin we don't want to plot.
                    master_mean_Ngamma_stellar[model_number][snapshot_idx][w] = np.nan

                    if paper_plots == 0:
                        # Input data is in units of 1.0e50 s^-1; rescale
                        # before taking the log.
                        ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
                                 np.log10(master_mean_Ngamma_stellar[model_number][snapshot_idx]*1.0e50),
                                 color = PlotScripts.colors[plot_count],
                                 ls = PlotScripts.linestyles[model_number],
                                 rasterized = True, label = label,
                                 lw = PlotScripts.global_linewidth)
                    else:
                        ax[count_x, count%2].plot(master_bin_middle_stellar[model_number][snapshot_idx],
                                                  np.log10(master_mean_Ngamma_stellar[model_number][snapshot_idx]*1.0e50),
                                                  color = PlotScripts.colors[model_number],
                                                  ls = PlotScripts.linestyles[model_number],
                                                  rasterized = True, label = label,
                                                  lw = PlotScripts.global_linewidth)

                    plot_count += 1
                    if (plot_count == len(PlotSnapList[model_number])):
                        break

        ## Stellar Mass plots ##
        if paper_plots == 0:
            adjust_stellarmass_plot(ax1)
        else:
            adjust_paper_plots(ax, z_tags)

            leg = ax[0,0].legend(loc="upper left", numpoints=1, labelspacing=0.1)
            leg.draw_frame(False)  # Don't want a box frame
            for t in leg.get_texts():  # Reduce the size of the text
                t.set_fontsize('medium')

            plt.tight_layout()
            plt.subplots_adjust(wspace = 0.0, hspace = 0.0)

        ## Output ##
        outputFile = './%s%s' %(output_tag, output_format)
        fig.savefig(outputFile, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile))
        plt.close(fig)
##
def plot_photo_galaxy(SnapList, PlotSnapList, simulation_norm,
                      mean_photo_galaxy, std_photo_galaxy, N_photo_galaxy,
                      model_tags, paper_plots, output_tag):
    """
    Plots the photoionization rate as a function of galaxy stellar mass.

    Parallel compatible.
    Accepts 3D arrays binned into stellar mass bins to plot multiple models.
    Mass units are log(Msun).

    Parameters
    ----------
    SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
        Snapshots for each model.
    simulation_norm : array with length equal to the number of models.
        Denotes which simulation each model uses.
        0 : MySim
        1 : Mini-Millennium
        2 : Tiamat (down to z = 5)
        3 : Extended Tiamat (down to z = 1.6ish).
        4 : Britton's Simulation
        5 : Kali
    mean_photo_galaxy, std_photo_galaxy, N_photo_galaxy : Nested
        3-dimensional arrays, mean_photo_galaxy[model_number0][snapshot0] =
        [bin0_meanphoto, ..., binN_meanphoto], with length equal to the number of models.
        Mean/standard deviation for the photoionization rate in each stellar
        mass bin, for each [model_number] and [snapshot_number].
        N_photo_galaxy is the number of galaxies placed into each mass bin.
    model_tags : array of strings with length equal to the number of models.
        Strings that contain the tag for each model. Will be placed on the plot.
    paper_plots : Integer.
        Must be 0; the paper-style panel layout was never implemented for
        this quantity (see Raises).
    output_tag : string
        Name of the file that will be generated.

    Returns
    -------
    No returns.
    Generates and saves the plot (named via output_tag).

    Raises
    ------
    NotImplementedError
        If ``paper_plots != 0`` (raised on rank 0 only).

    Units
    -----
    Mass units are log(Msun).
    """

    print("Plotting photoionization rate as a function of stellar mass.")

    ## Array initialization ##
    # Pool per-processor binned statistics (bins span m_gal_low..m_gal_high).
    master_mean_photo_stellar, master_std_photo_stellar, master_N_photo_stellar, master_bin_middle_stellar = \
        collect_across_tasks(mean_photo_galaxy, std_photo_galaxy, N_photo_galaxy,
                             SnapList, PlotSnapList, True, m_gal_low, m_gal_high)

    if rank == 0:
        # BUG FIX: the original left `fig`/`ax1` unbound when paper_plots != 0
        # (every paper-plots branch was `pass`) and then crashed with a
        # NameError at fig.savefig.  Fail loudly and early instead; the
        # paper_plots == 0 path is unchanged.
        if paper_plots != 0:
            raise NotImplementedError("The paper-style panel layout is not "
                                      "implemented for the photoionization "
                                      "rate plot; call with paper_plots == 0.")

        fig = plt.figure()
        ax1 = fig.add_subplot(111)

        for model_number in range(0, len(SnapList)):
            ## Normalization for each model. ##
            # Loads the cosmology/units for this model's parent simulation.
            if (simulation_norm[model_number] == 0):
                AllVars.Set_Params_Mysim()
            elif (simulation_norm[model_number] == 1):
                AllVars.Set_Params_MiniMill()
            elif (simulation_norm[model_number] == 2):
                AllVars.Set_Params_Tiamat()
            elif (simulation_norm[model_number] == 3):
                AllVars.Set_Params_Tiamat_extended()
            elif (simulation_norm[model_number] == 4):
                AllVars.Set_Params_Britton()
            elif(simulation_norm[model_number] == 5):
                AllVars.Set_Params_Kali()

            plot_count = 0
            for snapshot_idx in range(0, len(SnapList[model_number])):
                if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
                    # Redshift labels only once (on the first model) to avoid
                    # duplicate legend entries.
                    if (model_number == 0):
                        label = r"$\mathbf{z = " + \
                                str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
                                "}$"
                    else:
                        label = ""

                    ## Plots as a function of stellar mass ##
                    w = np.where((master_N_photo_stellar[model_number][snapshot_idx] < 4))[0]  # If there are no galaxies in the bin we don't want to plot.
                    master_mean_photo_stellar[model_number][snapshot_idx][w] = np.nan

                    ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
                             np.log10(master_mean_photo_stellar[model_number][snapshot_idx]),
                             color = PlotScripts.colors[plot_count],
                             ls = PlotScripts.linestyles[model_number],
                             rasterized = True, label = label,
                             lw = PlotScripts.global_linewidth)

                    plot_count += 1
                    if (plot_count == len(PlotSnapList[model_number])):
                        break

        # Dummy (NaN) lines so each model gets a labelled legend entry.
        for model_number in range(0, len(SnapList)):
            ax1.plot(np.nan, np.nan, color = 'k',
                     label = model_tags[model_number],
                     lw = PlotScripts.global_linewidth,
                     ls = PlotScripts.linestyles[model_number])

        ## Stellar Mass plots ##
        adjust_stellarmass_plot(ax1)

        ## Output ##
        outputFile = './%s%s' %(output_tag, output_format)
        fig.savefig(outputFile, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile))
        plt.close(fig)
##
##
def plot_sfr_galaxy(SnapList, PlotSnapList, simulation_norm,
                    mean_galaxy_sfr, std_galaxy_sfr,
                    mean_galaxy_ssfr, std_galaxy_ssfr,
                    N_galaxy, model_tags, output_tag):
    """
    Plots the star formation rate (SFR) and specific star formation rate
    (sSFR) as functions of stellar mass.

    Parallel compatible.
    Accepts 3D arrays of the SFR/sSFR binned into stellar mass bins.
    Mass units log(Msun).

    Parameters
    ----------
    SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
        Snapshots for each model.
    simulation_norm : array with length equal to the number of models.
        Denotes which simulation each model uses.
        0 : MySim
        1 : Mini-Millennium
        2 : Tiamat (down to z = 5)
        3 : Extended Tiamat (down to z = 1.6ish).
        4 : Britton's Simulation
        5 : Kali
    mean_galaxy_sfr, std_galaxy_sfr, mean_galaxy_ssfr, std_galaxy_ssfr : Nested
        3-dimensional arrays, mean_galaxy_sfr[model_number0][snapshot0] =
        [bin0_meansfr, ..., binN_meansfr], with length equal to the number of models.
        Mean/standard deviation for SFR/sSFR in each stellar mass bin, for
        each [model_number] and [snapshot_number].
    N_galaxy : Nested 3-dimensional array.
        Number of galaxies placed into each mass bin (shared by SFR and sSFR).
    model_tags : array of strings with length equal to the number of models.
        Strings that contain the tag for each model. Will be placed on the plot.
    output_tag : string
        Base name of the files that will be generated.

    Returns
    -------
    No returns.
    Generates and saves two plots: "<output_tag>SFR..." and
    "<output_tag>sSFR...".

    Units
    -----
    Mass units are log(Msun).
    """

    print("Plotting sSFR as a function of stellar mass.")

    ## Array initialization ##
    # Pool per-processor binned statistics (bins span m_gal_low..m_gal_high).
    master_mean_sfr_stellar, master_std_sfr_stellar, master_N_sfr_stellar, master_bin_middle_stellar = \
        collect_across_tasks(mean_galaxy_sfr, std_galaxy_sfr, N_galaxy,
                             SnapList, PlotSnapList, True, m_gal_low, m_gal_high)

    master_mean_ssfr_stellar, master_std_ssfr_stellar, master_N_ssfr_stellar, master_bin_middle_stellar = \
        collect_across_tasks(mean_galaxy_ssfr, std_galaxy_ssfr, N_galaxy,
                             SnapList, PlotSnapList, True, m_gal_low, m_gal_high)

    if rank == 0:
        fig = plt.figure()
        ax1 = fig.add_subplot(111)   # SFR panel.

        fig2 = plt.figure()
        ax2 = fig2.add_subplot(111)  # sSFR panel.

        for model_number in range(0, len(SnapList)):
            ## Normalization for each model. ##
            # Loads the cosmology/units for this model's parent simulation.
            if (simulation_norm[model_number] == 0):
                AllVars.Set_Params_Mysim()
            elif (simulation_norm[model_number] == 1):
                AllVars.Set_Params_MiniMill()
            elif (simulation_norm[model_number] == 2):
                AllVars.Set_Params_Tiamat()
            elif (simulation_norm[model_number] == 3):
                AllVars.Set_Params_Tiamat_extended()
            elif (simulation_norm[model_number] == 4):
                AllVars.Set_Params_Britton()
            elif(simulation_norm[model_number] == 5):
                AllVars.Set_Params_Kali()

            plot_count = 0
            for snapshot_idx in range(0, len(SnapList[model_number])):
                if (SnapList[model_number][snapshot_idx] == PlotSnapList[model_number][plot_count]):
                    # Redshift labels only once (on the first model) to avoid
                    # duplicate legend entries.
                    if (model_number == 0):
                        label = r"$\mathbf{z = " + \
                                str(int(round(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]))) +\
                                "}$"
                    else:
                        label = ""

                    ## Plots as a function of stellar mass ##
                    ax1.plot(master_bin_middle_stellar[model_number][snapshot_idx],
                             master_mean_sfr_stellar[model_number][snapshot_idx],
                             color = PlotScripts.colors[plot_count],
                             ls = PlotScripts.linestyles[model_number],
                             rasterized = True, label = label,
                             lw = PlotScripts.global_linewidth)

                    ax2.plot(master_bin_middle_stellar[model_number][snapshot_idx],
                             master_mean_ssfr_stellar[model_number][snapshot_idx],
                             color = PlotScripts.colors[plot_count],
                             ls = PlotScripts.linestyles[model_number],
                             rasterized = True, label = label,
                             lw = PlotScripts.global_linewidth)

                    plot_count += 1
                    if (plot_count == len(PlotSnapList[model_number])):
                        break

        ## Stellar Mass plots ##
        adjust_sfr_plot(ax1)
        adjust_ssfr_plot(ax2)

        ## Output ##
        outputFile = "./{0}SFR{1}".format(output_tag, output_format)
        fig.savefig(outputFile, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile))
        plt.close(fig)

        outputFile = "./{0}sSFR{1}".format(output_tag, output_format)
        fig2.savefig(outputFile, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile))
        # BUG FIX: fig2 was never closed in the original, leaking a
        # matplotlib figure on every call.
        plt.close(fig2)
##
##
def plot_ejectedfraction(SnapList, PlotSnapList, simulation_norm, mean_mvir_ejected,
                         std_mvir_ejected, N_ejected, mean_ejected_z,
                         std_ejected_z, N_z, model_tags, output_tag):
    '''
    Plots the ejected fraction as a function of the halo mass, plus its
    redshift evolution.

    Parallel compatible.
    Accepts a 3D array of the ejected fraction so we can plot for multiple
    models and redshifts.

    Parameters
    ----------
    SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
        Snapshots for each model.
    PlotSnapList : Nested array.
        Subset of snapshots that are actually plotted in the halo-mass panel.
    simulation_norm : array with length equal to the number of models.
        Denotes which simulation each model uses (same encoding as the other
        plotting routines in this file).
    mean_mvir_ejected, std_mvir_ejected, N_ejected : Nested 3-dimensional array, mean_mvir_ejected[model_number0][snapshot0] = [bin0_meanejected, ..., binN_meanejected], with length equal to the number of models.
        Mean/standard deviation for the ejected fraction binned into halo mass
        bins. N_ejected is the number of data points in each bin. Bounds are
        given by 'm_low' and 'm_high' in bins given by 'bin_width'.
    mean_ejected_z, std_ejected_z, N_z : Nested arrays.
        Mean/standard deviation/count of the ejected fraction per snapshot
        (used for the redshift-evolution panel).
    model_tags : array of strings with length equal to the number of models.
        Strings that contain the tag for each model. Will be placed on the plot.
    output_tag : string
        Name of the file that will be generated.

    Returns
    -------
    No returns.
    Generates and saves two plots (named via output_tag and
    "<output_tag>_z...").

    Units
    -----
    Halo Mass is in units of log10(Msun).
    '''

    print("Plotting the Ejected Fraction as a function of halo mass.")

    # Pool per-processor binned statistics onto rank 0.
    master_mean_ejected_halo, master_std_ejected_halo, master_N_ejected_halo, master_bin_middle_halo = \
        collect_across_tasks(mean_mvir_ejected, std_mvir_ejected, N_ejected, SnapList,
                             PlotSnapList, True, m_low, m_high)

    master_mean_ejected_z, master_std_ejected_z, master_N_ejected_z, _ = \
        collect_across_tasks(mean_ejected_z, std_ejected_z, N_z, SnapList)

    if rank == 0:
        fig1 = plt.figure()
        ax1 = fig1.add_subplot(111)  # Ejected fraction vs halo mass.

        fig2 = plt.figure()
        ax2 = fig2.add_subplot(111)  # Ejected fraction vs time/redshift.

        for model_number in range(0, len(SnapList)):
            # BUG FIX: simulation_norm values 0 (MySim) and 2 (Tiamat) had no
            # branch, leaving `cosmo` unbound and crashing at the twin-axis
            # conversion below.  The dispatch now mirrors the sibling plotting
            # routines.  NOTE(review): after the loop, `cosmo` is that of the
            # LAST model -- confirm all models share a cosmology.
            if(simulation_norm[model_number] == 0):
                cosmo = AllVars.Set_Params_Mysim()
            elif(simulation_norm[model_number] == 1):
                cosmo = AllVars.Set_Params_MiniMill()
            elif(simulation_norm[model_number] == 2):
                cosmo = AllVars.Set_Params_Tiamat()
            elif(simulation_norm[model_number] == 3):
                cosmo = AllVars.Set_Params_Tiamat_extended()
            elif(simulation_norm[model_number] == 4):
                cosmo = AllVars.Set_Params_Britton()
            elif(simulation_norm[model_number] == 5):
                cosmo = AllVars.Set_Params_Kali()

            for snapshot_idx in range(0, len(PlotSnapList[model_number])):
                label = AllVars.SnapZ[PlotSnapList[model_number][snapshot_idx]]
                ax1.plot(master_bin_middle_halo[model_number][snapshot_idx],
                         master_mean_ejected_halo[model_number][snapshot_idx],
                         color = PlotScripts.colors[snapshot_idx],
                         linestyle = PlotScripts.linestyles[model_number],
                         label = label, lw = PlotScripts.global_linewidth)

            # Redshift evolution of the mean ejected fraction for this model
            # (x-axis: time since Big Bang in Myr).
            ax2.plot((AllVars.t_BigBang - AllVars.Lookback_Time[SnapList[model_number]]) * 1.0e3,
                     master_mean_ejected_z[model_number],
                     color = PlotScripts.colors[model_number],
                     label = model_tags[model_number],
                     ls = PlotScripts.linestyles[model_number],
                     lw = PlotScripts.global_linewidth)

        for model_number in range(0, len(SnapList)):  # Just plot some garbage to get the legend labels correct.
            ax1.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = model_tags[model_number], linewidth = PlotScripts.global_linewidth)

        ## Halo-mass panel cosmetics. ##
        ax1.set_xlabel(r'$\log_{10}\ M_{\mathrm{vir}}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
        ax1.set_ylabel(r'$\mathrm{Ejected \: Fraction}$', size = PlotScripts.global_fontsize)
        ax1.set_xlim([8.0, 12])
        ax1.set_ylim([-0.05, 1.0])

        ax1.xaxis.set_minor_locator(mtick.MultipleLocator(0.1))
        ax1.yaxis.set_minor_locator(mtick.MultipleLocator(0.025))

        leg = ax1.legend(loc=1, numpoints=1, labelspacing=0.1)
        leg.draw_frame(False)  # Don't want a box frame
        for t in leg.get_texts():  # Reduce the size of the text
            t.set_fontsize('medium')

        outputFile = "./{0}{1}".format(output_tag, output_format)
        fig1.savefig(outputFile, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile))
        plt.close(fig1)

        ## Redshift-evolution panel cosmetics. ##
        ax2.set_xlabel(r"$\mathbf{Time \: since \: Big \: Bang \: [Myr]}$", fontsize = PlotScripts.global_labelsize)
        tick_locs = np.arange(200.0, 1000.0, 100.0)
        tick_labels = [r"$\mathbf{%d}$" % x for x in tick_locs]
        ax2.xaxis.set_major_locator(mtick.MultipleLocator(100))
        ax2.set_xticklabels(tick_labels, fontsize = PlotScripts.global_fontsize)
        ax2.set_xlim(PlotScripts.time_xlim)
        ax2.set_ylabel(r'$\mathbf{Mean f_{ej}}$', fontsize = PlotScripts.global_labelsize)

        # Twin x-axis labelled with redshift instead of time.
        ax3 = ax2.twiny()
        t_plot = (AllVars.t_BigBang - cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3  # Corresponding Time values on the bottom.
        z_labels = ["$\mathbf{%d}$" % x for x in PlotScripts.z_plot]  # Properly Latex-ize the labels.
        ax3.set_xlabel(r"$\mathbf{z}$", fontsize = PlotScripts.global_labelsize)
        ax3.set_xlim(PlotScripts.time_xlim)
        ax3.set_xticks(t_plot)  # Set the ticks according to the time values on the bottom,
        ax3.set_xticklabels(z_labels, fontsize = PlotScripts.global_fontsize)  # But label them as redshifts.

        leg = ax2.legend(loc='lower right', numpoints=1, labelspacing=0.1)
        leg.draw_frame(False)  # Don't want a box frame
        for t in leg.get_texts():  # Reduce the size of the text
            t.set_fontsize(PlotScripts.global_legendsize)

        outputFile2 = "./{0}_z{1}".format(output_tag, output_format)
        fig2.savefig(outputFile2, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile2))
        plt.close(fig2)
##
##
def plot_mvir_Ngamma(SnapList, mean_mvir_Ngamma, std_mvir_Ngamma, N_Ngamma, model_tags, output_tag, fesc_prescription=None, fesc_normalization=None, fitpath=None):
    '''
    Plots the number of ionizing photons (pure ngamma times fesc) as a function of halo mass.

    Parallel compatible.
    The input data has been binned as a function of halo virial mass (Mvir),
    with the bins defined at the top of the file (m_low, m_high, bin_width).
    Accepts 3D arrays to plot ngamma for multiple models.

    Parameters
    ----------
    SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
        Snapshots for each model.
    mean_mvir_Ngamma, std_mvir_Ngamma, N_Ngamma : Nested 2-dimensional array, mean_mvir_Ngamma[model_number0][snapshot0] = [bin0_meanNgamma, ..., binN_meanNgamma], with length equal to the number of bins.
        Mean/Standard deviation/number of data points in each halo mass (Mvir) bin.
        The number of photons is in units of 1.0e50 s^-1.
    model_tags : array of strings with length equal to the number of models.
        Strings that contain the tag for each model. Will be placed on the plot.
    output_tag : string
        Name of the file that will be generated.
    fesc_prescription : int (optional)
        If this parameter is defined, we will save the Mvir-Ngamma results in a text file (not needed if not saving).
        Number that controls what escape fraction prescription was used to generate the escape fractions.
        0 : Constant, fesc = Constant.
        1 : Scaling with Halo Mass, fesc = A*Mh^B.
        2 : Scaling with ejected fraction, fesc = fej*A + B.
    fesc_normalization : float (if fesc_prescription == 0) or `numpy.darray' with length 2 (if fesc_prescription == 1 or == 2) (optional).
        If this parameter is defined, we will save the Mvir-Ngamma results in a text file (not needed if not saving).
        If fesc_prescription == 0, gives the constant value for the escape fraction.
        If fesc_prescription == 1 or == 2, gives A and B with the form [A, B].
    fitpath : string (optional)
        If this parameter is defined, we will save the Mvir-Ngamma results in a text file (not needed if not saving).
        Defines the base path for where we are saving the results.

    Returns
    -------
    No returns.
    Generates and saves the plot (named via output_tag).

    Raises
    ------
    ValueError
        If only some (but not all) of the three optional saving parameters
        are provided, or if a fit file cannot be written.

    Units
    -----
    Ngamma is in units of 1.0e50 s^-1.
    '''

    print("Plotting ngamma*fesc against the halo mass")

    ## Array initialization. ##
    title = []
    redshift_labels = []

    mean_ngammafesc_array = []
    std_ngammafesc_array = []

    mean_halomass_array = []
    std_halomass_array = []

    bin_middle_array = []

    for model_number in range(0, len(SnapList)):
        redshift_labels.append([])
        mean_ngammafesc_array.append([])
        std_ngammafesc_array.append([])
        mean_halomass_array.append([])
        std_halomass_array.append([])
        bin_middle_array.append([])

    # BUG FIX: the original raised ValueError unconditionally whenever the
    # optional saving parameters were left at their default (None), even
    # though the docstring marks them "not needed if not saving".  Saving is
    # now only attempted when the caller provides the parameters; a partial
    # set is still an error.
    save_requested = (fesc_prescription is not None or
                      fesc_normalization is not None or
                      fitpath is not None)
    if save_requested and (fesc_prescription is None or
                           fesc_normalization is None or fitpath is None):
        raise ValueError("You've specified you want to save the Mvir-Ngamma results but haven't provided an escape fraction prescription, normalization and base path name")

    for model_number in range(0, len(SnapList)):
        for snapshot_idx in range(0, len(SnapList[model_number])):
            print("Doing Snapshot {0}".format(SnapList[model_number][snapshot_idx]))
            tmp = 'z = %.2f' %(AllVars.SnapZ[SnapList[model_number][snapshot_idx]])
            redshift_labels[model_number].append(tmp)

            N = N_Ngamma[model_number][snapshot_idx]

            mean_ngammafesc_array[model_number], std_ngammafesc_array[model_number] = calculate_pooled_stats(mean_ngammafesc_array[model_number], std_ngammafesc_array[model_number], mean_mvir_Ngamma[model_number][snapshot_idx], std_mvir_Ngamma[model_number][snapshot_idx], N)  # Collate the values from all processors.

            bin_middle_array[model_number].append(np.arange(m_low, m_high+bin_width, bin_width)[:-1] + bin_width * 0.5)

    if rank == 0:
        fig = plt.figure()
        ax1 = plt.subplot(111)

        for model_number in range(0, len(SnapList)):
            count = 0
            for snapshot_idx in range(0, len(SnapList[model_number])):
                # Only label redshifts once (on the first model).
                if model_number == 0:
                    title = redshift_labels[model_number][snapshot_idx]
                else:
                    title = ''

                mean = np.zeros((len(mean_ngammafesc_array[model_number][snapshot_idx])), dtype = np.float32)
                std = np.zeros((len(mean_ngammafesc_array[model_number][snapshot_idx])), dtype = np.float32)

                for i in range(0, len(mean)):
                    # Empty bins are masked out with NaN so they are skipped
                    # by the plot.
                    if(mean_ngammafesc_array[model_number][snapshot_idx][i] < 1e-10):
                        mean[i] = np.nan
                        std[i] = np.nan
                    else:
                        mean[i] = np.log10(mean_ngammafesc_array[model_number][snapshot_idx][i] * 1.0e50)  # Remember that the input data is in units of 1.0e50 s^-1.
                        std[i] = 0.434 * std_ngammafesc_array[model_number][snapshot_idx][i] / mean_ngammafesc_array[model_number][snapshot_idx][i]  # We're plotting in log space so the standard deviation is 0.434*log10(std)/log10(mean).

                bin_middle = bin_middle_array[model_number][snapshot_idx]

                if (count < 4):  # Plot at most the first 4 snapshot lines per model.
                    ax1.plot(bin_middle, mean, color = PlotScripts.colors[snapshot_idx], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = title, linewidth = PlotScripts.global_linewidth)
                    count += 1

                ## In this block we save the Mvir-Ngamma results to a file. ##
                if save_requested:
                    # All the checks that the escape fraction normalization was
                    # written correctly were performed in 'calculate_fesc()', so
                    # it is correct by this point.
                    if (fesc_prescription[model_number] == 0):  # Slightly different naming scheme for the constant case (it only has a float for fesc_normalization).
                        fname = "%s/fesc%d_%.3f_z%.3f.txt" %(fitpath, fesc_prescription[model_number], fesc_normalization[model_number], AllVars.SnapZ[SnapList[model_number][snapshot_idx]])
                    elif (fesc_prescription[model_number] == 1 or fesc_prescription[model_number] == 2):
                        fname = "%s/fesc%d_A%.3eB%.3f_z%.3f.txt" %(fitpath, fesc_prescription[model_number], fesc_normalization[model_number][0], fesc_normalization[model_number][1], AllVars.SnapZ[SnapList[model_number][snapshot_idx]])

                    if not os.access(fname, os.W_OK) and os.path.exists(fname):
                        print("The filename is {0}".format(fname))
                        raise ValueError("Can't write to this file.")

                    # Separate name for the file handle so it does not clobber
                    # the figure reference above.
                    with open(fname, "w+") as fout:
                        for i in range(0, len(bin_middle)):
                            fout.write("%.4f %.4f %.4f %d\n" %(bin_middle[i], mean[i], std[i], N_Ngamma[model_number][snapshot_idx][i]))
                    print("Wrote successfully to file {0}".format(fname))

        for model_number in range(0, len(SnapList)):  # Just plot some garbage to get the legend labels correct.
            ax1.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = model_tags[model_number], linewidth = PlotScripts.global_linewidth)

        ax1.set_xlabel(r'$\log_{10}\ M_{\mathrm{vir}}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
        ax1.set_ylabel(r'$\log_{10}\ \dot{N}_\gamma \: f_\mathrm{esc} \: [\mathrm{s}^{-1}]$', size = PlotScripts.global_fontsize)
        ax1.set_xlim([8.5, 12])

        ax1.xaxis.set_minor_locator(mtick.MultipleLocator(0.1))

        leg = ax1.legend(loc='upper left', numpoints=1, labelspacing=0.1)
        leg.draw_frame(False)  # Don't want a box frame
        for t in leg.get_texts():  # Reduce the size of the text
            t.set_fontsize('medium')

        outputFile = './' + output_tag + output_format
        plt.savefig(outputFile, bbox_inches='tight')  # Save the figure
        # BUG FIX: the original print was 'Saved file to'.format(outputFile),
        # which has no placeholder and never printed the filename.
        print('Saved file to {0}'.format(outputFile))
        plt.close()
def plot_photoncount(SnapList, sum_nion, simulation_norm, FirstFile, LastFile, NumFiles, model_tags, output_tag):
    '''
    Plots the ionizing emissivity as a function of redshift.
    We normalize the emissivity to Mpc^-3 and this function allows the read-in of only a subset of the volume.
    Parallel compatible.

    Parameters
    ---------
    SnapList : Nested array, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
        Snapshots for each model, defines the x-axis we plot against.
    sum_nion : Nested 1-dimensional array, sum_nion[z0, z1, ..., zn], with length equal to the number of redshifts.
        Number of escape ionizing photons (i.e., photon rate times the local escape fraction) at each redshift.
        In units of 1.0e50 s^-1.
    simulation_norm : array of ints with length equal to the number of models.
        Denotes which simulation each model uses.
        0 : MySim
        1 : Mini-Millennium
        2 : Tiamat (down to z = 5)
        3 : Extended Tiamat (down to z = 1.6ish).
        4 : Britton's Simulation
        5 : Kali
    FirstFile, LastFile, NumFiles : array of integers with length equal to the number of models.
        The file numbers for each model that were read in (defined by the range between [FirstFile, LastFile] inclusive) and the TOTAL number of files for this model (we may only be plotting a subset of the volume).
    model_tags : array of strings with length equal to the number of models.
        Strings that contain the tag for each model.  Will be placed on the plot.
    output_tag : string
        Name of the file that will be generated.

    Returns
    -------
    No returns.
    Generates and saves the plot (named via output_tag).

    Units
    -----
    sum_nion is in units of 1.0e50 s^-1.
    '''

    def set_cosmology(sim_norm):
        """Set the global AllVars simulation parameters for one model and
        return the corresponding cosmology object.

        Fixes two defects in the previous inline chains: the second chain
        re-tested norm == 1 with a bare 'if', so norm == 0 (MySim) fell
        through to the 'else' and raised a spurious ValueError; and option
        2 (Tiamat), although documented, was never handled.
        """
        if sim_norm == 0:
            return AllVars.Set_Params_Mysim()
        elif sim_norm == 1:
            return AllVars.Set_Params_MiniMill()
        elif sim_norm == 2:
            return AllVars.Set_Params_Tiamat()
        elif sim_norm == 3:
            return AllVars.Set_Params_Tiamat_extended()
        elif sim_norm == 4:
            return AllVars.Set_Params_Britton()
        elif sim_norm == 5:
            return AllVars.Set_Params_Kali()
        else:
            print("Simulation norm was set to {0}.".format(sim_norm))
            raise ValueError("This option has not been implemented yet. Get your head in the game Jacob!")

    print("Plotting the ionizing emissivity.")

    ## Reduce the per-task photon counts onto rank 0 and normalize them to a
    ## comoving volume density, scaling for reading only a subset of files. ##
    sum_array = []

    for model_number in range(0, len(SnapList)):
        set_cosmology(simulation_norm[model_number])

        sum_array.append([])
        for snapshot_idx in range(0, len(SnapList[model_number])):
            nion_sum_snapshot = comm.reduce(sum_nion[model_number][snapshot_idx], op = MPI.SUM, root = 0)
            if rank == 0:
                # Restore the 1.0e50 scaling and divide by the (h-corrected)
                # volume fraction that was actually read in.
                sum_array[model_number].append(nion_sum_snapshot * 1.0e50 / (pow(AllVars.BoxSize / AllVars.Hubble_h, 3) * (float(LastFile[model_number] - FirstFile[model_number] + 1) / float(NumFiles[model_number]))))

    if rank == 0:
        ax1 = plt.subplot(111)

        for model_number in range(0, len(SnapList)):
            cosmo = set_cosmology(simulation_norm[model_number])

            # Convert each snapshot's redshift to time since the Big Bang [Myr].
            t = np.empty(len(SnapList[model_number]))
            for snapshot_idx in range(0, len(SnapList[model_number])):
                t[snapshot_idx] = (AllVars.t_BigBang - cosmo.lookback_time(AllVars.SnapZ[SnapList[model_number][snapshot_idx]]).value) * 1.0e3

            # Only keep epochs with a non-negligible photon count so the
            # log10 plot stays well defined.
            t = [time_val for time_val, N in zip(t, sum_array[model_number]) if N > 1.0]
            sum_array[model_number] = [x for x in sum_array[model_number] if x > 1.0]

            print("The total number of ionizing photons for model {0} is {1} s^-1 Mpc^-3".format(model_number, sum(sum_array[model_number])))
            print(np.log10(sum_array[model_number]))
            ax1.plot(t, np.log10(sum_array[model_number]), color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[model_number], label = model_tags[model_number], linewidth = PlotScripts.global_linewidth)
            #ax1.fill_between(t, np.subtract(mean,std), np.add(mean,std), color = colors[model_number], alpha = 0.25)

        ax1.xaxis.set_minor_locator(mtick.MultipleLocator(PlotScripts.time_tickinterval))
        #ax1.yaxis.set_minor_locator(mtick.MultipleLocator(0.025))
        ax1.set_xlim(PlotScripts.time_xlim)
        ax1.set_ylim([48.5, 51.5])

        ## Second x-axis on top showing the corresponding redshifts. ##
        ax2 = ax1.twiny()
        t_plot = (AllVars.t_BigBang - cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3  # Corresponding Time values on the bottom.
        z_labels = ["$%d$" % x for x in PlotScripts.z_plot]  # Properly Latex-ize the labels.
        ax2.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
        ax2.set_xlim(PlotScripts.time_xlim)
        ax2.set_xticks(t_plot)  # Set the ticks according to the time values on the bottom,
        ax2.set_xticklabels(z_labels)  # But label them as redshifts.

        ax1.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_fontsize)
        ax1.set_ylabel(r'$\sum f_\mathrm{esc}\dot{N}_\gamma \: [\mathrm{s}^{-1}\mathrm{Mpc}^{-3}]$', fontsize = PlotScripts.global_fontsize)

        plot_time = 1  # Plot the observations against time (1) or redshift (0).
        bouwens_z = np.arange(6, 16)  # Redshift range for the observations.
        bouwens_t = (AllVars.t_BigBang - cosmo.lookback_time(bouwens_z).value) * 1.0e3  # Corresponding values for what we will plot on the x-axis.

        bouwens_1sigma_lower = [50.81, 50.73, 50.60, 50.41, 50.21, 50.00, 49.80, 49.60, 49.39, 49.18]  # 68% Confidence Intervals for the ionizing emissitivity from Bouwens 2015.
        bouwens_1sigma_upper = [51.04, 50.85, 50.71, 50.62, 50.56, 50.49, 50.43, 50.36, 50.29, 50.23]
        bouwens_2sigma_lower = [50.72, 50.69, 50.52, 50.27, 50.01, 49.75, 49.51, 49.24, 48.99, 48.74]  # 95% CI.
        bouwens_2sigma_upper = [51.11, 50.90, 50.74, 50.69, 50.66, 50.64, 50.61, 50.59, 50.57, 50.55]

        if plot_time == 1:
            ax1.fill_between(bouwens_t, bouwens_1sigma_lower, bouwens_1sigma_upper, color = 'k', alpha = 0.2)
            ax1.fill_between(bouwens_t, bouwens_2sigma_lower, bouwens_2sigma_upper, color = 'k', alpha = 0.4, label = r"$\mathrm{Bouwens \: et \: al. \: (2015)}$")
        else:
            ax1.fill_between(bouwens_z, bouwens_1sigma_lower, bouwens_1sigma_upper, color = 'k', alpha = 0.2)
            ax1.fill_between(bouwens_z, bouwens_2sigma_lower, bouwens_2sigma_upper, color = 'k', alpha = 0.4, label = r"$\mathrm{Bouwens \: et \: al. \: (2015)}$")

        # ax1.text(0.075, 0.965, '(a)', horizontalalignment='center', verticalalignment='center', transform = ax.transAxes)
        ax1.text(350, 50.0, r"$68\%$", horizontalalignment='center', verticalalignment = 'center', fontsize = PlotScripts.global_labelsize)
        ax1.text(350, 50.8, r"$95\%$", horizontalalignment='center', verticalalignment = 'center', fontsize = PlotScripts.global_labelsize)

        leg = ax1.legend(loc='lower right', numpoints=1, labelspacing=0.1)
        leg.draw_frame(False)  # Don't want a box frame
        for leg_text in leg.get_texts():  # Reduce the size of the text
            leg_text.set_fontsize(PlotScripts.global_legendsize)

        plt.tight_layout()

        outputFile = './{0}{1}'.format(output_tag, output_format)
        plt.savefig(outputFile)  # Save the figure
        print('Saved file to {0}'.format(outputFile))
        plt.close()
##
##
def plot_quasars_count(SnapList, PlotList, N_quasars_z, N_quasars_boost_z, N_gal_z, mean_quasar_activity, std_quasar_activity, N_halo, N_merger_halo, N_gal, N_merger_galaxy, fesc_prescription, simulation_norm, FirstFile, LastFile, NumFile, model_tags, output_tag):
    '''
    Plots quasar counts/densities and merger statistics for each model.

    Parameters
    ---------
    SnapList : Nested 'array-like` of ints, SnapList[model_number0] = [snapshot0_model0, ..., snapshotN_model0], with length equal to the number of models.
        Snapshots that we plot the quasar density at for each model.
    PlotList : Nested array of ints, PlotList[model_number0]= [plotsnapshot0_model0, ..., plotsnapshotN_model0], with length equal to the number of models.
        Snapshots that will be plotted for the quasar activity as a function of halo mass.
    N_quasars_z : Nested array of floats, N_quasars_z[model_number0] = [N_quasars_z0, N_quasars_z1, ..., N_quasars_zN]. Outer array has length equal to the number of models, inner array has length equal to length of the model's SnapList.
        Number of quasars, THAT WENT OFF, during the given redshift.
    N_quasars_boost_z : Nested array of floats, N_quasars_boost_z[model_number0] = [N_quasars_boost_z0, N_quasars_boost_z1, ..., N_quasars_boost_zN]. Outer array has length equal to the number of models, inner array has length equal to length of the model's SnapList.
        Number of galaxies that had their escape fraction boosted by quasar activity.
    N_gal_z : Nested array of floats, N_gal_z[model_number0] = [N_gal_z0, N_gal_z1, ..., N_gal_zN]. Outer array has length equal to the number of models, inner array has length equal to length of the model's SnapList.
        Number of galaxies at each redshift.
    mean_quasar_activity, std_quasar_activity : Nested 2-dimensional array of floats, mean_quasar_activity[model_number0][snapshot0] = [bin0quasar_activity, ..., binNquasar_activity]. Outer array has length equal to the number of models, inner array has length equal to the length of the model's snaplist and most inner array has length equal to the number of halo bins (NB).
        Mean/std fraction of galaxies that had quasar go off during each snapshot as a function of halo mass.
        NOTE : This is for quasars going off, not for galaxies that have their escape fraction being boosted.
    fesc_prescription : Array with length equal to the number of models.
        Denotes what escape fraction prescription each model used.  Quasars are only tracked when fesc_prescription == 3.
    simulation_norm : array with length equal to the number of models.
        Denotes which simulation each model uses.
        0 : MySim
        1 : Mini-Millennium
        2 : Tiamat (down to z = 5)
        3 : Extended Tiamat (down to z = 1.6ish).
        4 : Britton's Simulation
        5 : Kali
    FirstFile, LastFile, NumFile : array of integers with length equal to the number of models.
        The file numbers for each model that were read in (defined by the range between [FirstFile, LastFile] inclusive) and the TOTAL number of files for this model (we may only be plotting a subset of the volume).
    model_tags : array of strings with length equal to the number of models.
        Strings that contain the tag for each model.  Will be placed on the plot.
    output_tag : string
        Name of the file that will be generated.  File will be saved in the current directory with the output format defined by the 'output_format' variable at the beginning of the file.

    Returns
    -------
    No returns.
    Generates and saves the plot (named via output_tag).

    Units
    -----
    No relevant units.
    '''

    print("Plotting quasar count/density")

    ## All figures live on rank 0 only; the other ranks just participate in
    ## the MPI reductions below.
    if rank == 0:
        fig = plt.figure()          # Quasar density / galaxy density vs time.
        ax1 = fig.add_subplot(111)
        ax6 = ax1.twinx()
        fig2 = plt.figure()         # Boosted-fesc count / fraction vs time.
        ax3 = fig2.add_subplot(111)
        ax5 = ax3.twinx()
        fig3 = plt.figure()         # Mean quasar activity vs halo mass.
        ax7 = fig3.add_subplot(111)
        fig4 = plt.figure()         # Merger counts vs halo mass.
        ax50 = fig4.add_subplot(111)
        fig5 = plt.figure()         # Merger fraction vs stellar mass.
        ax55 = fig5.add_subplot(111)
        fig6 = plt.figure()         # Total merger fraction vs time.
        ax56 = fig6.add_subplot(111)

    # Per-model accumulators; filled via the MPI reductions below (rank 0 only
    # holds meaningful values after reduction).
    mean_quasar_activity_array = []
    std_quasar_activity_array = []
    N_quasar_activity_array = []
    N_gal_halo_array = []
    N_gal_array = []
    merger_counts_halo_array = []
    merger_counts_galaxy_array = []
    bin_middle_halo_array = []
    bin_middle_galaxy_array = []

    for model_number in range(0, len(SnapList)):  # Does this for each of the models.
        if (fesc_prescription[model_number] != 3):  # Want to skip the models that didn't count quasars.
            continue

        ## Normalization for each model. ##
        if (simulation_norm[model_number] == 0):
            AllVars.Set_Params_Mysim()
        elif (simulation_norm[model_number] == 1):
            AllVars.Set_Params_MiniMill()
        elif (simulation_norm[model_number] == 2):
            AllVars.Set_Params_Tiamat()
        elif (simulation_norm[model_number] == 3):
            AllVars.Set_Params_Tiamat_extended()
        elif (simulation_norm[model_number] == 4):
            AllVars.Set_Params_Britton()
        elif (simulation_norm[model_number] == 5):
            AllVars.Set_Params_Kali()

        mean_quasar_activity_array.append([])
        std_quasar_activity_array.append([])
        N_quasar_activity_array.append([])
        N_gal_halo_array.append([])
        N_gal_array.append([])
        merger_counts_halo_array.append([])
        merger_counts_galaxy_array.append([])
        bin_middle_halo_array.append([])
        bin_middle_galaxy_array.append([])

        box_factor = (LastFile[model_number] - FirstFile[model_number] + 1.0)/(NumFile[model_number])  # This factor allows us to take a sub-volume of the box and scale the results to represent the entire box.
        print("We are plotting the quasar density using {0:.4f} of the box's volume.".format(box_factor))
        norm = pow(AllVars.BoxSize,3) / pow(AllVars.Hubble_h, 3) * box_factor
        ####

        ## We perform the plotting on Rank 0 so only this rank requires the final counts array. ##
        if rank == 0:
            quasars_total = np.zeros_like((N_quasars_z[model_number]))
            boost_total = np.zeros_like(N_quasars_boost_z[model_number])
            gal_count_total = np.zeros_like(N_gal_z[model_number])
        else:
            quasars_total = None
            boost_total = None
            gal_count_total = None

        N_quasars_tmp = np.array((N_quasars_z[model_number]))  # So we can use MPI.Reduce()
        comm.Reduce([N_quasars_tmp, MPI.DOUBLE], [quasars_total, MPI.DOUBLE], op = MPI.SUM, root = 0)  # Sum the number of quasars and passes back to rank 0.

        N_quasars_boost_tmp = np.array(N_quasars_boost_z[model_number])  # So we can use MPI.Reduce()
        comm.Reduce([N_quasars_boost_tmp, MPI.DOUBLE], [boost_total, MPI.DOUBLE], op = MPI.SUM, root = 0)  # Sum the number of galaxies that had their fesc boosted.

        N_gal_tmp = np.array(N_gal_z[model_number])  # So we can use MPI.Reduce()
        comm.Reduce([N_gal_tmp, MPI.DOUBLE], [gal_count_total, MPI.DOUBLE], op = MPI.SUM, root = 0)  # Sum the number of total galaxies.

        ## Pool the per-snapshot halo-binned statistics across all ranks. ##
        for snapshot_idx in range(len(SnapList[model_number])):
            mean_quasar_activity_array[model_number], std_quasar_activity_array[model_number], N_quasar_activity_array[model_number] = calculate_pooled_stats(mean_quasar_activity_array[model_number], std_quasar_activity_array[model_number], N_quasar_activity_array[model_number], mean_quasar_activity[model_number][snapshot_idx], std_quasar_activity[model_number][snapshot_idx], N_halo[model_number][snapshot_idx])

            if rank == 0:
                merger_count_halo_total = np.zeros_like((N_merger_halo[model_number][snapshot_idx]))
                N_gal_halo_total = np.zeros_like((N_halo[model_number][snapshot_idx]))
                merger_count_galaxy_total = np.zeros_like((N_merger_galaxy[model_number][snapshot_idx]))
                N_gal_total = np.zeros_like((N_gal[model_number][snapshot_idx]))
            else:
                merger_count_halo_total = None
                N_gal_halo_total = None
                merger_count_galaxy_total = None
                N_gal_total = None

            comm.Reduce([N_merger_halo[model_number][snapshot_idx], MPI.FLOAT], [merger_count_halo_total, MPI.FLOAT], op = MPI.SUM, root = 0)  # Sum all the stellar mass and pass to Rank 0.
            comm.Reduce([N_halo[model_number][snapshot_idx], MPI.FLOAT], [N_gal_halo_total, MPI.FLOAT], op = MPI.SUM, root = 0)  # Sum all the stellar mass and pass to Rank 0.
            comm.Reduce([N_merger_galaxy[model_number][snapshot_idx], MPI.FLOAT], [merger_count_galaxy_total, MPI.FLOAT], op = MPI.SUM, root = 0)  # Sum all the stellar mass and pass to Rank 0.
            comm.Reduce([N_gal[model_number][snapshot_idx], MPI.FLOAT], [N_gal_total, MPI.FLOAT], op = MPI.SUM, root = 0)  # Sum all the stellar mass and pass to Rank 0.

            if rank == 0:
                merger_counts_halo_array[model_number].append(merger_count_halo_total)
                N_gal_halo_array[model_number].append(N_gal_halo_total)
                merger_counts_galaxy_array[model_number].append(merger_count_galaxy_total)
                N_gal_array[model_number].append(N_gal_total)
                bin_middle_halo_array[model_number].append(np.arange(m_low, m_high+bin_width, bin_width)[:-1] + bin_width * 0.5)
                bin_middle_galaxy_array[model_number].append(np.arange(m_gal_low, m_gal_high+bin_width, bin_width)[:-1] + bin_width * 0.5)

        ## All plotting is done on rank 0 with the reduced arrays. ##
        # NOTE(review): this whole plotting section (including the legends and
        # savefig calls below) runs once per model with fesc_prescription == 3;
        # it behaves as intended only when a single model tracks quasars —
        # confirm against the callers.
        if rank == 0:
            plot_count = 0  # Index into PlotList of the next snapshot to draw.
            stop_plot = 0   # Set to 1 once every PlotList snapshot is drawn.
            title = model_tags[model_number]

            t = np.empty(len(SnapList[model_number]))
            ZZ = np.empty(len(SnapList[model_number]))
            for snapshot_idx in range(0, len(SnapList[model_number])):
                # Time since Big Bang [Myr] and redshift for this snapshot.
                t[snapshot_idx] = (AllVars.t_BigBang - AllVars.Lookback_Time[SnapList[model_number][snapshot_idx]]) * 1.0e3
                ZZ[snapshot_idx] = AllVars.SnapZ[SnapList[model_number][snapshot_idx]]

                if (stop_plot == 0):
                    # print("Snapshot {0} PlotSnapshot "
                    #"{1}".format(SnapList[model_number][snapshot_idx], PlotList[model_number][plot_count]))
                    if (SnapList[model_number][snapshot_idx] == PlotList[model_number][plot_count]):
                        label = "z = {0:.2f}".format(AllVars.SnapZ[PlotList[model_number][plot_count]])
                        ax7.plot(bin_middle_halo_array[model_number][snapshot_idx], mean_quasar_activity_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
                        #ax50.plot(bin_middle_halo_array[model_number][snapshot_idx], merger_counts_array[model_number][snapshot_idx] / gal_count_total[snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
                        ax50.plot(bin_middle_halo_array[model_number][snapshot_idx], merger_counts_halo_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
                        #ax50.plot(bin_middle_halo_array[model_number][snapshot_idx], merger_counts_array[model_number][snapshot_idx] / N_gal_halo_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
                        #ax55.plot(bin_middle_galaxy_array[model_number][snapshot_idx], merger_counts_galaxy_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
                        ax55.plot(bin_middle_galaxy_array[model_number][snapshot_idx],
                                  merger_counts_galaxy_array[model_number][snapshot_idx] / N_gal_array[model_number][snapshot_idx], color = PlotScripts.colors[plot_count], linestyle = PlotScripts.linestyles[model_number], rasterized = True, label = label, linewidth = PlotScripts.global_linewidth)
                        print("plot_count = {0} len(PlotList) = {1}".format(plot_count,
                              len(PlotList[model_number])))
                        plot_count += 1
                        print("plot_count = {0} len(PlotList) = {1}".format(plot_count,
                              len(PlotList[model_number])))
                        if (plot_count == len(PlotList[model_number])):
                            stop_plot = 1

                print("For Snapshot {0} at t {3} there were {1} total mergers compared to {2} total galaxies.".format(snapshot_idx, np.sum(merger_counts_galaxy_array[model_number][snapshot_idx]), np.sum(gal_count_total[snapshot_idx]), t[snapshot_idx]))
                if (np.sum(gal_count_total[snapshot_idx]) > 0.0 and np.sum(merger_counts_galaxy_array[model_number][snapshot_idx]) > 0.0):
                    ax56.scatter(t[snapshot_idx], np.sum(merger_counts_galaxy_array[model_number][snapshot_idx]) / np.sum(gal_count_total[snapshot_idx]), color = 'r', rasterized = True)
                    #ax56.scatter(t[snapshot_idx], quasars_total[snapshot_idx] / np.sum(gal_count_total[snapshot_idx]), color = 'r', rasterized = True)

            ax1.plot(t, quasars_total / norm, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[0], rasterized = True, linewidth = PlotScripts.global_linewidth)
            p = np.where((ZZ < 15))[0]
            #ax1.plot(ZZ[p], quasars_total[p] / norm, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[0], rasterized = True, linewidth = PlotScripts.global_linewidth)
            ax3.plot(t, boost_total, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[0], rasterized = True, label = title, linewidth = PlotScripts.global_linewidth)
            w = np.where((gal_count_total > 0.0))[0]  # Since we're doing a division, need to only plot those redshifts that actually have galaxies.
            ax5.plot(t[w], np.divide(boost_total[w], gal_count_total[w]), color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[1], rasterized = True, linewidth = PlotScripts.global_linewidth)
            ax6.plot(t[w], gal_count_total[w] / norm, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[1], rasterized = True, linewidth = PlotScripts.global_linewidth)
            #ax6.plot(ZZ[p], gal_count_total[p] / norm, color = PlotScripts.colors[model_number], linestyle = PlotScripts.linestyles[1], rasterized = True, linewidth = PlotScripts.global_linewidth)

            ## Dummy curves so the linestyles get their own legend entries. ##
            ax1.plot(np.nan, np.nan, color = PlotScripts.colors[0], linestyle = PlotScripts.linestyles[0], label = "Quasar Ejection Density")
            ax1.plot(np.nan, np.nan, color = PlotScripts.colors[0], linestyle = PlotScripts.linestyles[1], label = "Galaxy Density")
            ax3.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[0], label = "Count")
            ax3.plot(np.nan, np.nan, color = 'k', linestyle = PlotScripts.linestyles[1], label = "Fraction of Galaxies")

            ax7.set_xlabel(r'$\log_{10}\ M_\mathrm{vir}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
            ax7.set_ylabel(r'$\mathrm{Mean \: Quasar \: Activity}$', size = PlotScripts.global_fontsize)

            ax50.set_xlabel(r'$\log_{10}\ M_\mathrm{vir}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
            #ax50.set_ylabel(r'$\mathrm{Fraction \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)
            ax50.set_ylabel(r'$\mathrm{Number \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)

            ax55.set_xlabel(r'$\log_{10}\ M_\mathrm{*}\ [M_{\odot}]$', size = PlotScripts.global_fontsize)
            ax55.set_ylabel(r'$\mathrm{Fraction \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)
            #ax55.set_ylabel(r'$\mathrm{Number \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)

            ax56.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_labelsize)
            ax56.set_ylabel(r'$\mathrm{Fraction \: Galaxies \: Undergoing \: Merger}$', size = PlotScripts.global_fontsize)
            #ax56.set_ylabel(r'$\mathrm{Fraction \: Galaxies \: Quasar \: Activity}$', size = PlotScripts.global_fontsize)
            ax56.set_yscale('log', nonposy='clip')

            # Mark the halo-mass resolution limit (32 particles).
            ax50.axvline(np.log10(32.0*AllVars.PartMass / AllVars.Hubble_h), color = 'k', linewidth = PlotScripts.global_linewidth, linestyle = '-.')

            ax1.xaxis.set_minor_locator(mtick.MultipleLocator(PlotScripts.time_tickinterval))
            ax1.set_xlim(PlotScripts.time_xlim)
            ax1.set_yscale('log', nonposy='clip')

            ax3.xaxis.set_minor_locator(mtick.MultipleLocator(PlotScripts.time_tickinterval))
            ax3.set_xlim(PlotScripts.time_xlim)
            ax3.set_yscale('log', nonposy='clip')

            ## Create a second axis at the top that contains the corresponding redshifts. ##
            ## The redshift defined in the variable 'z_plot' will be displayed. ##
            ax2 = ax1.twiny()
            ax4 = ax3.twiny()
            ax57 = ax56.twiny()

            t_plot = (AllVars.t_BigBang - AllVars.cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3  # Corresponding time values on the bottom.
            z_labels = ["$%d$" % x for x in PlotScripts.z_plot]  # Properly Latex-ize the labels.

            ax2.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
            ax2.set_xlim(PlotScripts.time_xlim)
            ax2.set_xticks(t_plot)  # Set the ticks according to the time values on the bottom,
            ax2.set_xticklabels(z_labels)  # But label them as redshifts.

            ax4.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
            ax4.set_xlim(PlotScripts.time_xlim)
            ax4.set_xticks(t_plot)  # Set the ticks according to the time values on the bottom,
            ax4.set_xticklabels(z_labels)  # But label them as redshifts.

            ax57.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
            ax57.set_xlim(PlotScripts.time_xlim)
            ax57.set_xticks(t_plot)  # Set the ticks according to the time values on the bottom,
            ax57.set_xticklabels(z_labels)  # But label them as redshifts.

            ax1.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_labelsize)
            #ax1.set_xlabel(r"$z$", size = PlotScripts.global_labelsize)
            ax1.set_ylabel(r'$N_\mathrm{Quasars} \: [\mathrm{Mpc}^{-3}]$', fontsize = PlotScripts.global_fontsize)
            ax6.set_ylabel(r'$N_\mathrm{Gal} \: [\mathrm{Mpc}^{-3}]$', fontsize = PlotScripts.global_fontsize)

            ax3.set_xlabel(r"$\mathrm{Time \: Since \: Big \: Bang \: [Myr]}$", size = PlotScripts.global_labelsize)
            ax3.set_ylabel(r'$N_\mathrm{Boosted}$', fontsize = PlotScripts.global_fontsize)
            ax5.set_ylabel(r'$\mathrm{Fraction \: Boosted}$', fontsize = PlotScripts.global_fontsize)

            leg = ax1.legend(loc='lower right', numpoints=1, labelspacing=0.1)
            leg.draw_frame(False)  # Don't want a box frame
            for t in leg.get_texts():  # Reduce the size of the text
                t.set_fontsize(PlotScripts.global_legendsize)

            leg = ax3.legend(loc='lower left', numpoints=1, labelspacing=0.1)
            leg.draw_frame(False)  # Don't want a box frame
            for t in leg.get_texts():  # Reduce the size of the text
                t.set_fontsize(PlotScripts.global_legendsize)

            leg = ax7.legend(loc='upper left', numpoints=1, labelspacing=0.1)
            leg.draw_frame(False)  # Don't want a box frame
            for t in leg.get_texts():  # Reduce the size of the text
                t.set_fontsize(PlotScripts.global_legendsize)

            leg = ax50.legend(loc='upper right', numpoints=1, labelspacing=0.1)
            leg.draw_frame(False)  # Don't want a box frame
            for t in leg.get_texts():  # Reduce the size of the text
                t.set_fontsize(PlotScripts.global_legendsize)

            leg = ax55.legend(loc='upper right', numpoints=1, labelspacing=0.1)
            leg.draw_frame(False)  # Don't want a box frame
            for t in leg.get_texts():  # Reduce the size of the text
                t.set_fontsize(PlotScripts.global_legendsize)

            # NOTE(review): fig4 is missing from the tight_layout calls.
            fig.tight_layout()
            fig2.tight_layout()
            fig3.tight_layout()
            fig5.tight_layout()
            fig6.tight_layout()

            outputFile1 = './{0}_quasardensity{1}'.format(output_tag, output_format)
            outputFile2 = './{0}_boostedcount{1}'.format(output_tag, output_format)
            outputFile3 = './{0}_quasar_activity_halo{1}'.format(output_tag, output_format)
            outputFile4 = './{0}_mergercount_global{1}'.format(output_tag, output_format)
            outputFile5 = './{0}_mergercount_global_stellarmass{1}'.format(output_tag, output_format)
            outputFile6 = './{0}_mergercount_total{1}'.format(output_tag, output_format)

            fig.savefig(outputFile1)  # Save the figure
            fig2.savefig(outputFile2)  # Save the figure
            fig3.savefig(outputFile3)  # Save the figure
            fig4.savefig(outputFile4)  # Save the figure
            fig5.savefig(outputFile5)  # Save the figure
            fig6.savefig(outputFile6)  # Save the figure

            print("Saved to {0}".format(outputFile1))
            print("Saved to {0}".format(outputFile2))
            print("Saved to {0}".format(outputFile3))
            print("Saved to {0}".format(outputFile4))
            print("Saved to {0}".format(outputFile5))
            print("Saved to {0}".format(outputFile6))

            # NOTE(review): fig4, fig5 and fig6 are never closed and will leak
            # matplotlib figures — confirm and add plt.close() calls.
            plt.close(fig)
            plt.close(fig2)
            plt.close(fig3)
##
###
###
###
###
def plot_reionmod(PlotSnapList, SnapList, simulation_norm, mean_reionmod_halo,
                  std_reionmod_halo, N_halo, mean_reionmod_z, std_reionmod_z,
                  N_reionmod, plot_z, model_tags, output_tag):
    """
    Plot the reionization modifier as a function of halo mass and redshift.

    Parameters
    ----------
    PlotSnapList, SnapList: 2D Nested arrays of integers. Outer length is equal to the number of models and inner length is number of snapshots we're plotting/calculated for.
        PlotSnapList contains the snapshots for each model we will plot for the halo mass figure.
        SnapList contains the snapshots for each model that we have performed calculations for.  These aren't equal because we don't want to plot halo curves for ALL redshifts.
    simulation_norm: Array of integers. Length is equal to the number of models.
        Contains the simulation identifier for each model.  Used to set the parameters of each model.
    mean_reionmod_halo, std_reionmod_halo: 3D Nested arrays of floats.  Most outer length is equal to the number of models, next length is number of snapshots for each model, then inner-most length is the number of halo mass bins (given by NB).
        Contains the mean/standard deviation values for the reionization modifier as a function of halo mass.
        NOTE: These are unique for each task.
    N_halo: 3D Nested arrays of floats.  Lengths are identical to mean_reionmod_halo.
        Contains the number of halos in each halo mass bin.
        NOTE: These are unique for each task.
    mean_reionmod_z, std_reionmod_z: 2D Nested arrays of floats.  Outer length is equal to the number of models, inner length is the number of snapshots for each model.  NOTE: This inner length can be different to the length of PlotSnapList as we don't necessarily need to plot for every snapshot we calculate.
        Contains the mean/standard deviation values for the reionization modifier as a function of redshift.
        NOTE: These are unique for each task.
    N_reionmod: 2D Nested arrays of floats.  Lengths are identical to mean_reionmod_z.
        Contains the number of galaxies at each redshift that have non-negative reionization modifier.  A negative reionization modifier is a galaxy who didn't have infall/stripping during the snapshot.
        NOTE: These are unique for each task.
    plot_z: Boolean.
        Denotes whether we want to plot the reionization modifier as a function
        of redshift.  Useful because we often only calculate statistics for a
        subset of the snapshots to decrease computation time.  For these runs,
        we don't want to plot for something that requires ALL snapshots.
    model_tags: Array of strings.  Length is equal to the number of models.
        Contains the legend labels for each model.
    output_tag: String.
        The prefix for the output file.

    Returns
    ----------
    None.  Plot is saved in current directory as "./<output_tag>.<output_format>"
    """

    # Pool the task-local halo-binned statistics onto rank 0.  The multi-name
    # target is parenthesized so the assignment legally spans multiple lines
    # (the previous bare two-line target was a syntax error).
    (master_mean_reionmod_halo, master_std_reionmod_halo,
     master_N_reionmod_halo, master_bin_middle) = collect_across_tasks(
        mean_reionmod_halo, std_reionmod_halo, N_halo, SnapList,
        PlotSnapList, True, m_low, m_high)

    if plot_z:
        master_mean_reionmod_z, master_std_reionmod_z, master_N_reionmod_z, _ = \
            collect_across_tasks(mean_reionmod_z, std_reionmod_z, N_reionmod)

    if rank == 0:
        fig1 = plt.figure()
        ax1 = fig1.add_subplot(111)

        if plot_z:
            fig2 = plt.figure()
            ax10 = fig2.add_subplot(111)

        for model_number in range(len(PlotSnapList)):
            # Set the cosmology for this model.
            # NOTE(review): there is no branch for simulation_norm values 0 or
            # 2; such a model silently re-uses the previous model's cosmology
            # (or leaves 'cosmo' undefined for the first model) — confirm.
            if(simulation_norm[model_number] == 1):
                cosmo = AllVars.Set_Params_MiniMill()
            elif(simulation_norm[model_number] == 3):
                cosmo = AllVars.Set_Params_Tiamat_extended()
            elif(simulation_norm[model_number] == 4):
                cosmo = AllVars.Set_Params_Britton()
            elif(simulation_norm[model_number] == 5):
                cosmo = AllVars.Set_Params_Kali()

            for snapshot_idx in range(len(PlotSnapList[model_number])):
                # Only label the first curve of each model so the legend gets
                # exactly one entry per model.
                if snapshot_idx == 0:
                    label = model_tags[model_number]
                else:
                    label = ""

                # Only plot mass bins that actually contain halos.
                nonzero_bins = np.where(master_N_reionmod_halo[model_number][snapshot_idx] > 0.0)[0]
                ax1.plot(master_bin_middle[model_number][snapshot_idx][nonzero_bins],
                         master_mean_reionmod_halo[model_number][snapshot_idx][nonzero_bins],
                         label = label, ls = PlotScripts.linestyles[model_number],
                         color = PlotScripts.colors[snapshot_idx])

            if plot_z:
                ax10.plot((AllVars.t_BigBang - AllVars.Lookback_Time[SnapList[model_number]])*1.0e3, master_mean_reionmod_z[model_number], color = PlotScripts.colors[model_number], label = model_tags[model_number], ls = PlotScripts.linestyles[model_number], lw = 3)

            # Dummy (NaN) curves so each plotted redshift gets a legend entry.
            for count, snapshot_idx in enumerate(PlotSnapList[model_number]):
                #label = r"$\mathbf{z = " + str(int(round(AllVars.SnapZ[snapshot_idx]))) + "}$"
                label = r"$\mathbf{z = " + str(AllVars.SnapZ[snapshot_idx]) + "}$"
                ax1.plot(np.nan, np.nan, ls = PlotScripts.linestyles[0], color =
                         PlotScripts.colors[count], label = label)

        ax1.set_xlim([8.5, 11.5])
        ax1.set_ylim([0.0, 1.05])
        ax1.set_xlabel(r'$\mathbf{log_{10} \: M_{vir} \:[M_{\odot}]}$', fontsize = PlotScripts.global_labelsize)
        ax1.set_ylabel(r'$\mathbf{Mean ReionMod}$', fontsize = PlotScripts.global_labelsize)

        leg = ax1.legend(loc='lower right', numpoints=1, labelspacing=0.1)
        leg.draw_frame(False)  # Don't want a box frame
        for leg_text in leg.get_texts():  # Reduce the size of the text
            leg_text.set_fontsize(PlotScripts.global_legendsize)

        outputFile1 = "./{0}_halo{1}".format(output_tag, output_format)
        fig1.savefig(outputFile1, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile1))
        plt.close(fig1)

        if plot_z:
            ax10.set_xlabel(r"$\mathbf{Time \: since \: Big \: Bang \: [Myr]}$", fontsize = PlotScripts.global_labelsize)
            tick_locs = np.arange(200.0, 1000.0, 100.0)
            tick_labels = [r"$\mathbf{%d}$" % x for x in tick_locs]
            ax10.xaxis.set_major_locator(mtick.MultipleLocator(100))
            ax10.set_xticklabels(tick_labels, fontsize = PlotScripts.global_fontsize)
            ax10.set_xlim(PlotScripts.time_xlim)
            ax10.set_ylabel(r'$\mathbf{Mean ReionMod}$', fontsize = PlotScripts.global_labelsize)

            ## Second x-axis on top showing the corresponding redshifts. ##
            ax11 = ax10.twiny()
            t_plot = (AllVars.t_BigBang - cosmo.lookback_time(PlotScripts.z_plot).value) * 1.0e3  # Corresponding Time values on the bottom.
            z_labels = ["$\mathbf{%d}$" % x for x in PlotScripts.z_plot]  # Properly Latex-ize the labels.
            ax11.set_xlabel(r"$\mathbf{z}$", fontsize = PlotScripts.global_labelsize)
            ax11.set_xlim(PlotScripts.time_xlim)
            ax11.set_xticks(t_plot)  # Set the ticks according to the time values on the bottom,
            ax11.set_xticklabels(z_labels, fontsize = PlotScripts.global_fontsize)  # But label them as redshifts.

            leg = ax10.legend(loc='lower right', numpoints=1, labelspacing=0.1)
            leg.draw_frame(False)  # Don't want a box frame
            for leg_text in leg.get_texts():  # Reduce the size of the text
                leg_text.set_fontsize(PlotScripts.global_legendsize)

            outputFile2 = "./{0}_z{1}".format(output_tag, output_format)
            fig2.savefig(outputFile2, bbox_inches='tight')  # Save the figure
            print('Saved file to {0}'.format(outputFile2))
            plt.close(fig2)
##
def plot_dust(PlotSnapList, SnapList, simulation_norm, mean_dust_galaxy, std_dust_galaxy,
              N_galaxy, mean_dust_halo, std_dust_halo, N_halo, plot_z,
              model_tags, output_tag):
    """
    Plots the mean dust mass as a function of stellar mass (figure 1) and as
    a function of halo mass (figure 2), for every model and every plotted
    snapshot.  Only rank 0 does the plotting; the statistics are first
    collected across all MPI tasks.

    Parameters
    ----------
    PlotSnapList, SnapList : nested lists of ints
        Snapshots plotted / snapshots over which statistics were gathered,
        one inner list per model.
    simulation_norm : list of ints
        Per-model flag selecting the simulation cosmology (1 Mini-Millennium,
        3 extended Tiamat, 4 Britton, 5 Kali).
    mean_dust_galaxy, std_dust_galaxy, N_galaxy : nested arrays
        Mean/std dust mass and galaxy counts binned on stellar mass.
    mean_dust_halo, std_dust_halo, N_halo : nested arrays
        Mean/std dust mass and galaxy counts binned on halo mass.
    plot_z : bool
        # NOTE(review): accepted but not used anywhere in this function —
        # presumably kept for signature symmetry with the other plot routines.
    model_tags : list of strings
        Legend label for each model.
    output_tag : string
        Prefix for the two output file names.

    Returns
    -------
    None.  Saves "./<output_tag>_galaxy<output_format>" and
    "./<output_tag>_halo<output_format>".
    """
    # Pool the per-task binned statistics (module-level bin bounds
    # m_gal_low/m_gal_high and m_low/m_high define the binning ranges).
    master_mean_dust_galaxy, master_std_dust_galaxy, master_N_dust_galaxy, master_bin_middle_galaxy = \
        collect_across_tasks(mean_dust_galaxy, std_dust_galaxy, N_galaxy, SnapList,
                             PlotSnapList, True, m_gal_low, m_gal_high)
    master_mean_dust_halo, master_std_dust_halo, master_N_dust_halo, master_bin_middle_halo = \
        collect_across_tasks(mean_dust_halo, std_dust_halo, N_halo, SnapList,
                             PlotSnapList, True, m_low, m_high)
    if rank == 0:  # Only the root task plots.
        fig1 = plt.figure()           # Dust mass vs stellar mass.
        ax1 = fig1.add_subplot(111)
        fig2 = plt.figure()           # Dust mass vs halo mass.
        ax2 = fig2.add_subplot(111)
        for model_number in range(len(PlotSnapList)):
            # Set the cosmology for this model (return value unused for
            # simulation_norm values without an explicit return).
            if(simulation_norm[model_number] == 1):
                cosmo = AllVars.Set_Params_MiniMill()
            elif(simulation_norm[model_number] == 3):
                cosmo = AllVars.Set_Params_Tiamat_extended()
            elif(simulation_norm[model_number] == 4):
                cosmo = AllVars.Set_Params_Britton()
            elif(simulation_norm[model_number] == 5):
                cosmo = AllVars.Set_Params_Kali()
            for snapshot_idx in range(len((PlotSnapList[model_number]))):
                # Only label the first snapshot so each model appears once
                # in the legend; linestyle encodes model, colour encodes
                # snapshot.
                if snapshot_idx == 0:
                    label = model_tags[model_number]
                else:
                    label = ""
                # Skip empty bins so the curves don't drop to zero.
                nonzero_bins = np.where(master_N_dust_galaxy[model_number][snapshot_idx] > 0.0)[0]
                ax1.plot(master_bin_middle_galaxy[model_number][snapshot_idx][nonzero_bins],
                         master_mean_dust_galaxy[model_number][snapshot_idx][nonzero_bins],
                         label = label, ls = PlotScripts.linestyles[model_number],
                         color = PlotScripts.colors[snapshot_idx])
                nonzero_bins = np.where(master_N_dust_halo[model_number][snapshot_idx] > 0.0)[0]
                ax2.plot(master_bin_middle_halo[model_number][snapshot_idx][nonzero_bins],
                         master_mean_dust_halo[model_number][snapshot_idx][nonzero_bins],
                         label = label, ls = PlotScripts.linestyles[model_number],
                         color = PlotScripts.colors[snapshot_idx])
                print(master_mean_dust_halo[model_number][snapshot_idx])
        # Invisible (NaN) lines create one legend entry per snapshot colour.
        # NOTE(review): uses model_number left over from the loop above, so
        # the redshift labels come from the LAST model's snapshot list —
        # confirm all models share the same PlotSnapList.
        for count, snapshot_idx in enumerate(PlotSnapList[model_number]):
            #label = r"$\mathbf{z = " + str(int(round(AllVars.SnapZ[snapshot_idx]))) + "}$"
            label = r"$\mathbf{z = " + str(AllVars.SnapZ[snapshot_idx]) + "}$"
            ax1.plot(np.nan, np.nan, ls = PlotScripts.linestyles[0], color =
                     PlotScripts.colors[count], label = label)
            ax2.plot(np.nan, np.nan, ls = PlotScripts.linestyles[0], color =
                     PlotScripts.colors[count], label = label)
        ax1.set_xlim([2.0, 10.5])
        #ax1.set_ylim([1.0, 6.0])
        ax1.set_xlabel(r'$\mathbf{log_{10} \: M_{*} \:[M_{\odot}]}$', fontsize = PlotScripts.global_labelsize)
        ax1.set_ylabel(r'$\mathbf{log_{10} \: \langle M_{Dust}\rangle_{M*}}$', fontsize = PlotScripts.global_labelsize)
        leg = ax1.legend(loc='upper left', numpoints=1, labelspacing=0.1)
        leg.draw_frame(False)  # Don't want a box frame
        for t in leg.get_texts():  # Reduce the size of the text
            t.set_fontsize(PlotScripts.global_legendsize)
        outputFile1 = "./{0}_galaxy{1}".format(output_tag, output_format)
        fig1.savefig(outputFile1, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile1))
        plt.close(fig1)
        ax2.set_xlim([6.8, 11.5])
        #ax2.set_ylim([1.0, 6.0])
        ax2.set_xlabel(r'$\mathbf{log_{10} \: M_{vir} \:[M_{\odot}]}$', fontsize = PlotScripts.global_labelsize)
        ax2.set_ylabel(r'$\mathbf{log_{10} \: \langle M_{Dust}\rangle_{Mvir}}$', fontsize = PlotScripts.global_labelsize)
        leg = ax2.legend(loc='upper left', numpoints=1, labelspacing=0.1)
        leg.draw_frame(False)  # Don't want a box frame
        for t in leg.get_texts():  # Reduce the size of the text
            t.set_fontsize(PlotScripts.global_legendsize)
        outputFile2 = "./{0}_halo{1}".format(output_tag, output_format)
        fig2.savefig(outputFile2, bbox_inches='tight')  # Save the figure
        print('Saved file to {0}'.format(outputFile2))
        plt.close(fig2)
### Here ends the plotting functions. ###
### Here begins the functions that calculate various properties for the galaxies (fesc, Magnitude etc). ###
def Calculate_HaloPartStellarMass(halo_part, stellar_mass, bound_low, bound_high):
    '''
    Computes the mean and scatter of the stellar mass for galaxies whose
    host halo contains a number of particles within the specified bounds.

    Parameters
    ----------
    halo_part : array
        Number of particles inside each galaxy's host halo.
    stellar_mass : array
        Stellar mass of each galaxy (aligned with halo_part).
        Units of log10(Msun).
    bound_low, bound_high : int
        Inclusive particle-count bounds: bound_low <= halo_part <= bound_high.

    Returns
    -------
    mean_mass, std_mass : float
        Mean and standard deviation of the selected stellar masses.
        Units of log10(Msun).

    Units
    -----
    Input stellar mass is log10(Msun); the mean/std are computed in linear
    space and returned in log10(Msun).
    '''
    # Select galaxies hosted by halos within the particle-count window.
    in_range = np.where((halo_part >= bound_low) & (halo_part <= bound_high))[0]
    # Statistics are taken in linear mass, then converted back to log space.
    linear_masses = 10 ** (stellar_mass[in_range])
    return np.log10(np.mean(linear_masses)), np.log10(np.std(linear_masses))
##
def calculate_UV_extinction(z, L, M):
    '''
    Calculates the observed UV magnitude after dust extinction is accounted for.

    Parameters
    ----------
    z : float
        Redshift we are calculating the extinction at.
    L, M : array, length equal to the number of galaxies at this snapshot.
        Array containing the UV luminosities and magnitudes.

    Returns
    -------
    M_UV_obs : array, length equal to the number of galaxies at this snapshot.
        Array containing the observed UV magnitudes.

    Units
    -----
    Luminosities are in units of log10(erg s^-1 A^-1).
    Magnitudes are in the AB system.
    '''
    M_UV_bins = np.arange(-24, -16, 0.1)
    # A_mean is the average UV extinction (in magnitudes) for each UV bin.
    # (Fixed: was written to `A_Mean`/read from `MUV_bins`, both undefined.)
    A_mean = np.zeros(len(M_UV_bins))
    for j in range(len(M_UV_bins)):
        # Fit the UV slope beta for this redshift/magnitude bin, then
        # Monte-Carlo the A = 4.43 + 1.99*beta extinction relation with a
        # scatter of 0.34 about the fitted beta.
        # (Fixed: previously used the global `current_snap` instead of the
        # `z` argument; the caller passes AllVars.SnapZ[current_snap] as z.)
        beta = calculate_beta(M_UV_bins[j], z)
        dist = np.random.normal(beta, 0.34, 10000)
        A = 4.43 + 1.99 * dist
        A[A < 0] = 0  # Negative extinctions don't make sense.
        A_mean[j] = np.mean(A)
    # Bin the simulated magnitudes into the UV bins.  np.digitize defines
    # index i such that bin[i-1] <= x < bin[i]; clip so magnitudes outside
    # the bin range reuse the edge bins instead of indexing out of bounds.
    indices = np.clip(np.digitize(M, M_UV_bins), 0, len(M_UV_bins) - 1)
    dust = A_mean[indices]
    # Flux from a distance of 10 pc, units of log10(erg s^-1 A^-1 cm^-2).
    flux = AllVars.Luminosity_to_Flux(L, 10.0)
    flux_observed = flux - 0.4 * dust  # Extinction applied in magnitudes.
    # Spectral flux density in Jansky at 1600 A.
    # (Fixed: `ALlVars` typo.)
    f_nu = AllVars.spectralflux_wavelength_to_frequency(10**flux_observed, 1600)
    # AB Magnitude from http://www.astro.ljmu.ac.uk/~ikb/convert-units/node2.html
    # (Fixed: result was previously "called" instead of assigned, so the
    # final `return M_UV_obs` raised NameError.)
    M_UV_obs = -2.5 * np.log10(f_nu) + 8.90
    return M_UV_obs
##
def update_cumulative_stats(mean_pool, std_pool, N_pool, mean_local, std_local, N_local):
    '''
    Update the cumulative statistics (such as Stellar Mass Function, Mvir-Ngamma, fesc-z) that are saved across files.

    Pooled mean formulae taken from: https://www.ncbi.nlm.nih.gov/books/NBK56512/
    Pooled variance formulae taken from: https://en.wikipedia.org/wiki/Pooled_variance

    Parameters
    ----------
    mean_pool, std_pool, N_pool : array of floats with length equal to the number of bins (e.g. the mass bins for the Stellar Mass Function), or scalars.
        The current mean, standard deviation and number of data points within each bin.
        The mean/std arrays are updated in place; N_pool is only read (callers
        track the updated counts themselves).
    mean_local, std_local, N_local : array of floats with length equal to the number of bins, or scalars.
        The mean, standard deviation and number of data points within each bin that will be added to the pool.

    Returns
    -------
    mean_pool, std_pool : (See above)
        The updated mean and standard deviation with the local values pooled in.
        (Note: the updated N is NOT returned; it is simply N_pool + N_local.)

    Units
    -----
    All units are kept the same as the input units.
    Values are in real-space (not log-space).
    '''
    N_times_mean_local = np.multiply(N_local, mean_local)
    # N - 1 rather than N because of Bessel's correction
    # (https://en.wikipedia.org/wiki/Bessel%27s_correction).
    N_times_var_local = np.multiply(N_local - 1, np.multiply(std_local, std_local))
    N_times_mean_pool = np.add(N_times_mean_local, np.multiply(N_pool, mean_pool))
    N_times_var_pool = np.add(N_times_var_local, np.multiply(N_pool - 1, np.multiply(std_pool, std_pool)))
    N_pool = np.add(N_local, N_pool)
    # Distinguish binned (array/list) input from scalar input.
    # (Fixed: the old check referenced bare `float32`/`float64`, which raise
    # NameError unless star-imported from numpy; this isinstance test covers
    # the same cases — numpy arrays and lists pass, python/numpy scalars
    # fall through to the scalar branch.)
    if isinstance(mean_local, (list, np.ndarray)):
        for i in range(len(N_pool)):
            if N_pool[i] == 0:  # No data points in this bin.
                mean_pool[i] = 0.0
            else:
                mean_pool[i] = N_times_mean_pool[i] / N_pool[i]
            if N_pool[i] < 3:  # Too few points for a meaningful std dev.
                std_pool[i] = 0.0
            else:
                # -2 because the numerator pools two 'N - 1' terms.
                std_pool[i] = np.sqrt(N_times_var_pool[i] / (N_pool[i] - 2))
    else:
        # Scalar statistics; mirror the empty/low-count guards of the array
        # branch (previously an empty pool divided by zero here).
        if N_pool == 0:
            mean_pool = 0.0
        else:
            mean_pool = N_times_mean_pool / N_pool
        if N_pool < 3:
            std_pool = 0.0
        else:
            std_pool = np.sqrt(N_times_var_pool / (N_pool - 2))
    return mean_pool, std_pool
### Here ends the functions that deal with galaxy data manipulation. ###
#################################
if __name__ == '__main__':
np.seterr(divide='ignore')
number_models = 4
galaxies_model1="/fred/oz004/jseiler/kali/self_consistent_output/rsage_constant/galaxies/const_0.3_z5.782"
merged_galaxies_model1="/fred/oz004/jseiler/kali/self_consistent_output/rsage_constant/galaxies/const_0.3_MergedGalaxies"
photo_model1="/fred/oz004/jseiler/kali/self_consistent_output/rsage_constant/grids/cifog/const_0.3_photHI"
zreion_model1="/fred/oz004/jseiler/kali/self_consistent_output/rsage_constant/grids/cifog/const_0.3_reionization_redshift"
galaxies_model2="/fred/oz004/jseiler/kali/self_consistent_output/rsage_fej/galaxies/fej_alpha0.40_beta0.05_z5.782"
merged_galaxies_model2="/fred/oz004/jseiler/kali/self_consistent_output/rsage_fej/galaxies/fej_alpha0.40_beta0.05_MergedGalaxies"
photo_model2="/fred/oz004/jseiler/kali/self_consistent_output/rsage_fej/grids/cifog/fej_alpha0.40_beta0.05_photHI"
zreion_model2="/fred/oz004/jseiler/kali/self_consistent_output/rsage_fej/grids/cifog/fej_alpha0.40_beta0.05_reionization_redshift"
galaxies_model3="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHneg/galaxies/MHneg_1e8_1e12_0.99_0.05_z5.782"
merged_galaxies_model3="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHneg/galaxies/MHneg_1e8_1e12_0.99_0.05_MergedGalaxies"
photo_model3="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHneg/grids/cifog/MHneg_1e8_1e12_0.99_0.05_photHI"
zreion_model3="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHneg/grids/cifog/MHneg_1e8_1e12_0.99_0.05_reionization_redshift"
galaxies_model4="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHpos/galaxies/MHpos_1e8_1e12_0.01_0.50_z5.782"
merged_galaxies_model4="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHpos/galaxies/MHpos_1e8_1e12_0.01_0.50_MergedGalaxies"
photo_model4="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHpos/grids/cifog/MHpos_1e8_1e12_0.01_0.50_photHI"
zreion_model4="/fred/oz004/jseiler/kali/self_consistent_output/rsage_MHpos/grids/cifog/MHpos_1e8_1e12_0.01_0.50_reionization_redshift"
galaxies_filepath_array = [galaxies_model1,
galaxies_model2,
galaxies_model3,
galaxies_model4]
photo_array = [photo_model1,
photo_model2,
photo_model3,
photo_model4]
zreion_array = [zreion_model1,
zreion_model2,
zreion_model3,
zreion_model4]
GridSize_array = [256,
256,
256,
256]
precision_array = [2,
2,
2,
2]
merged_galaxies_filepath_array = [merged_galaxies_model1,
merged_galaxies_model2,
merged_galaxies_model3,
merged_galaxies_model4]
number_substeps = [10, 10, 10, 10] # How many substeps does each model have (specified by STEPS variable within SAGE).
number_snapshots = [99, 99, 99, 99] # Number of snapshots in the simulation (we don't have to do calculations for ALL snapshots).
# Tiamat extended has 164 snapshots.
FirstFile = [0, 0, 0, 0] # The first file number THAT WE ARE PLOTTING.
#LastFile = [63, 63, 63, 63] # The last file number THAT WE ARE PLOTTING.
LastFile = [0, 0, 0, 0] # The last file number THAT WE ARE PLOTTING.
NumFile = [64, 64, 64, 64] # The number of files for this simulation (plotting a subset of these files is allowed).
same_files = [0, 0, 0, 0] # In the case that model 1 and model 2 (index 0 and 1) have the same files, we don't want to read them in a second time.
# This array will tell us if we should keep the files for the next model or otherwise throw them away.
# The files will be kept until same_files[current_model_number] = 0.
# For example if we had 5 models we were plotting and model 1, 2, 3 shared the same files and models 4, 5 shared different files,
# Then same_files = [1, 1, 0, 1, 0] would be the correct values.
done_model = np.zeros((number_models)) # We use this to keep track of if we have done a model already.
model_tags = [r"$\mathbf{f_\mathrm{esc} \: Constant}$",
r"$\mathbf{f_\mathrm{esc} \: \propto \: f_\mathrm{ej}}$",
r"$\mathbf{f_\mathrm{esc} \: \propto \: M_\mathrm{H}^{-1}}$",
r"$\mathbf{f_\mathrm{esc} \: \propto \: M_\mathrm{H}}$"]
## Constants used for each model. ##
# Need to add an entry for EACH model. #
halo_cut = [32, 32, 32, 32] # Only calculate properties for galaxies whose host halos have at least this many particles.
# For Tiamat, z = [6, 7, 8] are snapshots [78, 64, 51]
# For Kali, z = [6, 7, 8] are snapshots [93, 76, 64]
#SnapList = [np.arange(0,99), np.arange(0,99)] # These are the snapshots over which the properties are calculated. NOTE: If the escape fraction is selected (fesc_prescription == 3) then this should be ALL the snapshots in the simulation as this prescriptions is temporally important.
#SnapList = [np.arange(20,99), np.arange(20, 99), np.arange(20, 99)]
SnapList = [[33, 50, 76, 93],
[33, 50, 76, 93],
[33, 50, 76, 93],
[33, 50, 76, 93]]
#SnapList = [[64],
# [64],
# [64],
# [64]]
#SnapList = [[33, 50, 64, 76, 93]]
#SnapList = [[64], [64]]
#SnapList = [np.arange(20,99)]
#PlotSnapList = [[30, 50, 64, 76, 93]]
#PlotSnapList = [[93, 76, 64], [93, 76, 64]]
#SnapList = [[93, 76, 64], [93, 76, 64]]
PlotSnapList = SnapList
simulation_norm = [5, 5, 5, 5] # Changes the constants (cosmology, snapshot -> redshift mapping etc) for each simulation.
# 0 for MySim (Manodeep's old one).
# 1 for Mini-Millennium.
# 2 for Tiamat (up to z =5).
# 3 for extended Tiamat (down to z = 1.6ish).
# 4 for Britton's Sim Pip
# 5 for Manodeep's new simulation Kali.
stellar_mass_halolen_lower = [32, 95, 95, 95] # These limits are for the number of particles in a halo.
stellar_mass_halolen_upper = [50, 105, 105, 105] # We calculate the average stellar mass for galaxies whose host halos have particle count between these limits.
calculate_observed_LF = [0, 0, 0, 0] # Determines whether we want to account for dust extinction when calculating the luminosity function of each model.
paper_plots = 1
##############################################################################################################
## Do a few checks to ensure all the arrays were specified properly. ##
for model_number in range(0,number_models):
assert(LastFile[model_number] - FirstFile[model_number] + 1 >= size)
if(simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif(simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
else:
print("Simulation norm was set to {0}.".format(simulation_norm[model_number]))
raise ValueError("This option has been implemented yet. Get your head in the game Jacob!")
if (number_snapshots[model_number] != len(AllVars.SnapZ)): # Here we do a check to ensure that the simulation we've defined correctly matches the number of snapshots we have also defined.
print("The number_snapshots array is {0}".format(number_snapshots))
print("The simulation_norm array is {0}".format(simulation_norm))
print("The number of snapshots for model_number {0} has {1} but you've said there is only {2}".format(model_number, len(AllVars.SnapZ), number_snapshots[model_number]))
raise ValueError("Check either that the number of snapshots has been defined properly and that the normalization option is correct.")
######################################################################
##################### SETTING UP ARRAYS ##############################
######################################################################
### The arrays are set up in a 3 part process. ###
### This is because our arrays are 3D nested to account for the model number and snapshots. ###
# First set up the outer most array. #
## Arrays for functions of stellar mass. ##
SMF = [] # Stellar Mass Function.
mean_fesc_galaxy_array = [] # Mean escape fraction as a function of stellar mass.
std_fesc_galaxy_array = [] # Same as above but standard devation.
N_galaxy_array = [] # Number of galaxies as a function of stellar mass.
mean_BHmass_galaxy_array = [] # Black hole mass as a function of stellar mass.
std_BHmass_galaxy_array = [] # Same as above but standard deviation.
mergers_galaxy_array = [] # Number of mergers as a function of halo mass.
mean_dust_galaxy_array = [] # Mean dust mass as a function of stellar mass.
std_dust_galaxy_array = [] # Same as above but standard deviation.
mean_sfr_galaxy_array = [] # Mean star formation rate as a
# function of stellar mass
std_sfr_galaxy_array = [] # Same as above but standard deviation.
mean_ssfr_galaxy_array = [] # Mean specific star formation rate as a
# function of stellar mass
std_ssfr_galaxy_array = [] # Same as above but standard deviation.
mean_Ngamma_galaxy_array = [] # Mean number of ionizing photons emitted as
# a function of stellar mass.
std_Ngamma_galaxy_array = [] # Same as above but standard deviation.
mean_photo_galaxy_array = [] # Mean photoionization rate.
std_photo_galaxy_array = [] # Std photoionization rate.
mean_reionmod_galaxy_array = [] # Mean reionization modifier using RSAGE.
std_reionmod_galaxy_array = [] # Std.
mean_gnedin_reionmod_galaxy_array = [] # Mean reionization modifier using Gnedin analytic prescription.
std_gnedin_reionmod_galaxy_array = [] # Std.
## Arrays for functions of halo mass. ##
mean_ejected_halo_array = [] # Mean ejected fractions as a function of halo mass.
std_ejected_halo_array = [] # Same as above but standard deviation.
mean_fesc_halo_array = [] # Mean escape fraction as a function of halo mass.
std_fesc_halo_array = [] # Same as above but standard deviation.
mean_Ngamma_halo_array = [] # Mean number of ionizing photons THAT ESCAPE as a function of halo mass.
std_Ngamma_halo_array = [] # Same as above but standard deviation.
N_halo_array = [] # Number of galaxies as a function of halo mass.
mergers_halo_array = [] # Number of mergers as a function of halo mass.
mean_quasar_activity_array = [] # Mean fraction of galaxies that have quasar actvitity as a function of halo mas.
std_quasar_activity_array = [] # Same as above but standard deviation.
mean_reionmod_halo_array = [] # Mean reionization modifier as a function of halo mass.
std_reionmod_halo_array = [] # Same as above but for standard deviation.
mean_dust_halo_array = [] # Mean dust mass as a function of halo mass.
std_dust_halo_array = [] # Same as above but standard deviation.
## Arrays for functions of redshift. ##
sum_Ngamma_z_array = [] # Total number of ionizing photons THAT ESCAPE as a functio of redshift.
mean_fesc_z_array = [] # Mean number of ionizing photons THAT ESCAPE as a function of redshift.
std_fesc_z_array = [] # Same as above but standard deviation.
N_z = [] # Number of galaxies as a function of redshift.
galaxy_halo_mass_mean = [] # Mean galaxy mass as a function of redshift.
N_quasars_z = [] # This tracks how many quasars went off during a specified snapshot.
N_quasars_boost_z = [] # This tracks how many galaxies are having their escape fraction boosted by quasar activity.
dynamicaltime_quasars_mean_z = [] # Mean dynamical time of galaxies that have a quasar event as a function of redshift.
dynamicaltime_quasars_std_z = [] # Same as above but standard deviation.
dynamicaltime_all_mean_z = [] # Mean dynamical time of all galaxies.
dynamicaltime_all_std_z = [] # Same as above but standard deviation.
mean_reionmod_z = [] # Mean reionization modifier as a function of redshift.
std_reionmod_z = [] # Same as above but for standard deviation.
N_reionmod_z = [] # Number of galaxies with a non-negative reionization modifier.
mean_ejected_z = [] # Mean ejected fraction as a function of redshift.
std_ejected_z = [] # Same as above but for standard deviation.
## Arrays that aren't functions of other variables. ##
Ngamma_global = []
mass_global = []
fesc_global = []
## Arrays as a function of fej ##
mean_Ngamma_fej = []
std_Ngamma_fej = []
N_fej = []
## Now the outer arrays have been defined, set up the next nest level for the number of models. ##
for model_number in range(0,number_models):
## Galaxy Arrays ##
SMF.append([])
mean_fesc_galaxy_array.append([])
std_fesc_galaxy_array.append([])
N_galaxy_array.append([])
mean_BHmass_galaxy_array.append([])
std_BHmass_galaxy_array.append([])
mergers_galaxy_array.append([])
mean_dust_galaxy_array.append([])
std_dust_galaxy_array.append([])
mean_sfr_galaxy_array.append([])
std_sfr_galaxy_array.append([])
mean_ssfr_galaxy_array.append([])
std_ssfr_galaxy_array.append([])
mean_Ngamma_galaxy_array.append([])
std_Ngamma_galaxy_array.append([])
mean_photo_galaxy_array.append([])
std_photo_galaxy_array.append([])
mean_reionmod_galaxy_array.append([])
std_reionmod_galaxy_array.append([])
mean_gnedin_reionmod_galaxy_array.append([])
std_gnedin_reionmod_galaxy_array.append([])
## Halo arrays. ##
mean_ejected_halo_array.append([])
std_ejected_halo_array.append([])
mean_fesc_halo_array.append([])
std_fesc_halo_array.append([])
mean_Ngamma_halo_array.append([])
std_Ngamma_halo_array.append([])
N_halo_array.append([])
mergers_halo_array.append([])
mean_quasar_activity_array.append([])
std_quasar_activity_array.append([])
mean_reionmod_halo_array.append([])
std_reionmod_halo_array.append([])
mean_dust_halo_array.append([])
std_dust_halo_array.append([])
## Redshift arrays. ##
sum_Ngamma_z_array.append([])
mean_fesc_z_array.append([])
std_fesc_z_array.append([])
N_z.append([])
galaxy_halo_mass_mean.append([])
N_quasars_z.append([])
N_quasars_boost_z.append([])
dynamicaltime_quasars_mean_z.append([])
dynamicaltime_quasars_std_z.append([])
dynamicaltime_all_mean_z.append([])
dynamicaltime_all_std_z.append([])
mean_reionmod_z.append([])
std_reionmod_z.append([])
N_reionmod_z.append([])
mean_ejected_z.append([])
std_ejected_z.append([])
## Arrays that aren't functions ##
Ngamma_global.append([])
mass_global.append([])
fesc_global.append([])
## Arrays as a function of fej ##
mean_Ngamma_fej.append([])
std_Ngamma_fej.append([])
N_fej.append([])
## And then finally set up the inner most arrays ##
## NOTE: We do the counts as float so we can keep consistency when we're calling MPI operations (just use MPI.FLOAT rather than deciding if we need to use MPI.INT)
for snapshot_idx in range(len(SnapList[model_number])):
## For the arrays that are functions of stellar/halo mass, the inner most level will be an array with the statistic binned across mass ##
## E.g. SMF[model_number][snapshot_idx] will return an array whereas N_z[model_number][snapshot_idx] will return a float. ##
## Functions of stellar mass arrays. ##
SMF[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_fesc_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_fesc_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
N_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_BHmass_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_BHmass_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mergers_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_dust_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_dust_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_sfr_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_sfr_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_ssfr_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_ssfr_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_Ngamma_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_Ngamma_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_photo_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_photo_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_reionmod_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_reionmod_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
mean_gnedin_reionmod_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
std_gnedin_reionmod_galaxy_array[model_number].append(np.zeros((NB_gal), dtype = np.float32))
## Function of halo mass arrays. ##
mean_ejected_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_ejected_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_fesc_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_fesc_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_Ngamma_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_Ngamma_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
N_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mergers_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_quasar_activity_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_quasar_activity_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_reionmod_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_reionmod_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
mean_dust_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
std_dust_halo_array[model_number].append(np.zeros((NB), dtype = np.float32))
## Function of Redshift arrays. ##
sum_Ngamma_z_array[model_number].append(0.0)
mean_fesc_z_array[model_number].append(0.0)
std_fesc_z_array[model_number].append(0.0)
N_z[model_number].append(0.0)
galaxy_halo_mass_mean[model_number].append(0.0)
N_quasars_z[model_number].append(0.0)
N_quasars_boost_z[model_number].append(0.0)
dynamicaltime_quasars_mean_z[model_number].append(0.0)
dynamicaltime_quasars_std_z[model_number].append(0.0)
dynamicaltime_all_mean_z[model_number].append(0.0)
dynamicaltime_all_std_z[model_number].append(0.0)
mean_reionmod_z[model_number].append(0.0)
std_reionmod_z[model_number].append(0.0)
N_reionmod_z[model_number].append(0.0)
mean_ejected_z[model_number].append(0.0)
std_ejected_z[model_number].append(0.0)
Ngamma_global[model_number].append([])
mass_global[model_number].append([])
fesc_global[model_number].append([])
## Arrays as a function of fej. ##
mean_Ngamma_fej[model_number].append(np.zeros((NB_fej), dtype = np.float32))
std_Ngamma_fej[model_number].append(np.zeros((NB_fej), dtype = np.float32))
N_fej[model_number].append(np.zeros((NB_fej), dtype = np.float32))
######################################################################
#################### ALL ARRAYS SETUP ################################
######################################################################
## Now it's (finally) time to read in all the data and do the actual work. ##
for model_number in range(number_models):
if(simulation_norm[model_number] == 1):
AllVars.Set_Params_MiniMill()
elif(simulation_norm[model_number] == 3):
AllVars.Set_Params_Tiamat_extended()
elif(simulation_norm[model_number] == 4):
AllVars.Set_Params_Britton()
elif(simulation_norm[model_number] == 5):
AllVars.Set_Params_Kali()
else:
print("Simulation norm was set to {0}.".format(simulation_norm[model_number]))
raise ValueError("This option has been implemented yet. Get your head in the game Jacob!")
if (done_model[model_number] == 1): # If we have already done this model (i.e., we kept the files and skipped this loop), move along.
assert(FirstFile[model_number] == FirstFile[model_number - 1])
assert(LastFile[model_number] == LastFile[model_number - 1])
continue
for fnr in range(FirstFile[model_number] + rank, LastFile[model_number]+1, size): # Divide up the input files across the processors.
GG, Gal_Desc = ReadScripts.ReadGals_SAGE(galaxies_filepath_array[model_number], fnr, number_snapshots[model_number], comm) # Read galaxies
G_Merged, _ = ReadScripts.ReadGals_SAGE(merged_galaxies_filepath_array[model_number], fnr, number_snapshots[model_number], comm) # Also need the merged galaxies.
G = ReadScripts.Join_Arrays(GG, G_Merged, Gal_Desc) # Then join them together for all galaxies.
keep_files = 1 # Flips to 0 when we are done with this file.
current_model_number = model_number # Used to differentiate between outer model_number and the inner model_number because we can keep files across model_numbers.
while(keep_files == 1):
## Just a few definitions to cut down the clutter a smidge. ##
current_halo_cut = halo_cut[current_model_number]
NumSubsteps = number_substeps[current_model_number]
do_observed_LF = calculate_observed_LF[current_model_number]
for snapshot_idx in range(0, len(SnapList[current_model_number])): # Now let's calculate stats for each required redshift.
current_snap = SnapList[current_model_number][snapshot_idx] # Get rid of some clutter.
w_gal = np.where((G.GridHistory[:, current_snap] != -1) & (G.GridStellarMass[:, current_snap] > 0.0) & (G.LenHistory[:, current_snap] > current_halo_cut) & (G.GridSFR[:, current_snap] >= 0.0) & (G.GridFoFMass[:, current_snap] >= 0.0))[0] # Only include those galaxies that existed at the current snapshot, had positive (but not infinite) stellar/Halo mass and Star formation rate. Ensure the galaxies also resides in a halo that is sufficiently resolved.
w_merged_gal = np.where((G_Merged.GridHistory[:, current_snap] != -1) & (G_Merged.GridStellarMass[:, current_snap] > 0.0) & (G_Merged.LenHistory[:, current_snap] > current_halo_cut) & (G_Merged.GridSFR[:, current_snap] >= 0.0) & (G_Merged.GridFoFMass[:, current_snap] >= 0.0) & (G_Merged.LenMergerGal[:,current_snap] > current_halo_cut))[0]
print("There were {0} galaxies for snapshot {1} (Redshift {2:.3f}) model {3}.".format(len(w_gal), current_snap, AllVars.SnapZ[current_snap], current_model_number))
if (len(w_gal) == 0):
continue
mass_gal = np.log10(G.GridStellarMass[w_gal, current_snap] * 1.0e10 / AllVars.Hubble_h) # Msun. Log Units.
w_SFR = w_gal[np.where((G.GridSFR[w_gal, current_snap] > 0.0))[0]]
mass_SFR_gal = np.log10(G.GridStellarMass[w_SFR, current_snap] * \
1.0e10 / AllVars.Hubble_h)
SFR_gal = np.log10(G.GridSFR[w_SFR,current_snap])
sSFR_gal = SFR_gal - mass_SFR_gal
halo_part_count = G.LenHistory[w_gal, current_snap]
metallicity_gal = G.GridZ[w_gal, current_snap]
metallicity_tremonti_gal = np.log10(G.GridZ[w_gal, current_snap] / 0.02) + 9.0 # Using the Tremonti relationship for metallicity.
mass_central = np.log10(G.GridFoFMass[w_gal, current_snap] * 1.0e10 / AllVars.Hubble_h) # Msun. Log Units.
ejected_fraction = G.EjectedFraction[w_gal, current_snap]
w_dust = np.where(((G.GridDustColdGas[w_gal, current_snap]
+G.GridDustHotGas[w_gal, current_snap]
+G.GridDustEjectedMass[w_gal, current_snap]) > 0.0)
& (G.GridType[w_gal, current_snap] == 0))[0]
total_dust_gal = np.log10((G.GridDustColdGas[w_gal[w_dust], current_snap]
+G.GridDustHotGas[w_gal[w_dust], current_snap]
+G.GridDustEjectedMass[w_gal[w_dust], current_snap])
* 1.0e10 / AllVars.Hubble_h)
mass_gal_dust = np.log10(G.GridStellarMass[w_gal[w_dust], current_snap]
* 1.0e10 / AllVars.Hubble_h)
mass_centralgal_dust = np.log10(G.GridFoFMass[w_gal[w_dust], current_snap]
* 1.0e10 / AllVars.Hubble_h)
fesc = G.Gridfesc[w_gal, current_snap]
fesc[fesc < 0.0] = 0.0
Ngamma_gal = G.GridNgamma_HI[w_gal, current_snap] # 1.0e50
# photons/s.
if model_number < 3:
Ngamma_gal += 50.0 # Old versions of SAGE incorrectly
# subtracted 50.
Ngamma_gal *= fesc
reionmod = G.GridReionMod[w_gal, current_snap]
mass_reionmod_central = mass_central[reionmod > -1]
mass_reionmod_gal = mass_gal[reionmod > -1]
reionmod = reionmod[reionmod > -1] # Some satellite galaxies that don't have HotGas and hence won't be stripped. As a result reionmod = -1 for these. Ignore them.
mass_BH = G.GridBHMass[w_gal, current_snap] * 1.0e10 / AllVars.Hubble_h # Msun. Not log units.
L_UV = SFR_gal + 39.927 # Using relationship from STARBURST99, units of erg s^-1 A^-1. Log Units.
M_UV = AllVars.Luminosity_to_ABMag(L_UV, 1600)
if (do_observed_LF == 1): # Calculate the UV extinction if requested.
M_UV_obs = calculate_UV_extinction(AllVars.SnapZ[current_snap], L_UV, M_UV[snap_idx])
galaxy_halo_mass_mean_local, galaxy_halo_mass_std_local = Calculate_HaloPartStellarMass(halo_part_count, mass_gal, stellar_mass_halolen_lower[current_model_number], stellar_mass_halolen_upper[current_model_number]) # This is the average stellar mass for galaxies whose halos have the specified number of particles.
galaxy_halo_mass_mean[current_model_number][snapshot_idx] += pow(10, galaxy_halo_mass_mean_local) / (LastFile[current_model_number] + 1) # Adds to the average of the mean.
photofield_path = "{0}_{1:03d}".format(photo_array[current_model_number],
current_snap)
#photo_gal = photo.calc_gal_photoion(G.GridHistory[w_gal, current_snap],
# photofield_path,
# GridSize_array[current_model_number],
# precision_array[current_model_number])
#zreion_path = "{0}".format(zreion_array[current_model_number])
#zreion_gal = photo.calc_gal_zreion(G.GridHistory[w_gal, current_snap],
# zreion_path,
# GridSize_array[current_model_number],
# precision_array[current_model_number])
z_0 = 8.0
z_r = 7.0
gnedin_mfilt = ga.get_filter_mass(np.array(AllVars.SnapZ[current_snap]),
z_0, z_r)
gnedin_reionmod_gal = 1.0 / pow(1.0 + 0.26*pow(10, gnedin_mfilt - mass_central), 3.0)
###########################################
######## BASE PROPERTIES CALCULATED #######
###########################################
# Time to calculate relevant statistics.
### Functions of Galaxies/Stellar Mass ###
## Stellar Mass Function ##
(counts_local, bin_edges, bin_middle) = AllVars.Calculate_Histogram(mass_gal, bin_width, 0, m_gal_low, m_gal_high) # Bin the Stellar Mass
SMF[current_model_number][snapshot_idx] += counts_local
## Escape Fraction ##
(mean_fesc_galaxy_local, std_fesc_galaxy_local, N_local, sum_fesc_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(mass_gal, fesc, bin_width, m_gal_low, m_gal_high)
(mean_fesc_galaxy_array[current_model_number][snapshot_idx], std_fesc_galaxy_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_fesc_galaxy_array[current_model_number][snapshot_idx], std_fesc_galaxy_array[current_model_number][snapshot_idx], N_galaxy_array[current_model_number][snapshot_idx], mean_fesc_galaxy_local, std_fesc_galaxy_local, N_local)
## Black Hole Mass ##
(mean_BHmass_galaxy_local, std_BHmass_galaxy_local, N_local, sum_BHmass_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(mass_gal, mass_BH, bin_width, m_gal_low, m_gal_high)
(mean_BHmass_galaxy_array[current_model_number][snapshot_idx], std_BHmass_galaxy_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_BHmass_galaxy_array[current_model_number][snapshot_idx], std_BHmass_galaxy_array[current_model_number][snapshot_idx], N_galaxy_array[current_model_number][snapshot_idx], mean_BHmass_galaxy_local, std_BHmass_galaxy_local, N_local)
## Total Dust Mass ##
(mean_dust_galaxy_local, std_dust_galaxy_local, N_local,
sum_dust_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(
mass_gal_dust, total_dust_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_dust_galaxy_array[current_model_number][snapshot_idx],
std_dust_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_dust_galaxy_array[current_model_number][snapshot_idx],
std_dust_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_dust_galaxy_local,
std_dust_galaxy_local,
N_local)
## Star Formation Rate ##
(mean_sfr_galaxy_local, std_sfr_galaxy_local, N_local,
sum_sfr_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(
mass_SFR_gal, SFR_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_sfr_galaxy_array[current_model_number][snapshot_idx],
std_sfr_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_sfr_galaxy_array[current_model_number][snapshot_idx],
std_sfr_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_sfr_galaxy_local,
std_sfr_galaxy_local,
N_local)
## Specific Star Formation Rate ##
(mean_ssfr_galaxy_local, std_ssfr_galaxy_local, N_local,
sum_ssfr_galaxy, bin_middle) = AllVars.Calculate_2D_Mean(
mass_SFR_gal, sSFR_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_ssfr_galaxy_array[current_model_number][snapshot_idx],
std_ssfr_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_ssfr_galaxy_array[current_model_number][snapshot_idx],
std_ssfr_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_ssfr_galaxy_local,
std_ssfr_galaxy_local,
N_local)
## Number of Ionizing Photons ##
(mean_Ngamma_galaxy_local, std_Ngamma_galaxy_local, N_local,
sum_Ngamma_galaxy_local, bin_middle) = AllVars.Calculate_2D_Mean(
mass_gal, Ngamma_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_Ngamma_galaxy_array[current_model_number][snapshot_idx],
std_Ngamma_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_Ngamma_galaxy_array[current_model_number][snapshot_idx],
std_Ngamma_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_Ngamma_galaxy_local,
std_Ngamma_galaxy_local,
N_local)
## Photoionization rate ##
'''
(mean_photo_galaxy_local, std_photo_galaxy_local, N_local,
sum_photo_galaxy_local, bin_middle) = AllVars.Calculate_2D_Mean(
mass_gal, photo_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_photo_galaxy_array[current_model_number][snapshot_idx],
std_photo_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_photo_galaxy_array[current_model_number][snapshot_idx],
std_photo_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_photo_galaxy_local,
std_photo_galaxy_local,
N_local)
'''
## RSAGE Reionization Modifier ##
(mean_reionmod_galaxy_local, std_reionmod_galaxy_local, N_local,
sum_reionmod_galaxy_local, bin_middle) = AllVars.Calculate_2D_Mean(
mass_reionmod_gal, reionmod,
bin_width, m_gal_low,
m_gal_high)
(mean_reionmod_galaxy_array[current_model_number][snapshot_idx],
std_reionmod_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_reionmod_galaxy_array[current_model_number][snapshot_idx],
std_reionmod_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_reionmod_galaxy_local,
std_reionmod_galaxy_local,
N_local)
## Gnedin Reionization Modifier ##
(mean_gnedin_reionmod_galaxy_local, std_gnedin_reionmod_galaxy_local, N_local,
sum_gnedin_reionmod_galaxy_local, bin_middle) = AllVars.Calculate_2D_Mean(
mass_gal, gnedin_reionmod_gal,
bin_width, m_gal_low,
m_gal_high)
(mean_gnedin_reionmod_galaxy_array[current_model_number][snapshot_idx],
std_gnedin_reionmod_galaxy_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_gnedin_reionmod_galaxy_array[current_model_number][snapshot_idx],
std_gnedin_reionmod_galaxy_array[current_model_number][snapshot_idx],
N_galaxy_array[current_model_number][snapshot_idx],
mean_gnedin_reionmod_galaxy_local,
std_gnedin_reionmod_galaxy_local,
N_local)
N_galaxy_array[current_model_number][snapshot_idx] += N_local
### Functions of Halos/Halo Mass ###
## Ejected Fraction ##
(mean_ejected_halo_local, std_ejected_halo_local, N_local, sum_ejected_halo, bin_middle) = AllVars.Calculate_2D_Mean(mass_central, ejected_fraction, bin_width, m_low, m_high)
(mean_ejected_halo_array[current_model_number][snapshot_idx], std_ejected_halo_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_ejected_halo_array[current_model_number][snapshot_idx], std_ejected_halo_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_ejected_halo_local, std_ejected_halo_local, N_local) # Then update the running total.
## Quasar Fraction ##
(mean_quasar_activity_local, std_quasar_activity_local,N_local, sum_quasar_activity_halo, bin_middle) = AllVars.Calculate_2D_Mean(mass_central, G.QuasarActivity[w_gal, current_snap], bin_width, m_low, m_high)
(mean_quasar_activity_array[current_model_number][snapshot_idx], std_quasar_activity_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_quasar_activity_array[current_model_number][snapshot_idx], std_quasar_activity_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_quasar_activity_local, std_quasar_activity_local, N_local) # Then update the running total.
## fesc Value ##
(mean_fesc_halo_local, std_fesc_halo_local, N_local, sum_fesc_halo, bin_middle) = AllVars.Calculate_2D_Mean(mass_central, fesc, bin_width, m_low, m_high)
(mean_fesc_halo_array[current_model_number][snapshot_idx], std_fesc_halo_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_fesc_halo_array[current_model_number][snapshot_idx], std_fesc_halo_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_fesc_halo_local, std_fesc_halo_local, N_local) # Then update the running total.
## Ngamma ##
#(mean_Ngamma_halo_local, std_Ngamma_halo_local, N_local, sum_Ngamma_halo, bin_middle) \
#= AllVars.Calculate_2D_Mean(mass_central, ionizing_photons, bin_width, m_low, m_high)
#mean_Ngamma_halo_local = np.divide(mean_Ngamma_halo_local, 1.0e50) ## Divide out a constant to keep the numbers manageable.
#std_Ngamma_halo_local = np.divide(std_Ngamma_halo_local, 1.0e50)
#(mean_Ngamma_halo_array[current_model_number][snapshot_idx], std_Ngamma_halo_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_Ngamma_halo_array[current_model_number][snapshot_idx], std_Ngamma_halo_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_Ngamma_halo_local, std_Ngamma_halo_local, N_local) # Then update the running total.
## Reionization Modifier ##
(mean_reionmod_halo_local, std_reionmod_halo_local, N_local, sum_reionmod_halo, bin_middle) = AllVars.Calculate_2D_Mean(mass_reionmod_central, reionmod, bin_width, m_low, m_high)
(mean_reionmod_halo_array[current_model_number][snapshot_idx], std_reionmod_halo_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_reionmod_halo_array[current_model_number][snapshot_idx], std_reionmod_halo_array[current_model_number][snapshot_idx], N_halo_array[current_model_number][snapshot_idx], mean_reionmod_halo_local, std_reionmod_halo_local, N_local) # Then update the running total.
## Total Dust Mass ##
(mean_dust_halo_local, std_dust_halo_local, N_local,
sum_dust_halo, bin_middle) = AllVars.Calculate_2D_Mean(
mass_centralgal_dust, total_dust_gal,
bin_width, m_low,
m_high)
(mean_dust_halo_array[current_model_number][snapshot_idx],
std_dust_halo_array[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_dust_halo_array[current_model_number][snapshot_idx],
std_dust_halo_array[current_model_number][snapshot_idx],
N_halo_array[current_model_number][snapshot_idx],
mean_dust_halo_local,
std_dust_halo_local,
N_local)
N_halo_array[current_model_number][snapshot_idx] += N_local
### Functions of redshift ###
## Ngamma ##
#sum_Ngamma_z_array[current_model_number][snapshot_idx] += np.sum(np.divide(ionizing_photons, 1.0e50)) # Remember that we're dividing out a constant!
## fesc Value ##
(mean_fesc_z_array[current_model_number][snapshot_idx], std_fesc_z_array[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_fesc_z_array[current_model_number][snapshot_idx], std_fesc_z_array[current_model_number][snapshot_idx], N_z[current_model_number][snapshot_idx], np.mean(fesc), np.std(fesc), len(w_gal)) # Updates the mean escape fraction for this redshift.
## Reionization Modifier ##
(mean_reionmod_z[current_model_number][snapshot_idx], std_reionmod_z[current_model_number][snapshot_idx]) = update_cumulative_stats(mean_reionmod_z[current_model_number][snapshot_idx], std_reionmod_z[current_model_number][snapshot_idx], N_reionmod_z[current_model_number][snapshot_idx], np.mean(reionmod), np.std(reionmod), len(reionmod))
N_reionmod_z[current_model_number][snapshot_idx] += len(reionmod)
## Ejected Fraction ##
(mean_ejected_z[current_model_number][snapshot_idx],std_ejected_z[current_model_number][snapshot_idx]) \
= update_cumulative_stats(mean_ejected_z[current_model_number][snapshot_idx],
std_ejected_z[current_model_number][snapshot_idx],
N_z[current_model_number][snapshot_idx],
np.mean(ejected_fraction),
np.std(ejected_fraction),
len(w_gal))
N_z[current_model_number][snapshot_idx] += len(w_gal)
#### Arrays that are just kept across snapshots ##
Ngamma_global[current_model_number][snapshot_idx].append(Ngamma_gal)
mass_global[current_model_number][snapshot_idx].append(mass_gal)
fesc_global[current_model_number][snapshot_idx].append(fesc)
#### Arrays that are function of fej ##
(mean_Ngamma_fej_local, std_Ngamma_fej_local, N_local,
sum_Ngamma_fej_local, bin_middle) = AllVars.Calculate_2D_Mean(
ejected_fraction, Ngamma_gal,
fej_bin_width, fej_low, fej_high)
(mean_Ngamma_fej[current_model_number][snapshot_idx],
std_Ngamma_fej[current_model_number][snapshot_idx]) = \
update_cumulative_stats(mean_Ngamma_fej[current_model_number][snapshot_idx],
std_Ngamma_fej[current_model_number][snapshot_idx],
N_fej[current_model_number][snapshot_idx],
mean_Ngamma_fej_local,
std_Ngamma_fej_local,
N_local)
N_fej[current_model_number][snapshot_idx] += N_local
done_model[current_model_number] = 1
if (current_model_number < number_models):
keep_files = same_files[current_model_number] # Decide if we want to keep the files loaded or throw them out.
current_model_number += 1 # Update the inner loop model number.
#StellarMassFunction(PlotSnapList, SMF, simulation_norm, FirstFile,
# LastFile, NumFile, galaxy_halo_mass_mean, model_tags,
# 1, paper_plots, "wtf")
#plot_reionmod(PlotSnapList, SnapList, simulation_norm, mean_reionmod_halo_array,
#std_reionmod_halo_array, N_halo_array, mean_reionmod_z,
#std_reionmod_z, N_reionmod_z, False, model_tags,
#"reionmod_selfcon")
#plot_dust_scatter(SnapList, mass_gal_dust, mass_centralgal_dust, total_dust_gal,
# "dust_scatter")
#plot_dust(PlotSnapList, SnapList, simulation_norm, mean_dust_galaxy_array,
# std_dust_galaxy_array, N_galaxy_array, mean_dust_halo_array,
# std_dust_halo_array, N_halo_array, False, model_tags,
# "dustmass_total")
#plot_stellarmass_blackhole(PlotSnapList, simulation_norm, mean_BHmass_galaxy_array,
# std_BHmass_galaxy_array, N_galaxy_array,
# FirstFile, LastFile, NumFile,
# model_tags, "StellarMass_BHMass")
#plot_ejectedfraction(SnapList, PlotSnapList, simulation_norm,
# mean_ejected_halo_array, std_ejected_halo_array,
# N_halo_array, mean_ejected_z, std_ejected_z, N_z,
# model_tags, "ejectedfraction")
#plot_quasars_count(SnapList, PlotSnapList, N_quasars_z, N_quasars_boost_z, N_z, mean_quasar_activity_array, std_quasar_activity_array, N_halo_array, mergers_halo_array, SMF, mergers_galaxy_array, fesc_prescription, simulation_norm, FirstFile, LastFile, NumFile, model_tags, "SN_Prescription")
plot_fesc_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_fesc_galaxy_array, std_fesc_galaxy_array,
N_galaxy_array, mean_fesc_halo_array,
std_fesc_halo_array, N_halo_array,
galaxy_halo_mass_mean, model_tags,
paper_plots, mass_global, fesc_global, Ngamma_global,
"fesc_paper")
plot_reionmod_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_reionmod_galaxy_array, std_reionmod_galaxy_array,
N_galaxy_array, mean_gnedin_reionmod_galaxy_array,
std_gnedin_reionmod_galaxy_array,
model_tags, paper_plots, "reionmod")
exit()
#plot_nion_galaxy(SnapList, PlotSnapList, simulation_norm,
# mean_Ngamma_galaxy_array, std_Ngamma_galaxy_array,
# N_galaxy_array, model_tags,
# paper_plots, "Ngamma")
'''
plot_photo_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_photo_galaxy_array, std_photo_galaxy_array,
N_galaxy_array, model_tags,
paper_plots, "photo")
'''
plot_sfr_galaxy(SnapList, PlotSnapList, simulation_norm,
mean_sfr_galaxy_array, std_sfr_galaxy_array,
mean_ssfr_galaxy_array, std_ssfr_galaxy_array,
N_galaxy_array, model_tags, "sSFR")
#plot_fej_Ngamma(SnapList, PlotSnapList, simulation_norm,
# mean_Ngamma_fej, std_Ngamma_fej,
# N_fej, model_tags, "Ngamma_fej")
#plot_photoncount(SnapList, sum_Ngamma_z_array, simulation_norm, FirstFile, LastFile, NumFile, model_tags, "Ngamma_test") ## PARALELL COMPATIBLE
#plot_mvir_Ngamma(SnapList, mean_Ngamma_halo_array, std_Ngamma_halo_array, N_halo_array, model_tags, "Mvir_Ngamma_test", fesc_prescription, fesc_normalization, "/lustre/projects/p004_swin/jseiler/tiamat/halo_ngamma/") ## PARALELL COMPATIBLE
| 51.258188 | 474 | 0.610478 |