repo_name
stringlengths
6
97
path
stringlengths
3
341
text
stringlengths
8
1.02M
malina-molodchina/letoproject
nobelevka.py
import pandas as pd import numpy as np import matplotlib.pyplot as plt import streamlit as st import time import seaborn as sns import squarify import pycountry import geopandas import plotly.graph_objects as go from PIL import Image from wordcloud import WordCloud, STOPWORDS,ImageColorGenerator st.title("Nobelevo4ka") st.image('1.jpg') "В этой проге я постараюсь визаулизировать данные по Нобелевской премии 1901-2019. " \ "Данные взяты с Kagle (https://www.kaggle.com/imdevskp/nobel-prize)" "В датасете содержится информация о литературе/медицине/премии мира/физике/химии и экономике (основана в 1969)" "Стоит также упомянуть, что в период 1940-1942 Нобелевская премия не вручалась((" with st.echo(code_location='below'): data = pd.read_csv('complete.csv', delimiter=',').sort_values("awardYear").reset_index() del data["index"] with st.echo(code_location='below'): data["gender"] = data["gender"].fillna("Organization") data_year = pd.DataFrame({"Year": range(1901, 1940), "male": 0, "female": 0, "Organization": 0}) data_year2 = pd.DataFrame({"Year": range(1943, 2020), "male": 0, "female": 0, "Organization": 0}) data_year = pd.concat([data_year, data_year2], ignore_index=True) for j in ["male", "female", "Organization"]: for i in range(1901, 1939): data_year.loc[i - 1901][j] = int(data[data["awardYear"] == int(i)]["gender"].to_list().count(j)) for i in range(1943, 2020): data_year.loc[i - 1904][j] = int(data[data["awardYear"] == int(i)]["gender"].to_list().count(j)) with st.echo(code_location='below'): number = data_year.sum()[1:4].to_list() genders = ["male", "female", "Organization"] a, b = plt.subplots() ### FROM https://stackoverflow.com/questions/6170246/how-do-i-use-matplotlib-autopct" def make_autopct(values): def my_autopct(pct): total = sum(values) val = int(round(pct * total / 100.0)) return '{p:.1f}% ({v:d})'.format(p=pct, v=val) return my_autopct ### END FROM https://stackoverflow.com/questions/6170246/how-do-i-use-matplotlib-autopct b.pie(number, (0, 0, 0.1), 
genders, ['#60b3ff', '#ff9999', '#99ff99'], autopct=make_autopct(number), shadow=True, startangle=11) b.axis('equal') plt.tight_layout() plt.legend(title='Гендерное равенство?! Ну типа', bbox_to_anchor=(1, 1), loc='upper center') ''''Получился нелепый мужской пакмэн, проглотивший остальных. Действительно, пока что нет никаких новостей, с куммулятивным гендерным распределением всё и так был понятно.''' st.pyplot(plt) with st.echo(code_location='below'): b = ["Chemistry", "Literature", "Physiology or Medicine", "Peace", "Physics", "Economic Sciences"] subj = pd.DataFrame({"Category": b, "Total": 0, "Female": 0}) for j in b: data_year[j] = 0 for i in data_year["Year"]: data_year.loc[i - 1901 - 3 * int(i / 1943)][j] = int( data[data["awardYear"] == int(i)]["category"].to_list().count(j)) subj.iloc[b.index(j), 1] = data_year[j].sum() subj.iloc[b.index(j), 2] = data[data["category"] == j]["gender"].to_list().count("female") "Стой!!! Совсем забыл сказать, ты можешь воспользоваться уникальной поисковой системой! Она бесполезная, но вдруг тебе пригодится..." \ "Код я спрятал, потому что он большой и некрасивый" cat = st.selectbox('Выберите интересующую вас область:', ["Literature", "Chemistry", "Physiology or Medicine", "Physics", "Economic Sciences"]) year = st.selectbox('Выберите интересующий вас год:', range(1901, 2020)) if year > 1939 and year < 1943: "В этом году нобелевскую премию по данному предмету никто не получал. Да и по другим претметам тоже. " \ "Война всё-так дело серьезное" else: if len(data.loc[data["awardYear"] == year].loc[data["category"] == cat]) == 0: "В этом году по Экономике никто не получал премию. Знаете почему? Её тогда ещё не было)) " \ "Она появилась в 1969." elif len(data.loc[data["awardYear"] == year].loc[data["category"] == cat]) == 1: "В этом году Нобелевскую премию по " + str(cat) + " была вручена " + \ data.loc[data["awardYear"] == year].loc[data["category"] == cat]["name"].iloc[0] "За что получил? 
Тут всё очев: " + str(data.loc[data["awardYear"] == year].loc[data["category"] == cat]["motivation"].iloc[0]) if data.loc[data["awardYear"] == year].loc[data["category"] == cat]["birth_date"].iloc[0] == "": st.write("Датафрейм не знает, когда этот человек родился, значит и нам не положено") else: st.write("Дата рождения " + str(data.loc[data["awardYear"] == year].loc[data["category"] == cat]["name"].iloc[0]) + " - " + \ data[data["awardYear"] == year].loc[data["category"] == cat]["birth_date"].iloc[0]) if data.loc[data["awardYear"] == year].loc[data["category"] == cat]["birth_countryNow"].iloc[0] == "": st.write("Датафрейм не знает, где она родилась, значит и нам не положено") else: st.write("Место рождения " + str(data.loc[data["awardYear"] == year].loc[data["category"] == cat]["name"].iloc[0]) + " - " + \ data.loc[data["awardYear"] == year].loc[data["category"] == cat]["birth_countryNow"].iloc[0]) else: st.write("В "+str(year)+" году нобелевской премией по "+str(cat)+ " было награждено сразу несколько человек!!") chel = st.selectbox( "Выберите, кто именно вас интересует: ", data[data["awardYear"] == year][data["category"] == cat]["name"].to_list()) "За что получил? 
Тут всё очев: " + str( data[data["awardYear"] == year][data["category"] == cat][data["name"] == chel]["motivation"].iloc[0]) if data[data["awardYear"] == year][data["category"] == cat][data["name"] == chel]["birth_date"].iloc[0] == "": st.write("Датафрейм не знает, когда этот человек родился, значит и нам не положено") else: st.write("Дата рождения " + str(data[data["awardYear"] == year][data["category"] == cat][data["name"] == chel]["name"].iloc[0]) + " - " + \ data[data["awardYear"] == year][data["category"] == cat][data["name"] == chel]["birth_date"].iloc[0]) if data[data["awardYear"] == year][data["category"] == cat][data["name"] == chel]["birth_countryNow"].iloc[0] == "": st.write("Датафрейм не знает, где она родилась, значит и нам не положено") else: st.write("Место рождения " + str(data[data["awardYear"] == year][data["category"] == cat][data["name"] == chel]["name"].iloc[0]) + " - " + \ data[data["awardYear"] == year][data["category"] == cat]["birth_countryNow"][data["name"] == chel].iloc[0]) with st.echo(code_location='below'): sns.set_theme(style="whitegrid") f, ax = plt.subplots(figsize=(4, 4)) subjects = subj.sort_values("Total", ascending=False) sns.barplot(x="Total", y="Category", data=subjects, label="Total", color="#60b3ff") sns.set_color_codes("muted") sns.barplot(x="Female", y="Category", data=subjects, label="Female", color="#ff9999") ax.legend(ncol=1, loc="lower right", frameon=True) ax.set(xlim=(0, 250), ylabel="", xlabel="Number of prizes") plt.title("Female distribution per categories") st.set_option('deprecation.showPyplotGlobalUse', False) st.pyplot(sns.despine(left=True, bottom=True)) "Как мы видим, процентные соотношения женщин в каждой из категорий очень разнятся. 
Наибольшую долю они составляют " \ "в премии Мира и премии по литературе, откуда можно сделать вывод о том, что женщины лучше преуспевают в гуманитарных науках (литература)" \ "и в социальной активности/иницитивности (премия Мира), нежели в естественных науках ('преуспевают' в данном контексте относится именно" \ "к Нобелевской премии)" "Хмммммм. А вы заметили, что во всех категориях количество врученных премий значительно превышает временной промежуток, на протяжении " \ "которого эти премии присуждались (раз в год)??? Омагадддд, получается, одну премию могут получать сразу несколько человек!!!" with st.echo(code_location='below'): y = data[["awardYear", "category"]].copy() y["1"] = 1 y = y.groupby(["awardYear", "category"]).sum("1") x = data_year[["Year", "Chemistry", "Literature", "Physiology or Medicine", "Peace", "Physics", "Economic Sciences"]].pivot_table(["Chemistry", "Literature", "Physiology or Medicine", "Peace", "Physics", "Economic Sciences"], "Year") x = pd.pivot_table(x, values=["Chemistry", "Literature", "Physiology or Medicine", "Peace", "Physics", "Economic Sciences"], columns="Year") sns.set_theme() f, ax = plt.subplots(figsize=(10, 3)) sns.heatmap(x, annot=False, fmt="d", linewidths=0.05, ax=ax) """Цветовая палитра показывает, сколько человек в конкретный год взяли Нобеля в конкретной категории. По литературе, например, почти во все года был награждён один человек, что достаточно логично. 
Действительно интересный момент, который мы видим - в естественных науках с момента появления премии количество дуэтов/трио постепенно росло и в последние десятителия стало модно брать нобеля не одному, а со своими корешами.""" st.pyplot() with st.echo(code_location='below'): quant = subj[["Category", "Total"]] colors = [plt.cm.Spectral(i / float(len(quant["Category"]))) for i in range(len(quant["Category"]))] plt.figure(figsize=(15, 8), dpi=80) squarify.plot(sizes=quant["Total"], label=quant["Category"], color=colors, alpha=0.8, value=quant["Total"]) plt.title('Treemap of the number of the Nobel prizes per category') plt.axis('off') st.pyplot() with st.echo(code_location='below'): x = data_year[["Chemistry", "Literature", "Physiology or Medicine", "Peace", "Physics", "Economic Sciences"]].copy() x.index = data_year["Year"] x.plot.area(color=colors) st.pyplot() "А вот тут можно сломать мозг, но всё на самом деле проще. По оси Y отложено количество человек, которые взяли нобеля " \ "в каждый год. Верхняя огибающая - тотал, а если мы рассмотрим закрашенные зоны, то поймём, разбиение на предметы внутри " \ "этого тотала. 
В отличии от прошлого графика, этот ещё выресовываает общую тенденцию в виде роста количества премий с теч времени" with st.echo(code_location='below'): strany = data_year[["Year", "male"]].copy() data["birth_countryNow"] = data["birth_countryNow"].fillna(data["org_founded_countryNow"]) for i in set(data["birth_countryNow"].to_list()): strany[i] = 0 strany = strany.drop(strany.columns[1], axis=1) for j in strany.columns.to_list()[1:-1]: for i in data_year["Year"]: strany.loc[i - 1901 - 3 * int(i / 1943)][j] = int( data[data["awardYear"] == int(i)]["birth_countryNow"].to_list().count(j)) po_strane = pd.DataFrame(strany[strany.columns[1:]].sum().sort_values(ascending=False)) po_strane = po_strane.iloc[0:25, :] plt.figure(figsize=(9, 9)) ax = plt.subplot(111, polar=True) plt.axis('off') lowerLimit = 30 labelPadding = 4 max = int(po_strane.max()) # Idea taken FROM https://www.python-graph-gallery.com/circular-barplot-basic slope = (max - lowerLimit) / max heights = slope * po_strane.iloc[:, 0] + lowerLimit width = 2 * np.pi / len(po_strane.index) indexes = list(range(1, len(po_strane.index) + 1)) angles = [element * width for element in indexes] bars = ax.bar(angles, height=heights, width=width, bottom=lowerLimit, linewidth=2, edgecolor="white", color=colors) for bar, angle, height, label in zip(bars, angles, heights, po_strane.index.to_list()): rotation = np.rad2deg(angle) alignment = "" if angle >= np.pi / 2 and angle < 3 * np.pi / 2: alignment = "right" rotation = rotation + 180 else: alignment = "left" ax.text(x=angle, y=lowerLimit + bar.get_height() + labelPadding, s=str(label) + " " + str(int( po_strane[po_strane.index == label].iloc[ :, 0])), ha=alignment, va='center', rotation=rotation, rotation_mode="anchor") # END FROM https://www.python-graph-gallery.com/circular-barplot-basic "тут и так всё понятно" "Btw, я бы на вашем месте не проверял числа, потому что они не сойдутся, во всём виноват датафрейм((( Я честно пытался ручками сделать его лучше, но он всё ещё 
дефектный парень" st.pyplot() with st.echo(code_location='below'): country = pd.DataFrame(strany[strany.columns[2:]].sum()) country = country.reset_index(level=0, drop=False) # FROM https://melaniesoek0120.medium.com/data-visualization-how-to-plot-a-map-with-geopandas-in-python-73b10dcd4b4b # Аббревиатура страны по названию def alpha3code(column): CODE = [] for country in column: try: code = pycountry.countries.get(name=country).alpha_3 # .alpha_3 means 3-letter country code # .alpha_2 means 2-letter country code CODE.append(code) except: CODE.append('None') return CODE # END FROM https://melaniesoek0120.medium.com/data-visualization-how-to-plot-a-map-with-geopandas-in-python-73b10dcd4b4b # Плохой датасет, некоторое пришлось прописывать ручками country['CODE'] = alpha3code(country["index"]) country.loc[country['index'] == "Czech Republic", 'CODE'] = "CZE" country.loc[country['index'] == "United Kingdom", 0] = 105 country.loc[country['index'] == "Vietnam", 'CODE'] = "VNM" country.loc[country['index'] == "Iran", 'CODE'] = "IRN" country.loc[country['index'] == "Venezuela", 'CODE'] = "VEN" country.loc[country['index'] == "South Korea", 'CODE'] = "KOR" country.loc[country['index'] == "Russia", 'CODE'] = "RUS" country.loc[country['index'] == "the Netherlands", 'CODE'] = "NLD" country.loc[country['index'] == "USA", 'CODE'] = "USA" world = geopandas.read_file(geopandas.datasets.get_path('naturalearth_lowres')) country = country.drop(country[country["CODE"] == "None"].index) ppp = world.merge(country[["CODE", 0]], left_on='iso_a3', right_on='CODE') # Using the example (close to documentation) from https://medium.com/using-specialist-business-databases/creating-a-choropleth-map-using-geopandas-and-financial-data-c76419258746 plt.figure(figsize=(9, 9)) ax = ppp.dropna().plot(column=0, cmap="OrRd", figsize=(20, 20), scheme='quantiles', k=5, legend=True) ax.set_axis_off() ax.get_legend().set_bbox_to_anchor((.12, .12)) ax.get_figure() "Карта номер рас:" st.pyplot() with 
st.echo(code_location='below'): # idea from https://plotly.com/python/choropleth-maps/ fig = go.Figure(data=go.Choropleth(locations = country['CODE'],z = country[0],text = country['index'],colorscale = "spectral", autocolorscale=False,reversescale=False, marker_line_color='darkgray',marker_line_width=0.5,colorbar_title = 'No.')) fig.update_layout( title_text='Nobel prizes per country',geo=dict(showframe=False,showcoastlines=False, projection_type='equirectangular'),annotations = [dict(x=0.55, y=0.1, xref='paper',yref='paper', text="Можно туда сюда поводить потыкать",showarrow = False)]) "Карта номер два:" st.write(fig) with st.echo(code_location='below'): s = st.selectbox("Chose your fighter:", b) text = "" for i in data[data["category"]==s]["motivation"]: text+= " "+i mask = np.array(Image.open("Literature.png")) #FROM https://towardsdatascience.com/create-word-cloud-into-any-shape-you-want-using-python-d0b88834bc32 mask_colors = ImageColorGenerator(mask) wc = WordCloud(stopwords=STOPWORDS, mask=mask, background_color="white", max_words=2000, max_font_size=256, random_state=42, width=mask.shape[1],height=mask.shape[0],color_func=mask_colors) #END FROM https://towardsdatascience.com/create-word-cloud-into-any-shape-you-want-using-python-d0b88834bc32 wc.generate(text) plt.imshow(wc, interpolation="bilinear") plt.axis('off') "Интересно, а о чём же вообще все эти научные работы? Наверняка у них очень заумные названия..." \ "Но должны же слова иногда повторяться?" 
st.pyplot() yes = st.checkbox("Хочу посмотреть сразу на все предметы") if yes == True: with st.echo(code_location='below'): for j in b: text = "" for i in data[data["category"] == j]["motivation"]: text += " " + i wordcloud = WordCloud(width=400, height=400, margin=0, background_color="white").generate(text) plt.subplot(2, 3, b.index(j) + 1) plt.imshow(wordcloud, interpolation='bilinear') plt.axis("off") plt.title(str(j)) plt.margins(x=0, y=0) st.pyplot() with st.echo(code_location='below'): univ = data_year[["Year", "male"]].copy() for i in set(data["category"].to_list()): univ[i] = 0 for j in ["Literature","Chemistry", "Physiology or Medicine", "Peace", "Physics", "Economic Sciences"]: for i in data_year["Year"]: if i == 1901: univ.loc[i - 1901 - 3 * int(i / 1943)][j] = int( data[data["awardYear"] == int(i)]["category"].to_list().count(j)) else: univ.loc[i - 1901 - 3 * int(i / 1943)][j] = int( data[data["awardYear"] == int(i)]["category"].to_list().count(j))+int(univ.loc[i - 1902 - 3 * int(i / 1943)][j]) univ = univ.set_index("Year").drop(columns="male") univ = univ.sort_values(univ.iloc[115].name,axis=1) # USED several comments from https://discuss.streamlit.io/t/how-to-animate-a-line-chart/164/2 #the_plot = st.pyplot(plt) #def animate(i, x, y, colors): #ax.barh(x, width=y, color=colors) #ax.set_title(i, ) #the_plot.pyplot(plt) #fig, ax = plt.subplots() #for i in range(110): #width = univ.iloc[i].values #animate(i, univ.iloc[i].index, width, colors) #time.sleep(0.1) "Код можно посмотреть в файле, ровно то что закомменчено, но из-за функции код не хочет работать внутри st.echo. Вообще " \ "для таких штук есть крутой пакет - celluloid, там есть camera которая позволяет делать красоту, но стрмлит ее поддерживает(((" "Придётся подождать... 
Это не конец, загрузится - появится продолжение" the_plot = st.pyplot(plt) def animate(i, x, y, colors): ax.barh(x, width=y, color=colors) ax.set_title(1901+i+ 3 * int(i / 39,)-3 * int(i / 78,)) the_plot.pyplot(plt) fig, ax = plt.subplots() for i in range(0,116): width = univ.iloc[i].values animate(i, univ.iloc[i].index, width, colors) time.sleep(0.001) aue = st.checkbox("Пропустил всю анимацию? Ну ладноооо, специально для тебя могу повторить, поставь галочку и наберись терпения") if aue == "Yes": fig, ax = plt.subplots() for i in range(110): width = univ.iloc[i].values animate(i, univ.iloc[i].index, width, colors) time.sleep(0.001)
malina-molodchina/letoproject
a.py
from operator import itemgetter import datetime import plotly.graph_objects as go import plotly.express as px from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot init_notebook_mode(connected=False) def main(): df = pd.read_csv('./country_vaccinations.csv') datum = df.groupby('date')['daily_vaccinations'].sum() df_vacc_distr = df.groupby(['vaccines', 'date'])['daily_vaccinations'].sum() df_nan_distr = pd.read_csv('./country_vaccinations.csv', usecols=['country', 'total_vaccinations', 'daily_vaccinations']) # список стран countries = sorted(list(set(df['country'].tolist()))) # список вакцин vaccines = set() for i in df['vaccines']: sp = i.split(', ') for vaci in sp: vaccines.add(vaci) vaccines = list(sorted(vaccines)) # список дат date = set() for i in df['date']: date.add(i.replace('-', '')) date = sorted(list(date)) vaccinations = {} for i in range(len(date)): vaccinations[date[i]] = datum[i] page = st.sidebar.selectbox("Выберите страницу", ['Основные данные', 'Анализ кол-ва вакцинировавшихся', 'Список доступных вакцин', 'Вакцинация конкретной вакциной по дням', 'Наименее открытые по данным страны']) if page == 'Основные данные': with st.echo(code_location='below'): # подгружаем датасеты df = pd.read_csv('./country_vaccinations.csv') datum = df.groupby('date')['daily_vaccinations'].sum() df_vacc_distr = df.groupby(['vaccines', 'date'])['daily_vaccinations'].sum() df_nan_distr = pd.read_csv('./country_vaccinations.csv', usecols=['country', 'total_vaccinations', 'daily_vaccinations']) st.title('Анализ вакцинации от Covid-19') st.text('Выберите страницу') st.dataframe(df) # список стран countries = sorted(list(set(df['country'].tolist()))) # список вакцин vaccines = set() for i in df['vaccines']: sp = i.split(', ') for vaci in sp: vaccines.add(vaci) vaccines = list(sorted(vaccines)) # список дат date = set() for i in df['date']: date.add(i.replace('-', '')) date = sorted(list(date)) vaccinations = {} for i in range(len(date)): 
vaccinations[date[i]] = datum[i] elif page == 'Анализ кол-ва вакцинировавшихся': with st.echo(code_location='below'): st.title('Анализ кол-ва вакцинировавшихся') st.text('Обратите внимание, что перемещаться может как левый, так и правый ползунок') st.dataframe(datum) a = st.slider('Дата', 5, 15, (0, 115), 5) start_date = datetime.datetime.strptime(date[a[0]], '%Y%m%d').strftime("%d. %B %Y") end_date = datetime.datetime.strptime(date[a[1] - 1], '%Y%m%d').strftime("%d. %B %Y") st.info('Начало: **%s** Конец: **%s**' % (start_date, end_date)) x = list(dict(sorted(vaccinations.items(), key=itemgetter(1))[a[0]:a[1]:5]).keys()) y = list(dict(sorted(vaccinations.items(), key=itemgetter(1))[a[0]:a[1]:5]).values()) fig_daily_vacc = go.Figure(data=[go.Bar(x=x, y=y)]) fig_daily_vacc.update_layout(title='Рост количества вакцинировавшихся', autosize=False, width=800, height=800) st.plotly_chart(fig_daily_vacc) st.write( 'Мы можем заметить что в декабре рост был линейным.(просто разместите ползунок так,чтобы показывался только месяц декабрь). В январе произошел резкий рост(почти квадратичный). В феврале же кол-во вакцинировшихся осталось постоянным. 
В марте-апреле кол-во вакцинировшихся снова начало экспоненциально расти') elif page == 'Список доступных вакцин': with st.echo(code_location='below'): st.title('Список доступных вакцин') st.text('В последнее время появилолсь множество вакцин, в этой работе я хочу рассмотреть их популярность') for i in vaccines: st.markdown(i) elif page == 'Вакцинация конкретной вакциной по дням': with st.echo(code_location='below'): st.title('Вакцинация конкретной вакциной по дням') aaa = df.groupby(['vaccines', 'date'])['daily_vaccinations'].sum().to_dict() vaccines_distr = {} for i in vaccines: vaccines_distr[i] = {} for k in date: vaccines_distr[i][k] = 0 aaa1 = {} for i in aaa: temp = i[0].split(', ') temp = tuple(temp) i1 = (temp, i[1].replace('-', '')) aaa1[i1] = aaa[i] / len(i[0].split(', ')) for i in aaa1: for k in i[0]: vaccines_distr[k][i[1]] += aaa1[i] vaccines_distr_pie = {} for i in vaccines_distr: vaccines_distr_pie[i] = vaccines_distr[i][list(vaccines_distr[i].keys())[-1]] st.dataframe(df_vacc_distr) x1_pie = vaccines y1_pie = list(vaccines_distr_pie.values()) num = len(x1_pie) theta = [(i + 1.5) * 360 / num for i in range(num)] width = [360 / num for _ in range(num)] color_seq = px.colors.qualitative.Pastel color_ind = range(0, len(color_seq), len(color_seq) // num) colors = [color_seq[i] for i in color_ind] a_ticks = [(i + 1) * 360 / num for i in range(num)] plotss = [go.Barpolar(r=[r], theta=[t], width=[w], name=n, marker_color=[c]) for r, t, w, n, c in zip(y1_pie, theta, width, x1_pie, colors)] fig_vacc_distr_pie = go.Figure(plotss) fig_vacc_distr_pie.update_layout(polar_angularaxis_tickvals=a_ticks, title='Общая популярность вакцин', autosize=False, width=800, height=800, polar=dict(angularaxis=dict(showticklabels=False, ticks=''))) st.plotly_chart(fig_vacc_distr_pie) option_1 = st.selectbox( 'Выберите вакцину №1', vaccines, key='1') 'Вы выбрали: ', option_1 option_2 = st.selectbox( 'Выберите вакцину №2', vaccines, key='2') 'Вы выбрали: ', option_2 x1_1 = 
list(vaccines_distr[option_1].keys())[::5] y1_1 = list(vaccines_distr[option_1].values())[::5] y1_2 = list(vaccines_distr[option_2].values())[::5] fig_vacc_distr = go.Figure() fig_vacc_distr.add_trace(go.Bar(name=option_1, x=x1_1, y=y1_1)) fig_vacc_distr.add_trace(go.Bar(name=option_2, x=x1_1, y=y1_2)) fig_vacc_distr.update_layout(title='Популярность вакцин ' + option_1 + ' и ' + option_2 + ' со временем', autosize=False, width=800, height=800) st.plotly_chart(fig_vacc_distr) st.text( 'Сейчас самой популярной вакциной является Оксфорд.Затем идет Синовак, который почти в 2 раза менее популярный. Вместе они покрывают более 1/4 рынка. Затем все идет согласно диаграмме о вакцинах') else: with st.echo(code_location='below'): st.title('Наименее открытые по данным страны') st.dataframe(df_nan_distr) all_nans = 0 for i in df_nan_distr.isna().sum(): all_nans += i nan_distr = {} for i in countries: nan_distr[i] = 0 for i in range(len(df_nan_distr)): nan_distr[df_nan_distr.iloc[i, 0]] += df_nan_distr.iloc[i].isna().sum() for i in nan_distr: nan_distr[i] = nan_distr[i] * 100 / all_nans x2 = list(dict(sorted(nan_distr.items(), key=itemgetter(1), reverse=True)[:10]).keys()) y2 = list(dict(sorted(nan_distr.items(), key=itemgetter(1), reverse=True)[:10]).values()) help_dict = {"countries": list(range(len(x2))), "perc": y2} help_fig = px.scatter(help_dict, x="countries", y="perc", trendline="ols") x_trend = help_fig["data"][1]['x'] y_trend = help_fig["data"][1]['y'] fig_nan = go.Figure() fig_nan.add_trace(go.Bar(x=x2, y=y2, name="Распределение закрытости данных")) fig_nan.add_trace(go.Line(x=x2, y=y_trend, name="Линия тренда")) # fig_nan.add_trace(go.Scatter(name = 'Линия тренда', x = x2, y = y_pred, mode = 'lines')) fig_nan.update_layout(title='Топ 10 стран по закрытости данных, %', autosize=False, width=800, height=800) fig_nan.update_layout(xaxis_title='countries', yaxis_title='%') st.plotly_chart(fig_nan) if __name__ == '__main__': main()
ojuba-org/ojuba-virtual-cd
setup.py
#! /usr/bin/python import sys, os, os.path from distutils.core import setup from glob import glob # to install type: # python setup.py install --root=/ def no_empty(l): return filter(lambda (i,j): j, l) def recusive_data_dir(to, src, l=None): D=glob(os.path.join(src,'*')) files=filter( lambda i: os.path.isfile(i), D ) dirs=filter( lambda i: os.path.isdir(i), D ) if l==None: l=[] l.append( (to , files ) ) for d in dirs: recusive_data_dir( os.path.join(to,os.path.basename(d)), d , l) return l locales=map(lambda i: ('share/'+i,[''+i+'/ojuba-virtual-cd.mo',]),glob('locale/*/LC_MESSAGES')) data_files=[] data_files.extend(locales) setup (name='OjubaVirtualCD', version='0.2.0', description='Ojuba Virtual CD/DVD using fuseiso', author='<NAME>', author_email='<EMAIL>', url='http://git.ojuba.org/', license='Waqf', #packages=['OjubaVirtualCD'], py_modules = ['OjubaVirtualCD'], scripts=['ojuba-virtual-cd'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: End Users/Desktop', 'Operating System :: POSIX', 'Programming Language :: Python', ], data_files=data_files )
ojuba-org/ojuba-virtual-cd
OjubaVirtualCD.py
<gh_stars>0 # -*- coding: UTF-8 -*- # -*- Mode: Python; py-indent-offset: 4 -*- """ Ojuba Virtual CD Copyright © 2011, <NAME> <<EMAIL>> PyGtk+ front-end for fuseiso Released under terms of Waqf Public License. This program is free software; you can redistribute it and/or modify it under the terms of the latest version Waqf Public License as published by Ojuba.org. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. The Latest version of the license can be found on "http://www.ojuba.org/wiki/doku.php/رخصة_وقف_العامة" """ import sys,os,os.path import time from gi.repository import Gtk, GObject from subprocess import Popen,PIPE import gettext import re from glob import glob label_re=re.compile(r"""'([^']+)'""") mount_prefix=os.path.expanduser('~/.virtuals') _ps=[] gettext.install('ojuba-virtual-cd', "/usr/share/locale", unicode=0) def run_in_bg(cmd): global _ps setsid = getattr(os, 'setsid', None) if not setsid: setsid = getattr(os, 'setpgrp', None) _ps=filter(lambda x: x.poll()!=None,_ps) # remove terminated processes from _ps list _ps.append(Popen(cmd,0,'/bin/sh',shell=True, preexec_fn=setsid)) def get_pids(l): pids=[] for i in l: p=Popen(['/sbin/pidof',i], 0, stdout=PIPE) l=p.communicate()[0].strip().split() r=p.returncode if r==0: pids.extend(l) pids.sort() return pids def get_desktop(): """return 1 for kde, 0 for gnome, -1 none of them""" l=get_pids(('kwin','ksmserver',)) if l: kde=l[0] else: kde=None l=get_pids(('gnome-session',)) if l: gnome=l[0] else: gnome=None if kde: if not gnome or kde<gnome: return 1 else: return 0 if gnome: return 0 else: return -1 def run_file_man(mp): # TODO: add Dolphin here if get_desktop()==0: run_in_bg("nautilus --no-desktop '%s'" % mp) elif get_desktop()==1: run_in_bg("konqueror '%s'" % mp) elif os.path.exists('/usr/bin/thunar'): run_in_bg("thunar '%s'" % mp) elif os.path.exists('/usr/bin/pcmanfm'): 
run_in_bg("pcmanfm '%s'" % mp) elif os.path.exists('/usr/bin/nautilus'): run_in_bg("nautilus --no-desktop '%s'" % mp) elif os.path.exists('/usr/bin/konqueror'): run_in_bg("konqueror '%s'" % mp) def bad(msg): dlg = Gtk.MessageDialog (None, Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.ERROR, Gtk.ButtonsType.CLOSE, msg) dlg.run() dlg.destroy() def check_mount_prefix(): if not os.path.exists(mount_prefix): try: os.makedirs(mount_prefix) except OSError: bad( _("Mount prefix [%s] is not found, please create it.") % mount_prefix ) sys.exit(1) class VCDAbout(Gtk.AboutDialog): def __init__(self, parent=None): Gtk.AboutDialog.__init__(self, parent=parent) self.set_default_response(Gtk.ResponseType.CLOSE) self.connect('delete-event', lambda w, *a: w.hide() or True) self.connect('response', lambda w, *a: w.hide() or True) try: self.set_program_name("ojuba-virtual-cd") except: pass self.set_name(_("Ojuba Virtual CD")) #about_dlg.set_version(version) self.set_copyright("Copyright (c) 2008-2009 <NAME> <<EMAIL>>") self.set_comments(_("Mount CD/DVD images (iso, nrg, bin, mdf, img, ..etc.)")) self.set_license(""" Released under terms on Waqf Public License. This program is free software; you can redistribute it and/or modify it under the terms of the latest version Waqf Public License as published by Ojuba.org. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
The Latest version of the license can be found on "http://www.ojuba.org/wiki/doku.php/waqf/license" """) self.set_website("http://virtualcd.ojuba.org/") self.set_website_label("http://virtualcd.ojuba.org") self.set_authors(["<NAME> <<EMAIL>>", "a.atalla <<EMAIL>>"]) self.run() self.destroy() class VCD_mount_dlg(Gtk.FileChooserDialog): def __init__(self): Gtk.FileChooserDialog.__init__(self,_("Select CD/DVD image file"),buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT, Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT)) ff=Gtk.FileFilter() ff.add_mime_type('application/x-cd-image') for i in ('iso','nrg', 'bin','mdf','img'): l=list(i) ff.add_pattern('*.[%s%s][%s%s][%s%s]' % ( l[0],l[0].upper(), l[1],l[1].upper(), l[2],l[2].upper())) self.set_filter(ff) self.connect('delete-event', lambda w, *a: w.hide() or True) self.connect('response', lambda w, *a: w.hide() or True) class VCDStatusIcon(Gtk.StatusIcon): def __init__(self): Gtk.StatusIcon.__init__(self) self.connect ('popup-menu', self.right_click_event) self.set_title(_("OjubaVirtualCD")) self.set_from_stock(Gtk.STOCK_CDROM) self.mount_dlg = VCD_mount_dlg() #self.about_dlg = VCDAbout() self.setup_popup_menu() self.startUP() self.refresh_cb() self.set_visible(True) GObject.timeout_add(15000, self.refresh_timer) def startUP(self): if len(sys.argv)>1: if (sys.argv[1]!='--hidden'): for i in sys.argv[1:]: self.mount_f(i) else: self.mount_cb() def setup_popup_menu(self): self.popup_menu = Gtk.Menu() self.mounted_menu = Gtk.Menu() self.open_menu = Gtk.Menu() i = Gtk.MenuItem(_("Mount image")) i.connect('activate', self.mount_cb) self.popup_menu.add(i) # self.mounted_menu.add(Gtk.SeparatorMenuItem()) i = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_REFRESH, None) i.connect('activate', self.refresh_cb) i.set_always_show_image(True) self.mounted_menu.add(i) self.open_menu.add(Gtk.SeparatorMenuItem.new()) i = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_REFRESH, None) i.connect('activate', self.refresh_cb) i.set_always_show_image(True) 
self.open_menu.add(i) self.popup_menu.add(Gtk.SeparatorMenuItem.new()) self.open_menu_item=i= Gtk.MenuItem(_("Open mounted image")) i.set_submenu(self.open_menu) self.popup_menu.add(i) self.umount_menu_item=i= Gtk.MenuItem(_("Unmount")) i.set_submenu(self.mounted_menu) self.popup_menu.add(i) self.popup_menu.add(Gtk.SeparatorMenuItem.new()) i = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_ABOUT, None) i.connect('activate', self.about_cb) i.set_always_show_image(True) self.popup_menu.add(i) i = Gtk.ImageMenuItem.new_from_stock(Gtk.STOCK_QUIT, None) i.connect('activate', Gtk.main_quit) i.set_always_show_image(True) self.popup_menu.add(i) def right_click_event(self, icon, button, time): self.popup_menu.show_all() self.popup_menu.popup(None, None, Gtk.StatusIcon.position_menu, icon, button, time) def refresh_timer(self): self.refresh_cb(); return True; def refresh_cb(self, *args): self.popup_menu.popdown() mm = Gtk.Menu() oo = Gtk.Menu() for i in os.listdir(mount_prefix): mp = os.path.join(mount_prefix,i) if (os.path.ismount(mp)): j = Gtk.MenuItem(i.decode(sys.getfilesystemencoding())) o = Gtk.MenuItem(i.decode(sys.getfilesystemencoding())) j.connect('activate', self.umount_cb, i) o.connect('activate', lambda a: run_file_man(mp)) mm.add(j) oo.add(o) mm.add(Gtk.SeparatorMenuItem()) oo.add(Gtk.SeparatorMenuItem()) i = Gtk.ImageMenuItem(Gtk.STOCK_REFRESH) i.connect('activate', self.refresh_cb) mm.add(i) i = Gtk.ImageMenuItem(Gtk.STOCK_REFRESH) i.connect('activate', self.refresh_cb) oo.add(i) mounted_menu = mm open_menu = oo g = self.open_menu_item.get_submenu() s = self.umount_menu_item.get_submenu() self.umount_menu_item.set_submenu(mm) self.open_menu_item.set_submenu(oo) del s, g def mount_f(self, fn): if not os.path.exists(fn): bad(_("File does not exist")); return -1 l=self.get_label(fn) if not l: l=os.path.basename(fn) mp=os.path.join( mount_prefix, l ) if os.path.exists(mp): if os.path.ismount(os.path.join(mp)): bad(_("Already mounted")); return -2 try: os.rmdir(mp) 
except OSError: bad(_("Mount point [%s] already exists, remove it please!") % mp); return -1 try: os.mkdir(mp) except: bad(_('Could not create folder [%s]') % mp.decode(sys.getfilesystemencoding()) ); return -1 r=os.system('fuseiso -c UTF8 "%s" "%s"' % (fn, mp)) if r: bad(_("Could not mount [%s]") % mp); return -1 else: run_file_man(mp) self.refresh_cb() return 0 def mount_cb(self, *args): if (self.mount_dlg.run()==Gtk.ResponseType.ACCEPT): self.mount_f(self.mount_dlg.get_filename()) self.mount_dlg.hide() def get_label_from_blkid(self, fn): try: p=Popen(['blkid','-o','value','-s','LABEL',fn], 0, stdout=PIPE) l=p.communicate()[0].strip() except: return None r=p.returncode if r==0 and l and len(l)>0: return l else: return None def get_label_from_file(self, fn): try: p=Popen(['file',fn], 0, stdout=PIPE) o=p.communicate()[0].split(':',1)[1].strip() l=label_re.findall(o)[0].strip() except: return None r=p.returncode if r==0 and l and len(l)>0: return l else: return None def get_label(self, fn): return self.get_label_from_blkid(fn) or self.get_label_from_file(fn) def umount_cb(self, i, mp): mpp=os.path.join(mount_prefix,mp.encode(sys.getfilesystemencoding())) r=os.system("fusermount -u '%s'" % mpp) if r: bad(_("Could not unmount [%s]") % mp) else: os.rmdir(mpp) self.refresh_cb() def about_cb(self, *args): #self.about_dlg.run() return VCDAbout() bus, bus_name, bus_object=None,None,None try: import dbus import dbus.service #import GObject # for GObject.MainLoop() if no Gtk is to be used from dbus.mainloop.glib import DBusGMainLoop dbus_loop = DBusGMainLoop(set_as_default=True) bus = dbus.SessionBus() except ImportError: pass def init_dbus(): global bus_name, bus_object, app if not bus: return class Manager(dbus.service.Object): def __init__(self, bus, path): dbus.service.Object.__init__(self,bus,path) @dbus.service.method("org.ojuba.VirtualCD", in_signature='as', out_signature='i') def Mount(self,a): r=0 for fn in a: r|=app.mount_f(fn) return r 
@dbus.service.method("org.ojuba.VirtualCD", in_signature='', out_signature='s') def Version(self): return "0.3.0" # values from /usr/include/dbus-1.0/dbus/dbus-shared.h r=bus.request_name('org.ojuba.VirtualCD', flags=0x4) if r!=1: print "Another process own OjubaVirtualCD Service, pass request to it: " trials=0; appletbus=False while(appletbus==False and trials<20): print ".", try: appletbus=bus.get_object("org.ojuba.VirtualCD","/Manager"); break except: appletbus=False time.sleep(1); trials+=1 print "*" if len(sys.argv)==1: print "already running and no arguments passed"; exit(-1) if appletbus: exit(appletbus.Mount(sys.argv[1:],dbus_interface='org.ojuba.VirtualCD')) else: print "unable to connect" exit(-1) bus_name = dbus.service.BusName("org.ojuba.VirtualCD", bus) bus_object = Manager(bus, '/Manager') def main(): global app check_mount_prefix() for i in glob(os.path.join(mount_prefix,'*')): if os.path.isdir(i): try: os.rmdir(i) except: pass init_dbus() app = VCDStatusIcon() try: Gtk.main() except KeyboardInterrupt: print "Exiting..." if __name__ == '__main__': main()
mbarkhau/pylev
tests.py
# <gh_stars>0  -- dataset artifact kept as a comment so the module parses
"""Tests for pylev: every Levenshtein variant is run against shared fixtures."""
import itertools
import unittest

import pylev

# (name, string_a, string_b, expected_distance) fixtures shared by all
# generated test methods below.
test_data = [
    ('classic', "kitten", "sitting", 3),
    ('same', "kitten", "kitten", 0),
    ('empty', "", "", 0),
    ('a', "meilenstein", "levenshtein", 4),
    ('b', "levenshtein", "frankenstein", 6),
    ('c', "confide", "deceit", 6),
    ('d', "CUNsperrICY", "conspiracy", 8),
]

test_functions = [
    # pylev.classic_levenshtein,  # disabled because it is so slow
    pylev.recursive_levenshtein,
    pylev.wf_levenshtein,
    pylev.wfi_levenshtein
]


class Tests(unittest.TestCase):

    def test_damerau_levenshtein(self):  # BUG FIX: parameter was misspelled "seld"
        assert pylev.damerau_levenshtein("ba", "abc") == 2
        assert pylev.damerau_levenshtein("foobar", "foobra") == 1
        assert pylev.damerau_levenshtein("fee", "deed") == 2


def _mk_test_fn(fn, a, b, expected):
    """Build a test method asserting fn(a, b) == fn(b, a) == expected."""
    def _test_fn(self):
        self.assertEqual(fn(a, b), expected)
        self.assertEqual(fn(b, a), expected)
    return _test_fn


# Attach one generated test per (function, fixture) combination.
for lev_fn, data in itertools.product(test_functions, test_data):
    name, a, b, expected = data
    test_fn = _mk_test_fn(lev_fn, a, b, expected)
    setattr(Tests, "test_%s_%s" % (name, lev_fn.__name__), test_fn)


if __name__ == '__main__':
    unittest.main()
mbarkhau/pylev
pylev.py
# <reponame>mbarkhau/pylev<gh_stars>1-10  -- dataset artifact kept as a comment
"""
pylev
=====

A pure Python Levenshtein implementation that's not freaking GPL'd.

Based off the Wikipedia code samples at
http://en.wikipedia.org/wiki/Levenshtein_distance.

Usage
-----

Usage is fairly straightforward.::

    import pylev
    distance = pylev.levenshtein('kitten', 'sitting')
    assert distance == 3

"""
__author__ = '<NAME>'
__version__ = (1, 3, 0)
__license__ = 'New BSD'


import sys

PY2 = sys.version_info[0] == 2

if PY2:
    # Use the lazy range on Python 2 as well.
    range = xrange  # noqa: F821 -- xrange only exists on Python 2


def classic_levenshtein(string_1, string_2):
    """
    Calculates the Levenshtein distance between two strings.

    This version is easier to read, but significantly slower than the version
    below (up to several orders of magnitude). Useful for learning, less so
    otherwise.

    Usage::

        >>> classic_levenshtein('kitten', 'sitting')
        3
        >>> classic_levenshtein('kitten', 'kitten')
        0
        >>> classic_levenshtein('', '')
        0

    """
    len_1 = len(string_1)
    len_2 = len(string_2)
    cost = 0

    # A substitution only costs something when the leading characters differ.
    if len_1 and len_2 and string_1[0] != string_2[0]:
        cost = 1

    if len_1 == 0:
        return len_2
    elif len_2 == 0:
        return len_1
    else:
        return min(
            classic_levenshtein(string_1[1:], string_2) + 1,         # deletion
            classic_levenshtein(string_1, string_2[1:]) + 1,         # insertion
            classic_levenshtein(string_1[1:], string_2[1:]) + cost,  # substitution
        )


def recursive_levenshtein(string_1, string_2, len_1=None, len_2=None, offset_1=0, offset_2=0, memo=None):
    """
    Calculates the Levenshtein distance between two strings.

    This is the memoized variant of ``classic_levenshtein``: instead of
    slicing, it tracks offsets into the original strings and caches
    sub-results, keeping the running time polynomial.

    Usage::

        >>> recursive_levenshtein('kitten', 'sitting')
        3
        >>> recursive_levenshtein('kitten', 'kitten')
        0
        >>> recursive_levenshtein('', '')
        0

    """
    if len_1 is None:
        len_1 = len(string_1)

    if len_2 is None:
        len_2 = len(string_2)

    if memo is None:
        memo = {}

    # Sub-problems are identified by the (offset, remaining-length) pairs.
    key = ','.join([str(offset_1), str(len_1), str(offset_2), str(len_2)])

    if memo.get(key) is not None:
        return memo[key]

    # Base cases: distance to an empty suffix is the other suffix's length.
    if len_1 == 0:
        return len_2
    elif len_2 == 0:
        return len_1

    cost = 0

    if string_1[offset_1] != string_2[offset_2]:
        cost = 1

    dist = min(
        recursive_levenshtein(string_1, string_2, len_1 - 1, len_2, offset_1 + 1, offset_2, memo) + 1,
        recursive_levenshtein(string_1, string_2, len_1, len_2 - 1, offset_1, offset_2 + 1, memo) + 1,
        recursive_levenshtein(string_1, string_2, len_1 - 1, len_2 - 1, offset_1 + 1, offset_2 + 1, memo) + cost,
    )
    memo[key] = dist
    return dist


def wf_levenshtein(string_1, string_2):
    """
    Calculates the Levenshtein distance between two strings.

    This version uses the Wagner-Fischer algorithm.

    Usage::

        >>> wf_levenshtein('kitten', 'sitting')
        3
        >>> wf_levenshtein('kitten', 'kitten')
        0
        >>> wf_levenshtein('', '')
        0

    """
    len_1 = len(string_1) + 1
    len_2 = len(string_2) + 1

    # Flat (len_1 x len_2) distance matrix; cell (i, j) lives at i + j * len_1.
    d = [0] * (len_1 * len_2)

    # Distance from the empty prefix is simply the number of characters.
    for i in range(len_1):
        d[i] = i
    for j in range(len_2):
        d[j * len_1] = j

    for j in range(1, len_2):
        for i in range(1, len_1):
            if string_1[i - 1] == string_2[j - 1]:
                d[i + j * len_1] = d[i - 1 + (j - 1) * len_1]
            else:
                d[i + j * len_1] = min(
                    d[i - 1 + j * len_1] + 1,        # deletion
                    d[i + (j - 1) * len_1] + 1,      # insertion
                    d[i - 1 + (j - 1) * len_1] + 1,  # substitution
                )

    return d[-1]


def wfi_levenshtein(string_1, string_2):
    """
    Calculates the Levenshtein distance between two strings.

    This version uses an iterative version of the Wagner-Fischer algorithm,
    keeping only two rows of the distance matrix in memory.

    Usage::

        >>> wfi_levenshtein('kitten', 'sitting')
        3
        >>> wfi_levenshtein('kitten', 'kitten')
        0
        >>> wfi_levenshtein('', '')
        0

    """
    if string_1 == string_2:
        return 0

    len_1 = len(string_1)
    len_2 = len(string_2)

    if len_1 == 0:
        return len_2
    if len_2 == 0:
        return len_1

    # Iterate over the shorter string so the rows are as long as possible
    # and the outer loop as short as possible.
    if len_1 > len_2:
        string_2, string_1 = string_1, string_2
        len_2, len_1 = len_1, len_2

    d0 = [i for i in range(len_2 + 1)]  # previous row
    d1 = [j for j in range(len_2 + 1)]  # row being computed

    for i in range(len_1):
        d1[0] = i + 1
        for j in range(len_2):
            cost = d0[j]

            if string_1[i] != string_2[j]:
                # substitution
                cost += 1

                # insertion
                x_cost = d1[j] + 1
                if x_cost < cost:
                    cost = x_cost

                # deletion
                y_cost = d0[j + 1] + 1
                if y_cost < cost:
                    cost = y_cost

            d1[j + 1] = cost

        d0, d1 = d1, d0

    return d0[-1]


def damerau_levenshtein(string_1, string_2):
    """
    Calculates the Damerau-Levenshtein distance between two strings.

    In addition to insertions, deletions and substitutions,
    Damerau-Levenshtein considers adjacent transpositions.

    This version is based on an iterative version of the Wagner-Fischer
    algorithm, keeping three rows of the distance matrix.

    Usage::

        >>> damerau_levenshtein('kitten', 'sitting')
        3
        >>> damerau_levenshtein('kitten', 'kittne')
        1
        >>> damerau_levenshtein('', '')
        0

    """
    if string_1 == string_2:
        return 0

    len_1 = len(string_1)
    len_2 = len(string_2)

    if len_1 == 0:
        return len_2
    if len_2 == 0:
        return len_1

    if len_1 > len_2:
        string_2, string_1 = string_1, string_2
        len_2, len_1 = len_1, len_2

    d0 = [i for i in range(len_2 + 1)]  # previous row
    d1 = [j for j in range(len_2 + 1)]  # row being computed
    dprev = d0[:]                       # row before the previous (for transpositions)

    s1 = string_1
    s2 = string_2

    for i in range(len_1):
        d1[0] = i + 1
        for j in range(len_2):
            cost = d0[j]

            if s1[i] != s2[j]:
                # substitution
                cost += 1

                # insertion
                x_cost = d1[j] + 1
                if x_cost < cost:
                    cost = x_cost

                # deletion
                y_cost = d0[j + 1] + 1
                if y_cost < cost:
                    cost = y_cost

                # transposition of adjacent characters
                if i > 0 and j > 0 and s1[i] == s2[j - 1] and s1[i - 1] == s2[j]:
                    transp_cost = dprev[j - 1] + 1
                    if transp_cost < cost:
                        cost = transp_cost
            d1[j + 1] = cost

        dprev, d0, d1 = d0, d1, dprev

    return d0[-1]


# The fastest implementation is the public default.
levenshtein = wfi_levenshtein


# Backward-compatibilty because I misspelled.
classic_levenschtein = classic_levenshtein
levenschtein = levenshtein
mbarkhau/pylev
setup.py
import os

# Prefer setuptools when available; fall back to plain distutils.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Packaging metadata for the single-module pylev distribution.
setup(
    name='pylev',
    version='1.3.0',
    description="A pure Python Levenshtein implementation that's not freaking GPL'd.",
    author='<NAME>',
    author_email='<EMAIL>',
    # Reuse the README next to this file as the PyPI long description.
    long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst'), 'r').read(),
    # Single module, no package directory.
    py_modules=['pylev'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        # That's right, works in Py3 (& PyPy) too!
        "Programming Language :: Python :: 3",
    ],
    url='http://github.com/toastdriven/pylev'
)
dvsu/image-manipulation
main.py
#!/usr/bin/python3
"""Batch-convert, resize and rotate all images found in a source directory."""
import os

from PIL import Image


class ImageManipulation:
    """Applies optional rotate/resize transforms to every recognized image in
    ``source_dir`` and saves the results into ``output_dir`` with the
    requested target extension.
    """

    def __init__(self, source_dir: str, output_dir: str, target_extension: str):
        self.__source_dir = source_dir
        self.__output_dir = output_dir
        self.__target_extension = target_extension
        # "" lets extension-less files through the filter as well.
        self.__recognized_extensions = ["", "jpg", "jpeg", "png", "bmp"]
        self._directory_check()
        self._extension_check()

    def _directory_check(self) -> None:
        """Validate the source dir, require at least one file, create output dir."""
        if not os.path.exists(self.__source_dir):
            raise NameError("Directory not found")

        if not os.listdir(self.__source_dir):
            raise FileNotFoundError(
                "No image files found in the source directory")

        if not os.path.exists(self.__output_dir):
            os.mkdir(self.__output_dir)

    def _extension_check(self) -> None:
        """Reject target extensions outside the vetted set."""
        if self.__target_extension not in self.__recognized_extensions:
            raise NameError(
                f"Target extension '{self.__target_extension}' is not recognized")

    def _rotate(self, img: Image, degrees: int) -> Image:
        """Rotate counter-clockwise by ``degrees`` (canvas size unchanged)."""
        return img.rotate(degrees)

    def _resize(self, img: Image, target_size: tuple) -> Image:
        """Resize to an absolute (width, length) pixel size."""
        return img.resize(target_size)

    def _resize_percent(self, img: Image, resize_percent_width: int, resize_percent_length: int) -> Image:
        # NOTE: despite the name, the arguments here are already absolute
        # pixel sizes -- the percentage math happens in ``transform``.
        return img.resize((resize_percent_width, resize_percent_length))

    def transform(self, rotate: int = 0, resize: tuple = (), resize_percent: int = 0) -> None:
        """Process every recognized image in the source directory.

        Args:
            rotate: degrees to rotate counter-clockwise (0 disables).
            resize: absolute (width, length) target size; () disables.
            resize_percent: percentage of the original size; 0 disables.

        Raises:
            TypeError: if ``resize`` is given but is not a 2-tuple.
        """
        for file in os.listdir(self.__source_dir):
            filename, file_ext = os.path.splitext(file)

            if file_ext.replace(".", "") not in self.__recognized_extensions:
                continue

            with Image.open(f"{self.__source_dir}/{file}") as img:
                if rotate:
                    img = self._rotate(img=img, degrees=rotate)

                if resize_percent:
                    width, length = img.size
                    img = self._resize_percent(
                        img=img,
                        resize_percent_width=int(width * resize_percent / 100),
                        resize_percent_length=int(length * resize_percent / 100))

                if resize:
                    if len(resize) != 2:
                        raise TypeError("resize has to be in '(width,length)'")
                    img = self._resize(img=img, target_size=resize)

                # BUG FIX: the output path previously contained a literal
                # placeholder instead of the source file's stem, so every
                # image overwrote the same output file.
                img.convert("RGB").save(
                    f"{self.__output_dir}/{filename}.{self.__target_extension}",
                    self.__target_extension.upper(),
                    quality=100)


if __name__ == "__main__":
    # Example usage; guarded so importing this module has no side effects.
    im = ImageManipulation(source_dir='path/to/source',
                           output_dir='path/to/output',
                           target_extension="jpeg")
    im.transform(resize=(1080, 720))
yokoyama-minami/toggl-to-kosu_sheet
toggleToKosu.py
"""Convert a Toggl time-tracking CSV export into a man-hour ("kosu") sheet CSV."""
import pandas as pd
import datetime as dt

# Load the Toggl report exported as CSV.
df = pd.read_csv('Toggl-report.csv', sep=',')

# Columns needed for the kosu sheet.
date = (df['Start date'])
time = (df['Duration'])
description = (df['Description'])
division = (df['Tags'])
replace_date = date.str.replace('-', '/')  # 2021-01-02 -> 2021/01/02
split_time = time.str.split(':')           # "HH:MM:SS" -> [HH, MM, SS]
kosu_data = []
now = dt.datetime.now()


def main():
    """Build the kosu rows and write them out as a timestamped CSV."""
    res_kosu()

    # Assemble the rows and write the output CSV.
    kosu_df = pd.DataFrame(kosu_data, columns=['日付', '業務内容', '工数(時間)', '工数区分'])
    kosu_df.to_csv('Toggl-to-kosu_{:%Y%m%d-%H%M%S}.csv'.format(now), sep=',', index=False)
    print('工数書き出し完了!', now)


def res_kosu():
    """Convert each entry's duration into decimal hours and collect the rows.

    Rounding rules: 30+ seconds round up to the next minute, and 55+ minutes
    round up to the next full hour.
    """
    for index, row in enumerate(df.index):
        # Renamed from hr/min/sec: ``min`` shadowed the builtin. The redundant
        # int(...) wrappers around literals were dropped as well.
        hours = int(split_time[index][0])
        minutes = int(split_time[index][1])
        seconds = int(split_time[index][2])

        if seconds > 30:
            minutes = minutes + 1

        if minutes >= 55:
            hours = hours + 1
            minutes = 0

        # Minutes as a fraction of an hour, kept at two decimals.
        minutes_as_hours = "{:.2f}".format(minutes / 60)

        # Final decimal man-hours, e.g. 1h40m -> 1.67.
        kosu = float(hours) + float(minutes_as_hours)
        kosu_data.append([replace_date[index], description[index], str(kosu), division[index]])
    return kosu_data


if __name__ == "__main__":
    main()
Detry322/map-creator
app/models/best_dcgan.py
from keras.models import Sequential, load_model from keras.layers import Dense, Reshape, Flatten, Conv2D, Conv2DTranspose, BatchNormalization, Activation from keras.layers.advanced_activations import LeakyReLU from keras.optimizers import Adam, SGD from keras.backend import clear_session import numpy as np import os import logging import time import tempfile from scipy import misc from app.models.base import BaseModel from app.utils import mkdir_p from app import GENERATED_TILES_FOLDER def make_untrainable(model): model.trainable = False for layer in model.layers: layer.trainable = False class BestDCGAN(BaseModel): EPOCHS = 1000 NOISE_SIZE = 100 MAX_BATCH_SIZE = 128 def _construct_model(self): self.trainable_discriminator = self._construct_discriminator() self.untrainable_discriminator = self._construct_discriminator() self.generator = self._construct_generator() self.model = self._construct_full(self.generator, self.untrainable_discriminator) self._compile() def _construct_from_file(self, filename): model = load_model(filename) self.generator = model.layers[0] self.untrainable_discriminator = model.layers[1] make_untrainable(self.untrainable_discriminator) self.model = model model_copy = load_model(filename) self.trainable_discriminator = model_copy.layers[1] self._compile() def _construct_generator(self): model = Sequential() model.add(Dense(input_dim=self.NOISE_SIZE, units=(4*4*1024))) model.add(Reshape((4, 4, 1024))) model.add(LeakyReLU(0.2)) model.add(Conv2DTranspose(512, 5, strides=2, padding='same')) model.add(LeakyReLU(0.2)) model.add(Conv2DTranspose(256, 5, strides=2, padding='same')) model.add(LeakyReLU(0.2)) model.add(Conv2DTranspose(128, 5, strides=2, padding='same')) model.add(LeakyReLU(0.2)) model.add(Conv2DTranspose(3, 5, strides=2, padding='same', activation='tanh')) return model def _construct_discriminator(self): model = Sequential() model.add(Conv2D(64, 5, strides=2, padding='same', input_shape=self.image_size)) model.add(LeakyReLU(0.2)) 
model.add(Conv2D(128, 5, strides=2, padding='same')) model.add(BatchNormalization()) model.add(LeakyReLU(0.2)) model.add(Conv2D(256, 5, strides=2, padding='same')) model.add(BatchNormalization()) model.add(LeakyReLU(0.2)) model.add(Conv2D(512, 5, strides=2, padding='same')) model.add(BatchNormalization()) model.add(LeakyReLU(0.2)) model.add(Reshape((4*4*512,))) model.add(Dense(1, activation='sigmoid')) return model def _construct_full(self, generator, discriminator): make_untrainable(discriminator) model = Sequential() model.add(generator) model.add(discriminator) return model def _compile(self): self.trainable_discriminator.compile(loss='binary_crossentropy', optimizer=Adam(lr=0.00001), metrics=['accuracy']) self.model.compile(loss='binary_crossentropy', optimizer=Adam(lr=0.0001), metrics=['accuracy']) def _copy_weights(self): self.untrainable_discriminator.set_weights(self.trainable_discriminator.get_weights()) def _generate_noise(self, num): return np.random.normal(0.0, 1.0, (num, self.NOISE_SIZE)) def _generate_batch(self, num): return self.generator.predict(self._generate_noise(num)) def generate_image(self): return np.clip(((self._generate_batch(1)[0] + 1)*128.0), 0, 255).astype('uint8') def _load_batch(self, size): return np.array([(next(self.image_loader)/127.5) - 1 for _ in range(size)]) def train(self): i = 0 model_name = "best_dcgan-{}".format(time.time()) folder = os.path.join(GENERATED_TILES_FOLDER, model_name) mkdir_p(folder) for epoch in range(self.EPOCHS): logging.info("=== Epoch {}".format(epoch)) for batch_base in range(0, len(self.image_loader), self.MAX_BATCH_SIZE): i += 1 batch_size = min(len(self.image_loader) - batch_base, self.MAX_BATCH_SIZE) logging.info("Training {} images".format(batch_size)) g_loss = float('inf') r_loss = float('inf') while g_loss + r_loss > 0.8: if g_loss >= r_loss: generated_images_batch_size = batch_size generated_images_X = self._generate_batch(generated_images_batch_size) generated_images_Y = 
np.array([0.0]*generated_images_batch_size) gen_loss = self.trainable_discriminator.train_on_batch(generated_images_X, generated_images_Y) logging.info("Discriminator gen. loss: {}".format(gen_loss)) g_loss = gen_loss[0] else: real_images_batch_size = batch_size real_images_X = self._load_batch(real_images_batch_size) real_images_Y = np.array([1.0]*real_images_batch_size) real_loss = self.trainable_discriminator.train_on_batch(real_images_X, real_images_Y) logging.info("Discriminator real loss: {}".format(real_loss)) r_loss = real_loss[0] logging.info("Copying weights...") self._copy_weights() generator_loss = float('inf') while generator_loss > (15.0 if i == 1 else 4.0): generator_batch_size = batch_size generator_X = self._generate_noise(generator_batch_size) generator_Y = np.array([1.0]*generator_batch_size) g_loss = self.model.train_on_batch(generator_X, generator_Y) generator_loss = g_loss[0] logging.info("Generator loss: {}".format(g_loss)) logging.info("Generating image...") filename = os.path.join(folder, '{:06d}.png'.format(i)) image = self.generate_image() misc.imsave(filename, image) misc.imsave(os.path.join(folder, '000000__current.png'), image) if i % 1000 == 0: logging.info("=== Writing model to disk") self.model.save("models/" + model_name + "-{}.h5".format(i)) logging.info("=== Writing model to disk") self.model.save(model_name)
Detry322/map-creator
app/data.py
import os, glob
import numpy as np
import random

from scipy import misc

from app import OUTPUT_TILES_FOLDER


class ZoomLoader(object):
    """Endless round-robin loader for the preprocessed tiles of one zoom level."""

    def __init__(self, zoom):
        # All preprocessed tiles for this zoom level: <OUTPUT>/<zoom>/<x>/<y>.png
        self.files = glob.glob(os.path.join(OUTPUT_TILES_FOLDER, str(zoom), '*', '*.png'))
        self.i = -1  # index of the most recently served tile

    def __iter__(self):
        return self

    def __len__(self):
        return len(self.files)

    def random(self):
        """Return one tile chosen uniformly at random, as a numpy array."""
        return misc.imread(random.choice(self.files))

    def next(self):
        """Return the next tile in file order, wrapping around forever."""
        self.i += 1
        self.i %= len(self)
        return misc.imread(self.files[self.i])

    # BUG FIX: the class advertises itself as an iterator via __iter__, but
    # Python 3 looks for __next__, not next. Alias it so iteration (and the
    # builtin next()) works on both Python 2 and 3.
    __next__ = next
Detry322/map-creator
app/download.py
import os, requests, subprocess, tempfile, math, glob from PIL import Image from app import CITIES, INPUT_TILES_FOLDER, TILE_DOWNLOADER MAX_TILES_PER_CITY = 5000 TILE_SERVER = "OpenCycleMap" MAP_API_BASE = 'http://maps.googleapis.com/maps/api/geocode/json' XML_BASE = """<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd"> <properties> <entry key="Type">BBoxLatLon</entry> <entry key="OutputLocation">{}</entry> <entry key="TileServer">{}</entry> <entry key="MaxLat">{}</entry> <entry key="MaxLon">{}</entry> <entry key="MinLat">{}</entry> <entry key="MinLon">{}</entry> <entry key="OutputZoomLevel">{}</entry> </properties> """ def deg2num(lat_deg, lon_deg, zoom): lat_rad = math.radians(float(lat_deg)) n = 2.0 ** zoom xtile = int((float(lon_deg) + 180.0) / 360.0 * n) ytile = int((1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi) / 2.0 * n) return (xtile, ytile) def get_city_bounds(city): result = requests.get(MAP_API_BASE, params={'address': city}) assert result.status_code == 200, "Maps API could not find city" assert len(result.json()['results']) > 0, "Maps API could not find city" return result.json()['results'][0]['geometry']['bounds'] def check_city_bounds(city_bounds, zoom): xne, yne = deg2num(city_bounds['northeast']['lat'], city_bounds['northeast']['lng'], zoom) xsw, ysw = deg2num(city_bounds['southwest']['lat'], city_bounds['southwest']['lng'], zoom) return xne > xsw and yne < ysw and (xne - xsw)*(ysw - yne) < MAX_TILES_PER_CITY def get_xml(city_bounds, zoom): return XML_BASE.format( INPUT_TILES_FOLDER, TILE_SERVER, city_bounds['northeast']['lat'], city_bounds['northeast']['lng'], city_bounds['southwest']['lat'], city_bounds['southwest']['lng'], zoom ) def create_city_xml_file(city_bounds, zoom): fd, filename = tempfile.mkstemp() with os.fdopen(fd, 'w') as f: f.write(get_xml(city_bounds, zoom)) return filename def download_city(city, city_bounds, zoom): if not 
check_city_bounds(city_bounds, zoom): raise Exception("City's limits are invalid") xml_file = create_city_xml_file(city_bounds, zoom) subprocess.check_call( ['java', '-jar', TILE_DOWNLOADER, 'dl=' + xml_file] ) os.unlink(xml_file) def prune_tiles(): glob_path = os.path.join(INPUT_TILES_FOLDER, '*', '*', '*.png') for filename in glob.glob(glob_path): try: Image.open(filename).verify() except IOError: print "Pruning {}...".format(filename) os.unlink(filename) def download_tiles(zoom): for city in CITIES: try: print city city_bounds = get_city_bounds(city) download_city(city, city_bounds, zoom) except KeyboardInterrupt: raise except: print "Error downloading city: " + city
Detry322/map-creator
app/generate.py
<filename>app/generate.py
# Stream freshly generated map tiles from a trained model checkpoint to disk.
from app.models import all_models
from app.utils import mkdir_p
from app import GENERATED_TILES_FOLDER

from scipy import misc

import os
import time


def generate_tiles(model_type, model_file):
    # Restore the trained model; image_size/loader are unused when loading
    # from a file, hence the Nones.
    ModelClass = all_models[model_type]
    model = ModelClass(None, None, model_file=model_file)
    # Each run writes into its own timestamped output folder.
    folder = os.path.join(GENERATED_TILES_FOLDER, '{}'.format(time.time()))
    mkdir_p(folder)
    i = 0
    # Generate tiles forever; the user stops the stream with Ctrl-C.
    while True:
        try:
            print "Generating {}...".format(i)
            filename = os.path.join(folder, '{}.png'.format(i))
            misc.imsave(filename, model.generate_image())
            i += 1
        except KeyboardInterrupt:
            # Ctrl-C is the expected way to end the stream.
            exit(0)
Detry322/map-creator
app/__main__.py
import argparse import logging logging.basicConfig(level=logging.INFO) import numpy as np np.random.seed(234421) from app.data import ZoomLoader from app.download import download_tiles, prune_tiles from app.preprocess import preprocess_tiles from app.train import train_model from app.generate import generate_tiles from app.backprop import backprop from app.forwardprop import forwardprop from app.random import random def get_args(): parser = argparse.ArgumentParser(description='map-creator uses DCGANs to generate pictures of map tiles') parser.add_argument('--zoom', help='Zoom size', type=str, default='13') parser.add_argument('--download', help='Download tiles', action='store_true') parser.add_argument('--preprocess', help='Preprocessing args', nargs='+') parser.add_argument('--backprop', help='backprop', action='store_true') parser.add_argument('--random', help='random', action='store_true') parser.add_argument('--forwardprop', help='forwardprop', action='store_true') parser.add_argument('--train', help='Train tiles', action='store_true') parser.add_argument('--model_type', help='The model to train/generate with', type=str, default='BestDCGAN') parser.add_argument('--model_file', help='The h5 model file', type=str) parser.add_argument('--generate', help='Generating tiles', action='store_true') return parser.parse_args() def main(): args = get_args() if args.download: print 'Downloading tiles...' download_tiles(args.zoom) print 'Pruning tiles...' prune_tiles() if args.preprocess: print 'Preprocessing tiles...' preprocess_tiles(args.zoom, *args.preprocess) if args.train: print "Training model..." train_model(args.model_type, args.zoom, model_file=args.model_file) if args.generate: print "Generating tiles until Control-C'd..." generate_tiles(args.model_type, args.model_file) if args.backprop: print "Backproping until Control-C'd..." backprop(args.model_file, args.zoom) if args.forwardprop: print "Forwardproping until Control-C'd..." 
forwardprop(args.model_file, args.zoom) if args.random: print "Doing random stuff" random(args.model_file) if __name__ == '__main__': main()
Detry322/map-creator
app/models/autoencoder.py
from keras.models import Sequential from keras.callbacks import ModelCheckpoint from keras.layers import Dense, Reshape, Flatten from keras.layers.advanced_activations import LeakyReLU from keras.layers.normalization import BatchNormalization from keras.layers.convolutional import UpSampling2D, Convolution2D, MaxPooling2D from keras.layers.core import Flatten from keras.optimizers import SGD import time import numpy as np import logging from app.models.base import BaseModel class Autoencoder(BaseModel): EPOCHS = 100 NOISE_SIZE = 100 MAX_BATCH_SIZE = 100 VALIDATION_SIZE = 10 def _construct_model(self): model = Sequential() model.add(Convolution2D(32, 5, 5, border_mode='same', subsample=(2,2), input_shape=(64,64,3))) model.add(LeakyReLU(0.2)) model.add(MaxPooling2D(pool_size=(2, 2), dim_ordering="tf")) model.add(Convolution2D(32, 5, 5)) model.add(LeakyReLU(0.2)) model.add(MaxPooling2D(pool_size=(2, 2), dim_ordering="tf")) model.add(Convolution2D(32, 5, 5)) model.add(LeakyReLU(0.2)) model.add(MaxPooling2D(pool_size=(2, 2), dim_ordering="tf")) model.add(Flatten()) model.add(Dense(8*8)) model.add(LeakyReLU(0.2)) model.add(Reshape((8, 8, 1))) model.add(Convolution2D(32, 5, 5, border_mode='same')) model.add(LeakyReLU(0.2)) model.add(UpSampling2D(size=(2, 2))) model.add(Convolution2D(32, 5, 5, border_mode='same')) model.add(LeakyReLU(0.2)) model.add(UpSampling2D(size=(2, 2))) model.add(Convolution2D(32, 5, 5, border_mode='same')) model.add(LeakyReLU(0.2)) model.add(UpSampling2D(size=(2, 2))) model.add(Convolution2D(3, 5, 5, border_mode='same', activation='sigmoid')) model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy']) self.model = model def _train_generator(self): while True: image = np.array([next(self.loader) for i in range(7)]) yield (image, image) def train(self): validation_images = np.array([self.loader.random()/255.0 for _ in range(self.VALIDATION_SIZE)]) checkpoint = ModelCheckpoint('autoencoder-{}.h5'.format(time.time()), 
monitor='val_acc', verbose=1, save_best_only=True, mode='max') self.model.fit_generator( self._train_generator(), samples_per_epoch=len(self.loader), nb_epoch=self.EPOCHS, validation_data=(validation_images, validation_images), callbacks=[checkpoint])
Detry322/map-creator
app/models/basic_dcgan.py
from keras.models import Sequential, load_model from keras.layers import Dense, Reshape, Flatten from keras.layers.advanced_activations import LeakyReLU from keras.layers.normalization import BatchNormalization from keras.layers.convolutional import UpSampling2D, Convolution2D, MaxPooling2D from keras.layers.core import Flatten from keras.optimizers import SGD from keras.backend import clear_session import numpy as np import os import logging import time import tempfile from app.models.base import BaseModel class BasicDCGAN(BaseModel): EPOCHS = 1000 NOISE_SIZE = 100 MAX_BATCH_SIZE = 512 def _construct_model(self): self.generator_model = self._construct_generator() self.discriminator_model = self._construct_discriminator() self.model = self._construct_full(self.generator_model, self.discriminator_model) self._compile() def _construct_from_file(self, filename): self.model = load_model(filename) self.generator_model = self.model.layers[0] self.discriminator_model = self.model.layers[1] self._compile() def _construct_generator(self): model = Sequential() model.add(Dense(input_dim=self.NOISE_SIZE, output_dim=(16*16), activation='relu')) model.add(Reshape((16, 16, 1))) model.add(UpSampling2D(size=(2, 2))) model.add(Convolution2D(64, 5, 5, border_mode='same', activation='relu')) model.add(UpSampling2D(size=(2, 2))) model.add(Convolution2D(3, 5, 5, border_mode='same', activation='sigmoid')) return model def _construct_discriminator(self): model = Sequential() model.add(Convolution2D(64, 5, 5, border_mode='same', subsample=(2,2), input_shape=self.image_size)) model.add(LeakyReLU(0.2)) model.add(Convolution2D(64, 5, 5, border_mode='same', subsample=(2,2))) model.add(LeakyReLU(0.2)) model.add(Flatten()) model.add(Dense(128)) model.add(Dense(1, activation='sigmoid')) return model def _construct_full(self, generator, discriminator): model = Sequential() model.add(generator) model.add(discriminator) return model def _compile(self): 
self.generator_model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy']) self.discriminator_model.compile(loss='binary_crossentropy', optimizer=SGD(lr=0.0005, momentum=0.9, nesterov=True), metrics=['accuracy']) self.model.compile(loss='binary_crossentropy', optimizer=SGD(lr=0.0005, momentum=0.9, nesterov=True), metrics=['accuracy']) def _reset_memory(self): logging.info("=== Resetting memory footprint") t, name = tempfile.mkstemp() os.close(t) self.model.save(name) clear_session() self._construct_from_file(name) os.unlink(name) def _generate_batch(self, num): noise = np.random.uniform(-1, 1, (num, self.NOISE_SIZE)) return self.generator_model.predict(noise) def generate_image(self): return (self._generate_batch(1)[0]*256.0).astype('uint8') def train(self): model_name = "basic_dcgan-{}.h5".format(time.time()) i = 0 for epoch in range(self.EPOCHS): logging.info("=== Epoch {}".format(epoch)) for batch_base in range(0, len(self.image_loader), self.MAX_BATCH_SIZE): if i % 32 == 0: self._reset_memory() i += 1 batch_size = min(len(self.image_loader) - batch_base, self.MAX_BATCH_SIZE) logging.info("Training {} images".format(batch_size)) # first, train discriminator self.discriminator_model.trainable = True self.discriminator_model.compile(loss='binary_crossentropy', optimizer=SGD(lr=0.0005, momentum=0.9, nesterov=True), metrics=['accuracy']) images = np.array([next(self.image_loader)/255.0 for _ in range(batch_size)]) generated_images = self._generate_batch(batch_size) discriminator_X = np.concatenate((images, generated_images)) discriminator_Y = np.array([1]*batch_size + [0]*batch_size) discriminator_loss = self.discriminator_model.train_on_batch(discriminator_X, discriminator_Y) logging.info("Discriminator Loss: {}".format(discriminator_loss)) # next, train generator self.discriminator_model.trainable = False self.model.compile(loss='binary_crossentropy', optimizer=SGD(lr=0.0005, momentum=0.9, nesterov=True), metrics=['accuracy']) full_X = 
np.random.uniform(-1, 1, (batch_size, self.NOISE_SIZE)) full_Y = np.array([1]*batch_size) full_loss = self.model.train_on_batch(full_X, full_Y) logging.info("Full loss: {}".format(full_loss)) logging.info("=== Writing model to disk") self.model.save(model_name)
Detry322/map-creator
app/models/base.py
class BaseModel(object):
    """Abstract base class for the tile-generating models.

    Subclasses must implement ``_construct_model`` (build a fresh model),
    ``_construct_from_file`` (restore one from disk), ``train`` and
    ``generate_image``.
    """

    def __init__(self, image_size, image_loader, model_file=None):
        self.model = None                 # set by the _construct_* hooks
        self.image_size = image_size      # shape of the training tiles
        self.image_loader = image_loader  # iterator yielding training images
        if model_file is not None:
            self._construct_from_file(model_file)
        else:
            self._construct_model()

    def _construct_from_file(self, filename):
        """Restore the model from a saved file."""
        raise NotImplementedError("This was never overridden.")

    def _construct_model(self):
        # CONSISTENCY FIX: dropped the stray ``image_size`` parameter --
        # __init__ invokes this hook with no arguments, so the extra
        # parameter could never be supplied by callers.
        raise NotImplementedError("This was never overridden.")

    def train(self):
        """Run the training loop."""
        raise NotImplementedError("This was never overridden.")

    def generate_image(self):
        """Return one generated image as an array."""
        raise NotImplementedError("This was never overridden.")
Detry322/map-creator
app/preprocess.py
import os, glob
import numpy as np

from scipy import misc

from app import INPUT_TILES_FOLDER, OUTPUT_TILES_FOLDER
from app.utils import mkdir_p


def greyscale(img, *args):
    """Collapse an RGB image to a single channel by averaging the color axis."""
    return img.mean(axis=2)


def resize(img, *args):
    """Resize by a fractional factor (first arg, default 0.5) using Lanczos."""
    p = float(args[0]) if len(args) > 0 else .5
    return misc.imresize(img, p, interp='lanczos')


# Registry of preprocessing steps addressable from the command line.
all_functions = {
    'greyscale': greyscale,
    'resize': resize,
}


def make_func(func, f_args):
    """Bind a preprocessing function to its CLI string arguments."""
    return lambda x: func(x, *f_args)


def parse_args(args):
    """Parse a flat CLI arg list like ['resize', '0.5', 'greyscale'] into callables.

    Each recognized function name starts a new step; tokens until the next
    name become that step's arguments. Tokens before the first recognized
    name are discarded.
    """
    functions = []
    func = None
    f_args = []
    for arg in args:
        if arg in all_functions:
            if func:
                functions.append(make_func(func, f_args))
            func = all_functions[arg]
            f_args = []
        else:
            f_args.append(arg)
    # BUG FIX: only flush the trailing step if one was actually started;
    # previously an arg list with no recognized function name produced a
    # callable wrapping ``None`` that crashed when invoked.
    if func:
        functions.append(make_func(func, f_args))
    return functions


def write_processed_image(processed_image, input_filename):
    """Save under OUTPUT, mirroring the INPUT folder layout (dirs created)."""
    output_filename = input_filename.replace(INPUT_TILES_FOLDER, OUTPUT_TILES_FOLDER)
    folder, _ = os.path.split(output_filename)
    mkdir_p(folder)
    with open(output_filename, 'w+') as f:
        misc.imsave(f, processed_image)


def preprocess_tile(functions, input_filename):
    """Run every step over one tile; a failing tile is skipped, not fatal."""
    try:
        img = misc.imread(input_filename, mode='RGB')
        for func in functions:
            img = func(img)
        write_processed_image(img, input_filename)
    except Exception:
        # Best-effort: a corrupt tile should not abort the whole batch.
        # (Narrowed from a bare ``except:`` so Ctrl-C still interrupts.)
        pass


def preprocess_tiles(zoom, *args):
    """Apply the parsed preprocessing pipeline to every tile of one zoom level."""
    functions = parse_args(args)
    input_files = glob.glob(os.path.join(INPUT_TILES_FOLDER, str(zoom), '*', '*.png'))
    for input_filename in input_files:
        preprocess_tile(functions, input_filename)
Detry322/map-creator
app/train.py
from app.data import ZoomLoader
from app.models import all_models


def train_model(model_type, zoom_level, model_file=None):
    """Instantiate the requested model over one zoom level's tiles and train it.

    ``model_file``, when given, resumes from a model saved on disk.
    """
    model_cls = all_models[model_type]
    tile_loader = ZoomLoader(zoom_level)
    # Probe one random tile so the model knows the array shape it will be fed.
    tile_shape = tile_loader.random().shape
    model_cls(tile_shape, tile_loader, model_file=model_file).train()
Detry322/map-creator
app/backprop.py
"""Recover, by gradient descent, the latent noise vector that makes a trained
GAN generator reproduce a given output tile ("backprop" the image into
latent space). Results are saved as text files for later reuse."""
from app.models import all_models
from app.utils import mkdir_p
from app import GENERATED_TILES_FOLDER
from scipy import misc
import glob
import numpy as np
import os

from keras.models import load_model, Model
from keras.optimizers import Adam, SGD, Adagrad
from keras.layers import LocallyConnected1D, Input, Reshape

from app import OUTPUT_TILES_FOLDER, BACKPROPS_FOLDER
from app.utils import mkdir_p

# Length of the generator's latent (noise) input vector.
NOISE_SIZE = 100


def backprop_single_image(generator, image):
    """Fit a latent vector so that generator(latent) approximates `image`.

    Trick: the frozen generator is placed behind a LocallyConnected1D layer
    whose weights ARE the latent vector — feeding a constant all-ones input
    through it yields exactly those weights, so training this tiny layer
    with MSE loss performs gradient descent on the latent vector itself.

    Returns the trained layer's weights as returned by ``get_weights()``.
    """
    i = Input(shape=(100, 1))
    # One trainable scalar per latent dimension, no bias.
    local = LocallyConnected1D(1, 1, use_bias=False)
    l = local(i)
    r = Reshape((100,))(l)
    output = generator(r)
    model = Model(inputs=i, outputs=output)
    # Very large learning rate is tolerable: only NOISE_SIZE scalars train.
    model.compile(loss='mean_squared_error', optimizer=SGD(lr=75))
    X = np.array([[[1.0]]*NOISE_SIZE])
    Y = np.array([image])
    model.fit(X, Y, epochs=200)
    return local.get_weights()


def backprop(model_file, zoom):
    """Backprop every output tile at `zoom` into a saved latent vector.

    Output layout mirrors the tile folders:
    OUTPUT_TILES_FOLDER/<zoom>/x/y.png -> BACKPROPS_FOLDER/<zoom>/x/y.png.txt
    """
    input_files = glob.glob(os.path.join(OUTPUT_TILES_FOLDER, str(zoom), '*', '*.png'))
    model = load_model(model_file)
    # Convention in this project: the generator is layer 0 of the saved GAN.
    generator = model.layers[0]
    # Freeze the generator so only the latent-vector layer is trained.
    generator.trainable = False
    for layer in generator.layers:
        layer.trainable = False
    for filname in input_files:
        output_filename = filname.replace(OUTPUT_TILES_FOLDER, BACKPROPS_FOLDER) + '.txt'
        folder, _ = os.path.split(output_filename)
        mkdir_p(folder)
        # Scale uint8 pixels [0, 255] into [-1, 1] (matches a tanh-output
        # generator — TODO confirm against the model definitions).
        image = misc.imread(filname, mode='RGB') / 127.5 - 1
        weights = backprop_single_image(generator, image)
        # NOTE(review): np.savetxt accepts only 1-D/2-D data; verify the
        # shape of get_weights() here matches what np.loadtxt consumers
        # (app.random, app.forwardprop) expect.
        np.savetxt(output_filename, weights)
Detry322/map-creator
app/random.py
"""Generate novel tiles via vector arithmetic in the generator's latent
space (word2vec-style analogy: api_key_water - no_api_key_water +
no_api_key_trees ~= api_key_trees).

NOTE(review): this module is named `random`, shadowing the standard
library's `random` for imports resolved inside the package — confirm
nothing in `app` relies on stdlib random.
"""
from app.models import all_models
from app.utils import mkdir_p
from app import GENERATED_TILES_FOLDER, RANDOM_FOLDER, BACKPROPS_FOLDER
from scipy import misc
import glob
import numpy as np
import os

from keras.models import load_model, Model
from keras.optimizers import Adam, SGD, Adagrad
from keras.layers import LocallyConnected1D, Input, Reshape

from app import BACKPROPS_FOLDER, FORWARDPROPS_FOLDER, RANDOM_FOLDER
from app.utils import mkdir_p

from app.forwardprop import forwardprop_single_image

# Length of the generator's latent (noise) input vector.
NOISE_SIZE = 100

import time


def random(model_file):
    """Render latent-space analogies for every api/no-api water pair.

    Loads latent vectors previously produced by app.backprop, combines each
    (a, na) pair with a fixed "trees" vector, runs the result through the
    frozen generator, and writes PNGs into a timestamped RANDOM_FOLDER dir.
    """
    model = load_model(model_file)
    # Convention in this project: the generator is layer 0 of the saved GAN.
    generator = model.layers[0]
    generator.trainable = False
    for layer in generator.layers:
        layer.trainable = False
    # Latent vectors recovered by app.backprop for the two tile styles.
    api_key_water = [np.loadtxt(filename) for filename in glob.glob(os.path.join(BACKPROPS_FOLDER, 'api_key', 'water', '*.txt'))]
    no_api_key_water = [np.loadtxt(filename) for filename in glob.glob(os.path.join(BACKPROPS_FOLDER, 'no_api_key', 'water', '*.txt'))]
    # Single hard-coded reference tile for the "trees" style.
    no_api_key_trees = np.loadtxt(os.path.join(BACKPROPS_FOLDER, 'no_api_key', 'trees', '3391.png.txt'))
    # Timestamped output directory so repeated runs never collide.
    folder = os.path.join(RANDOM_FOLDER, '{}'.format(time.time()))
    mkdir_p(folder)
    for a in api_key_water:
        for na in no_api_key_water:
            # Latent analogy: transfer the "api_key" style onto trees.
            api_key_trees = a - na + no_api_key_trees
            image = forwardprop_single_image(generator, api_key_trees)
            # Map generator output [-1, 1] back to uint8 pixels [0, 255].
            misc.imsave(os.path.join(folder, 'land-{}.png'.format(time.time())), ((image + 1)*128).astype('uint8'))
Detry322/map-creator
app/__init__.py
"""Project-wide configuration: on-disk folder layout, bundled dependencies,
and the list of cities whose map tiles serve as training data.

Bug fix: removed the leading `<reponame>...` scrape artifact that made this
module a syntax error.
"""
import os

# Repository root (parent directory of this package).
BASE_FOLDER = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))

INPUT_TILES_FOLDER = os.path.join(BASE_FOLDER, 'input_tiles')
OUTPUT_TILES_FOLDER = os.path.join(BASE_FOLDER, 'output_tiles')
BACKPROPS_FOLDER = os.path.join(BASE_FOLDER, 'backprops')
FORWARDPROPS_FOLDER = os.path.join(BASE_FOLDER, 'forwardprops')
GENERATED_TILES_FOLDER = os.path.join(BASE_FOLDER, 'generated_tiles')
DEPENDENCY_FOLDER = os.path.join(BASE_FOLDER, 'deps')
RANDOM_FOLDER = os.path.join(BASE_FOLDER, 'random')

# Bundled jar used to download map tiles.
TILE_DOWNLOADER = os.path.join(DEPENDENCY_FOLDER, 'jTileDownloader-0-6-1.jar')

# Cities whose tiles are downloaded as training data.
CITIES = [
    "Bangkok, Thailand",
    "Barcelona, Spain",
    "Berlin, Germany",
    "Bogota, Colombia",
    "Boston, Massachusetts",
    "Buenos Aires, Argentina",
    "Cairo, Egypt",
    "Chicago, IL",
    "Delhi, India",
    "Dhaka, Bangladesh",
    "Hanoi, Vietnam",
    "Ho Chi Minh City, Vietnam",
    "Hong Kong",
    "Houston, Texas",
    "Istanbul, Turkey",
    "Kolkata, India",
    "Kuala Lumpur, Malaysia",
    "Kyoto, Japan",
    "Lagos, Nigeria",
    "Las Vegas, Nevada",
    "Lima, Peru",
    "London, England",
    "Los Angeles, California",
    "Madrid, Spain",
    "Manila, Philippines",
    "Melbourne, Australia",
    "Mexico City, Mexico",
    "Milan, Italy",
    "Montreal, Canada",
    "Moscow, Russia",
    "Mumbai, India",
    "New Orleans, Louisiana",
    "New York, New York",
    "Osaka, Japan",
    "Paris, France",
    "Philadelphia, Pennsylvania",
    "Phoenix, Arizona",
    "Rio de Janeiro, Brazil",
    "San Antonio, Texas",
    "San Diego, California",
    "San Francisco, California",
    "Santiago, Chile",
    "Sao Paulo, Brazil",
    "Seattle, Washington",
    "Seoul, South Korea",
    "Shanghai, China",
    "Singapore, Singapore",
    "Taipei, Taiwan",
    "Tokyo, Japan",
    "Toronto, Canada",
    "Washington D.C.",
    "Wuhan, China",
]
Detry322/map-creator
app/models/__init__.py
# Registry of trainable model architectures, keyed by the name accepted on
# the command line (consumed by app.train.train_model).
from app.models.basic_dcgan import BasicDCGAN
from app.models.better_dcgan import BetterDCGAN
from app.models.best_dcgan import BestDCGAN
from app.models.autoencoder import Autoencoder

all_models = {
    'BasicDCGAN': BasicDCGAN,
    'Autoencoder': Autoencoder,
    'BetterDCGAN': BetterDCGAN,
    'BestDCGAN': BestDCGAN
}
Detry322/map-creator
app/forwardprop.py
"""Render saved latent vectors back into images by running them through a
trained GAN generator ("forward prop" — the inverse of app.backprop)."""
from app.models import all_models
from app.utils import mkdir_p
from app import GENERATED_TILES_FOLDER
from scipy import misc
import glob
import numpy as np
import os

from keras.models import load_model, Model
from keras.optimizers import Adam, SGD, Adagrad
from keras.layers import LocallyConnected1D, Input, Reshape

from app import BACKPROPS_FOLDER, FORWARDPROPS_FOLDER
from app.utils import mkdir_p

# Length of the generator's latent (noise) input vector.
NOISE_SIZE = 100


def forwardprop_single_image(generator, inp):
    """Run one latent vector through the generator, returning one image."""
    return generator.predict(np.array([inp]))[0]


def forwardprop(model_file, zoom):
    """Render every backprop latent vector at `zoom` into a PNG.

    Layout mirrors the backprop folders:
    BACKPROPS_FOLDER/<zoom>/x/y.txt -> FORWARDPROPS_FOLDER/<zoom>/x/y.txt.png
    """
    input_files = glob.glob(os.path.join(BACKPROPS_FOLDER, str(zoom), '*', '*.txt'))
    model = load_model(model_file)
    # Convention in this project: the generator is layer 0 of the saved GAN.
    generator = model.layers[0]
    generator.compile(loss='mean_squared_error', optimizer=Adagrad(lr=1))
    for input_filename in input_files:
        # Bug fix: the original used the Python-2-only statement form
        # `print "Generating..."`; the call form works on both 2 and 3.
        print("Generating...")
        output_filename = input_filename.replace(BACKPROPS_FOLDER, FORWARDPROPS_FOLDER) + '.png'
        folder, _ = os.path.split(output_filename)
        mkdir_p(folder)
        inp = np.loadtxt(input_filename)
        image = forwardprop_single_image(generator, inp)
        # Map generator output [-1, 1] back to uint8 pixels [0, 255].
        image = ((image + 1)*128).astype('uint8')
        misc.imsave(output_filename, image)
sanzoghenzo/poetry2conda
poetry2conda/convert.py
"""Convert a poetry-managed pyproject.toml into a conda environment.yaml."""
import argparse
import contextlib
import pathlib
import sys
from datetime import datetime
from typing import Mapping, TextIO, Tuple, Iterable, Optional

import semver
import toml

from poetry2conda import __version__


def convert(
    file: TextIO, include_dev: bool = False, extras: Optional[Iterable[str]] = None
) -> str:
    """ Convert a pyproject.toml file to a conda environment YAML

    This is the main function of poetry2conda, where all parsing, converting,
    etc. gets done.

    Parameters
    ----------
    file
        A file-like object containing a pyproject.toml file.
    include_dev
        Whether to include the dev dependencies in the resulting environment.
    extras
        The name of extras to include in the output. Can be None or empty
        for no extras.

    Returns
    -------
    The contents of an environment.yaml file as a string.
    """
    if extras is None:
        extras = []
    poetry2conda_config, poetry_config = parse_pyproject_toml(file)
    env_name = poetry2conda_config["name"]
    poetry_dependencies = poetry_config.get("dependencies", {})
    if include_dev:
        poetry_dependencies.update(poetry_config.get("dev-dependencies", {}))
    poetry_extras = poetry_config.get("extras", {})
    # We mark the items listed in the selected extras as non-optional.
    # Note: this mutates the dep dicts of the parsed toml in place.
    for extra in extras:
        for item in poetry_extras[extra]:
            dep = poetry_dependencies[item]
            if isinstance(dep, dict):
                dep["optional"] = False
    conda_constraints = poetry2conda_config.get("dependencies", {})
    dependencies, pip_dependencies = collect_dependencies(
        poetry_dependencies, conda_constraints
    )
    conda_yaml = to_yaml_string(env_name, dependencies, pip_dependencies)
    return conda_yaml


def convert_version(spec_str: str) -> str:
    """ Convert a poetry version spec to a conda-compatible version spec.

    Poetry accepts tilde and caret version specs, but conda does not support
    them. This function uses the `poetry-semver` package to parse it and
    transform it to regular version spec ranges.

    Parameters
    ----------
    spec_str
        A poetry version specification string.

    Returns
    -------
    The same version specification without tilde or caret.
    """
    spec = semver.parse_constraint(spec_str)
    if isinstance(spec, semver.Version):
        converted = f"=={str(spec)}"
    elif isinstance(spec, semver.VersionRange):
        converted = str(spec)
    elif isinstance(spec, semver.VersionUnion):
        raise ValueError("Complex version constraints are not supported at the moment.")
    # NOTE(review): if `spec` is none of the three types above, `converted`
    # is unbound and this raises UnboundLocalError; consider a final
    # `else: raise ValueError(...)` branch.
    return converted


def parse_pyproject_toml(file: TextIO) -> Tuple[Mapping, Mapping]:
    """ Parse a pyproject.toml file

    This function assumes that the pyproject.toml contains a poetry and
    poetry2conda config sections.

    Parameters
    ----------
    file
        A file-like object containing a pyproject.toml file.

    Returns
    -------
    A tuple with the poetry2conda and poetry config.

    Raises
    ------
    RuntimeError
        When an expected configuration section is missing.
    """
    pyproject_toml = toml.loads(file.read())
    poetry_config = pyproject_toml.get("tool", {}).get("poetry", {})
    if not poetry_config:
        raise RuntimeError(f"tool.poetry section was not found on {file.name}")
    poetry2conda_config = pyproject_toml.get("tool", {}).get("poetry2conda", {})
    if not poetry2conda_config:
        raise RuntimeError(f"tool.poetry2conda section was not found on {file.name}")
    if "name" not in poetry2conda_config or not isinstance(
        poetry2conda_config["name"], str
    ):
        raise RuntimeError(f"tool.poetry2conda.name entry was not found on {file.name}")
    return poetry2conda_config, poetry_config


def collect_dependencies(
    poetry_dependencies: Mapping, conda_constraints: Mapping
) -> Tuple[Mapping, Mapping]:
    """ Organize and apply conda constraints to dependencies

    Parameters
    ----------
    poetry_dependencies
        A dictionary with dependencies as declared with poetry.
    conda_constraints
        A dictionary with conda constraints as declared with poetry2conda.

    Returns
    -------
    A tuple with the modified dependencies and the dependencies that must
    be installed with pip.
    """
    dependencies = {}
    pip_dependencies = {}
    # 1. Do a first pass to change pip to conda packages: a git dependency
    # with a conda version constraint becomes a plain versioned dependency.
    for name, conda_dict in conda_constraints.items():
        if name in poetry_dependencies and "git" in poetry_dependencies[name]:
            poetry_dependencies[name] = conda_dict["version"]
    # 2. Now do the conversion
    for name, constraint in poetry_dependencies.items():
        if isinstance(constraint, str):
            dependencies[name] = convert_version(constraint)
        elif isinstance(constraint, dict):
            # Optional deps are skipped unless convert() cleared the flag
            # because they belong to a requested extra.
            if constraint.get("optional", False):
                continue
            if "git" in constraint:
                git = constraint["git"]
                tag = constraint["tag"]
                pip_dependencies[f"git+{git}@{tag}#egg={name}"] = None
            elif "version" in constraint:
                dependencies[name] = convert_version(constraint["version"])
            else:
                raise ValueError(
                    f"This converter only supports normal dependencies and "
                    f"git dependencies. No path, url, python restricted, "
                    f"environment markers or multiple constraints. In your "
                    f'case, check the "{name}" dependency. Sorry.'
                )
        else:
            raise ValueError(
                f"This converter only supports normal dependencies and "
                f"git dependencies. No multiple constraints. In your "
                f'case, check the "{name}" dependency. Sorry.'
            )
        if name in conda_constraints:
            conda_dict = conda_constraints[name]
            # Optional rename (conda package name differs from pypi name).
            if "name" in conda_dict:
                new_name = conda_dict["name"]
                dependencies[new_name] = dependencies.pop(name)
                name = new_name
            # do channel last, because it may move from dependencies to
            # pip_dependencies
            if "channel" in conda_dict:
                channel = conda_dict["channel"]
                if channel == "pip":
                    pip_dependencies[name] = dependencies.pop(name)
                else:
                    new_name = f"{channel}::{name}"
                    dependencies[new_name] = dependencies.pop(name)
    # conda needs an explicit `pip` entry whenever pip deps are present.
    if pip_dependencies:
        dependencies["pip"] = None
    return dependencies, pip_dependencies


def to_yaml_string(
    env_name: str, dependencies: Mapping, pip_dependencies: Mapping
) -> str:
    """ Converts dependencies to a string in YAML format.

    Note that there is no third party library to manage the YAML format. This
    is to avoid an additional package dependency (like pyyaml, which is
    already one of the packages that behaves badly in conda+pip mixed
    environments). But also because our YAML is very simple

    Parameters
    ----------
    env_name
        Name for the conda environment.
    dependencies
        Regular conda dependencies.
    pip_dependencies
        Pure pip dependencies.

    Returns
    -------
    A string with an environment.yaml definition usable by conda.
    """
    deps_str = []
    for name, version in dependencies.items():
        version = version or ""
        deps_str.append(f"  - {name}{version}")
    if pip_dependencies:
        # NOTE(review): the f-prefix on the next literal is unnecessary
        # (no placeholders); harmless.
        deps_str.append(f"  - pip:")
    for name, version in pip_dependencies.items():
        version = version or ""
        deps_str.append(f"    - {name}{version}")
    deps_str = "\n".join(deps_str)
    date_str = datetime.now().strftime("%c")
    conda_yaml = f"""
###############################################################################
# NOTE: This file has been auto-generated by poetry2conda
#       poetry2conda version = {__version__}
#       date: {date_str}
###############################################################################
# If you want to change the contents of this file, you should probably change
# the pyproject.toml file and then use poetry2conda again to update this file.
# Alternatively, stop using (ana)conda. 
###############################################################################
name: {env_name}
dependencies:
{deps_str}
""".lstrip()
    return conda_yaml


def write_file(filename: str, contents: str) -> None:
    """Write `contents` to `filename`, or to stdout when filename is '-'.

    Parent directories are created as needed; ExitStack ensures the file is
    closed without ever closing sys.stdout.
    """
    context = contextlib.ExitStack()
    if filename == '-':
        f = sys.stdout
    else:
        environment_yaml = pathlib.Path(filename)
        if not environment_yaml.exists():
            environment_yaml.parent.mkdir(parents=True, exist_ok=True)
        f = context.enter_context(environment_yaml.open('w'))
    with context:
        f.write(contents)


def main():
    """CLI entry point: parse arguments, convert, write output."""
    parser = argparse.ArgumentParser(
        description="Convert a poetry-based pyproject.toml "
        "to a conda environment.yaml"
    )
    parser.add_argument(
        "pyproject",
        metavar="TOML",
        type=argparse.FileType("r"),
        help="pyproject.toml input file.",
    )
    parser.add_argument(
        "environment", metavar="YAML", type=str, help="environment.yaml output file.",
    )
    parser.add_argument(
        "--dev", action="store_true", help="include dev dependencies",
    )
    parser.add_argument(
        "--extras", "-E", action="append", help="Add extra requirements",
    )
    parser.add_argument(
        "--version", action="version", version=f"%(prog)s (version {__version__})"
    )
    args = parser.parse_args()
    converted_obj = convert(args.pyproject, include_dev=args.dev, extras=args.extras)
    write_file(args.environment, converted_obj)


if __name__ == "__main__":
    main()
sanzoghenzo/poetry2conda
tests/test_example.py
"""End-to-end tests for poetry2conda.convert.main.

Bug fix: removed the leading `<filename>...` scrape artifact that made this
module a syntax error.
"""
import io

import yaml

from poetry2conda.convert import main

# foo, bar, baz, qux, quux, quuz, corge, grault, garply, waldo, fred, plugh,
# xyzzy, and thud;
SAMPLE_TOML = """\
[tool.poetry]
name = "bibimbap"
version = "1.2.3"
description = "Delicious korean food"
authors = ["<NAME> <<EMAIL>>"]
license = "MIT"

[tool.poetry.dependencies]
python = "^3.7"
foo = "^0.2.3"  # Example of a caret requirement whose major version is zero
bar = "^1.2.3"  # Example of a caret requirement whose major version is not zero
baz = "~0.4.5"  # Example of a tilde requirement whose major version is zero
qux = "~1.4.5"  # Example of a tilde requirement whose major version is not zero
quux = "2.34.5"  # Example of an exact version
quuz = ">=3.2"  # Example of an inequality
xyzzy = ">=2.1,<4.2"  # Example of two inequalities
spinach = "^19.10b0"  # Previously non-working version spec
grault = { git = "https://github.com/organization/repo.git", tag = "v2.7.4"}  # Example of a git package
pizza = {extras = ["pepperoni"], version = "^1.2.3"}  # Example of a package with extra requirements
chameleon = { git = "https://github.com/org/repo.git", tag = "v2.3" }
pudding = { version = "^1.0", optional = true }

[tool.poetry.extras]
dessert = ["pudding"]

[tool.poetry.dev-dependencies]
fork = "^1.2"

[tool.poetry2conda]
name = "bibimbap-env"

[tool.poetry2conda.dependencies]
bar = { channel = "conda-forge" }  # Example of a package on conda-forge
baz = { channel = "pip" }  # Example of a pure pip package
qux = { name = "thud" }  # Example of a package that changes names in conda
chameleon = { name = "lizard", channel = "animals", version = "^2.5.4" }  # Example of a package that changes from git to regular conda

[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
"""

SAMPLE_YAML = """\
name: bibimbap-env
dependencies:
  - python>=3.7,<4.0
  - foo>=0.2.3,<0.3.0
  - conda-forge::bar>=1.2.3,<2.0.0
  - thud>=1.4.5,<1.5.0
  - quux==2.34.5
  - quuz>=3.2
  - xyzzy>=2.1,<4.2
  - spinach>=19.10b0,<20.0
  - pizza>=1.2.3,<2.0.0  # Note that extra requirements are not supported on conda :-(
  - animals::lizard>=2.5.4,<3.0.0
  - pip
  - pip:
    - baz>=0.4.5,<0.5.0
    - git+https://github.com/organization/repo.git@v2.7.4#egg=grault
"""

SAMPLE_YAML_EXTRA = """\
name: bibimbap-env
dependencies:
  - python>=3.7,<4.0
  - foo>=0.2.3,<0.3.0
  - conda-forge::bar>=1.2.3,<2.0.0
  - thud>=1.4.5,<1.5.0
  - quux==2.34.5
  - quuz>=3.2
  - xyzzy>=2.1,<4.2
  - spinach>=19.10b0,<20.0
  - pizza>=1.2.3,<2.0.0  # Note that extra requirements are not supported on conda :-(
  - animals::lizard>=2.5.4,<3.0.0
  - pudding>=1.0,<2.0
  - pip
  - pip:
    - baz>=0.4.5,<0.5.0
    - git+https://github.com/organization/repo.git@v2.7.4#egg=grault
"""

SAMPLE_YAML_DEV = """\
name: bibimbap-env
dependencies:
  - python>=3.7,<4.0
  - foo>=0.2.3,<0.3.0
  - conda-forge::bar>=1.2.3,<2.0.0
  - thud>=1.4.5,<1.5.0
  - quux==2.34.5
  - quuz>=3.2
  - xyzzy>=2.1,<4.2
  - spinach>=19.10b0,<20.0
  - pizza>=1.2.3,<2.0.0  # Note that extra requirements are not supported on conda :-(
  - animals::lizard>=2.5.4,<3.0.0
  - fork>=1.2,<2.0
  - pip
  - pip:
    - baz>=0.4.5,<0.5.0
    - git+https://github.com/organization/repo.git@v2.7.4#egg=grault
"""


def test_sample(tmpdir, mocker):
    """Default invocation: no extras, no dev dependencies."""
    toml_file = tmpdir / "pyproject.toml"
    yaml_file = tmpdir / "environment.yaml"
    with toml_file.open("w") as fd:
        fd.write(SAMPLE_TOML)
    expected = yaml.safe_load(io.StringIO(SAMPLE_YAML))
    mocker.patch("sys.argv", ["poetry2conda", str(toml_file), str(yaml_file)])
    main()
    with yaml_file.open("r") as fd:
        result = yaml.safe_load(fd)
    assert result == expected


def test_sample_extra(tmpdir, mocker):
    """-E dessert pulls the optional `pudding` dependency in."""
    toml_file = tmpdir / "pyproject.toml"
    yaml_file = tmpdir / "environment.yaml"
    with toml_file.open("w") as fd:
        fd.write(SAMPLE_TOML)
    expected = yaml.safe_load(io.StringIO(SAMPLE_YAML_EXTRA))
    mocker.patch(
        "sys.argv", ["poetry2conda", str(toml_file), str(yaml_file), "-E", "dessert"]
    )
    main()
    with yaml_file.open("r") as fd:
        result = yaml.safe_load(fd)
    assert result == expected


def test_sample_dev(tmpdir, mocker):
    """--dev includes the dev-dependencies section (`fork`)."""
    toml_file = tmpdir / "pyproject.toml"
    yaml_file = tmpdir / "environment.yaml"
    with toml_file.open("w") as fd:
        fd.write(SAMPLE_TOML)
    expected = yaml.safe_load(io.StringIO(SAMPLE_YAML_DEV))
    mocker.patch("sys.argv", ["poetry2conda", str(toml_file), str(yaml_file), "--dev"])
    main()
    with yaml_file.open("r") as fd:
        result = yaml.safe_load(fd)
    assert result == expected
sanzoghenzo/poetry2conda
poetry2conda/__init__.py
# Note the version managed by bump2version. # Do not change it manually here, use bump2version to change it. __version__ = "0.3.0"
Shopzilla-Ops/gitlab-jenkins-connector
gitlabjenkinsweb/gitlabjenkinsweb.py
#!/usr/bin/env python
# * Copyright 2014 Shopzilla.com
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# *   http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *
# * http://tech.shopzilla.com
# *
"""Tornado web service receiving GitLab web/system hooks and mirroring
repositories' jenkins.xml trigger files into Jenkins jobs.

Bug fixes: removed the leading `<filename>...` scrape artifact (a syntax
error); added explicit `import logging.handlers` (RotatingFileHandler is in
the `logging.handlers` submodule, which `import logging` alone does not
load) and `import tornado.web` (HTTPError/Application were only reachable
through a transitive import).
"""

import tornado.gen
import tornado.httpclient
import tornado.web

from gitlabjenkins import BaseHandler
from gitlabjenkins import GitLab
import gitlab
import logging
import logging.handlers
import yaml
import sys


class WebHookHandler(BaseHandler):
    """Handles per-repository push webhooks from GitLab."""

    @tornado.gen.coroutine
    def post(self):
        data = self.parse_body()
        # An all-zero "after" SHA means the branch was deleted: drop the job.
        if data['after'] == '0000000000000000000000000000000000000000':
            job_name = self.jenkins.get_job_name(data)
            yield self.jenkins.delete_job(job_name)
            return
        tree = data['repository']['homepage'] + '/raw/' + data['after'] + '/'
        triggers = ['jenkins.xml']
        for filename in triggers:
            try:
                response = yield self.httpclient.fetch(tree + filename)
                break  # Go to else clause
            except tornado.httpclient.HTTPError as e:
                if str(e) != 'HTTP 404: Not Found':
                    logger.error('Error: ' + str(e))
                continue
        else:
            # for/else: no trigger file fetched successfully.
            logger.info('No trigger files found')
            return
        if filename == 'jenkins.xml':
            raw_config_xml = response.body
            (job_name, config_xml) = self.jenkins.process_config_xml(
                self.gl, data, raw_config_xml)
            yield self.jenkins.create_or_update_job(job_name, config_xml)
            yield self.jenkins.trigger_build(job_name)


class SystemHookHandler(BaseHandler):
    """Handles instance-wide system hooks, dispatched by event_name."""

    def _dispatch(self, m):
        self.body = self.parse_body()
        func = getattr(self, self.body.get('event_name'), None)
        if func:
            return func()
        else:
            logger.error('Undefined event ' + self.body.get('event_name'))
            raise tornado.web.HTTPError(404)

    # other seen events: user_add_to_team, user_remove_from_team
    def project_create(self):
        # Register our webhook on every newly created project.
        project_id = self.body.get('project_id')
        self.gl.register_webhook(project_id)

    def user_remove_from_team(self):
        pass

    def user_add_to_team(self):
        pass

    def project_destroy(self):
        pass


if __name__ == '__main__':
    logger = logging.getLogger()
    with open('/etc/gitlabjenkins/settings.yaml') as _file:
        # NOTE(review): yaml.load without a Loader is unsafe on untrusted
        # input; consider yaml.safe_load for this local config file too.
        settings = yaml.load(_file)
    log_file = settings.get('log_file')
    log_max_bytes = settings.get('log_max_bytes')
    log_max_files = settings.get('log_max_files')
    if not all([log_file, log_max_bytes, log_max_files]):
        raise Exception('log_file, log_max_bytes, or log_max_files not '
                        'defined in /etc/gitlabjenkins/settings.yaml')
    formatter = logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s')
    logfh = logging.handlers.RotatingFileHandler(
        log_file,
        maxBytes=log_max_bytes,
        backupCount=log_max_files
    )
    logfh.setFormatter(formatter)
    logger = logging.getLogger('')
    logger.setLevel(logging.INFO)
    logger.addHandler(logfh)
    logging.getLogger('requests').setLevel(logging.ERROR)
    logger.info('Starting gitlabjenkins service')
    gitlab_server = settings.get('gitlab_server', '').rstrip('/')
    gitlab_private_token = settings.get('gitlab_private_token')
    gitlabjenkins_port = settings.get('gitlabjenkins_server_port')
    if not gitlab_server or not gitlab_private_token or not gitlabjenkins_port:
        raise Exception('gitlab_server, gitlab_private_token, '
                        'or gitlabjenkins_port not defined in '
                        '/etc/gitlabjenkins/settings.yaml')
    gl = GitLab(gitlab_server, gitlab_private_token)
    try:
        gl.auth()
    except gitlab.GitlabAuthenticationError:
        logger.error('Invalid authentication token')
        sys.exit(1)
    gl.register_server()
    gl.register_webhook()
    application = tornado.web.Application([
        (r'/webhook', WebHookHandler),
        (r'/systemhook', SystemHookHandler),
    ])
    application.listen(gitlabjenkins_port)
    tornado.ioloop.IOLoop.instance().start()
Shopzilla-Ops/gitlab-jenkins-connector
gitlabjenkins/szgitlab.py
# * Copyright 2014 Shopzilla.com
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# *   http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *
# * http://tech.shopzilla.com
# *
"""GitLab API connector that self-registers this service's system hook and
per-project webhooks."""

import gitlab
import logging
import yaml

logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())


class GitLab(gitlab.Gitlab):
    '''Provides a connector to the gitlab api'''

    def __init__(self, server, token):
        '''Import settings

        Reads the public URL/port this service is reachable at from
        /etc/gitlabjenkins/settings.yaml; GitLab will call back to it.
        '''
        super(GitLab, self).__init__(server, token)
        with open('/etc/gitlabjenkins/settings.yaml') as _file:
            # NOTE(review): yaml.load without a Loader is unsafe on
            # untrusted input; safe_load would do here.
            settings = yaml.load(_file)
        self.gitlabjenkins_server = settings.get('gitlabjenkins_server_url',
                                                 '').rstrip('/')
        self.gitlabjenkins_port = settings.get('gitlabjenkins_server_port')
        if not self.gitlabjenkins_server or not self.gitlabjenkins_port:
            raise Exception('gitlabjenkins_server or gitlabjenkins_port not '
                            'defined in /etc/gitlabjenkins/settings.yaml')

    def register_server(self):
        '''Sets up a gitlab system hook to contact this service

        Idempotent: only adds the hook if its URL is not already present.
        '''
        logger.info("Validating system hook configuration")
        server_hook = '{}:{}/systemhook'.format(self.gitlabjenkins_server,
                                                self.gitlabjenkins_port)
        configured_hooks = [h.url for h in self.Hook()]
        if server_hook not in configured_hooks:
            logger.info("System hook was not configured, adding")
            system_hook = self.Hook({"url": server_hook})
            system_hook.save()
        logger.info("Done")
        return

    def register_webhook(self, project_id=None, page_limit=1000):
        '''Sets up a gitlab web hooks to this service on new repo creation

        With project_id=None, walks every project (paged, 100 per page, up
        to page_limit pages) and ensures the webhook exists on each;
        otherwise only the given project is processed.
        '''
        projects = []
        webhook = '{}:{}/webhook'.format(self.gitlabjenkins_server,
                                         self.gitlabjenkins_port)
        if project_id is None:
            logger.info("Registering Webhook on existing repositories")
            for page_num in range(1, page_limit):
                some_projects = self.Project(page=page_num, per_page=100)
                if not some_projects:
                    break
                projects.extend(some_projects)
        else:
            try:
                p = self.Project(id=project_id)
            except gitlab.GitlabGetError:
                logger.error("Invalid project id (%d)" % project_id)
                return
            projects.append(p)
        for project in projects:
            logger.info("Ensuring webhook on " + project.name)
            configured_webhooks = [h.url for h in project.Hook()]
            if webhook not in configured_webhooks:
                logger.info("Adding webhook to " + project.name)
                hook = project.Hook({"url": webhook})
                hook.save()
        logger.info("Done")
Shopzilla-Ops/gitlab-jenkins-connector
gitlabjenkins/jenkins.py
# * Copyright 2014 Shopzilla.com
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# *   http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *
# * http://tech.shopzilla.com
# *
"""Async (Tornado) connector to the Jenkins HTTP API: create, update,
delete, and trigger jobs derived from GitLab push events."""

import tornado.gen
import tornado.httpclient
import logging
import urllib
import yaml

logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())


class Jenkins(object):
    '''Provides a connector to the jenkins api'''

    def __init__(self):
        '''Import settings (jenkins_server URL) from the service config.'''
        with open('/etc/gitlabjenkins/settings.yaml') as _file:
            settings = yaml.load(_file)
        self.jenkins_server = settings.get('jenkins_server', '').rstrip('/')
        if not self.jenkins_server:
            raise Exception('jenkins_server not defined in '
                            '/etc/gitlabjenkins/settings.yaml')

    def get_job_name(self, data):
        '''Get job name from data dict

        Derives "{group}-project" (plus "-{branch}" for non-master
        branches) from the push event's ssh URL ("git@host:group/proj.git")
        and ref.
        '''
        group = data['repository']['url'].split(':')[1].split('/')[0]
        project = data['repository']['url'].split(':')[1].split(
            '/')[1].split('.')[0]
        branch = data['ref'].split('/')[-1]
        name = '{%s}-%s' % (group, project)
        if branch and branch != 'master':
            name = name + '-{%s}' % branch
        return name

    @tornado.gen.coroutine
    def fetch(self, url):
        '''Fetch a page asynchronously; returns None on any HTTP error.'''
        http_client = tornado.httpclient.AsyncHTTPClient()
        try:
            response = yield http_client.fetch(url)
            raise tornado.gen.Return(response)
        except tornado.httpclient.HTTPError as e:
            logger.error('Error: ' + str(e) + ' on ' + urllib.unquote(url))
            raise tornado.gen.Return(None)

    @tornado.gen.coroutine
    def authorized_post(self, url, data):
        '''Send POST with added jenkins authentication parameters'''
        http_client = tornado.httpclient.AsyncHTTPClient()
        request = tornado.httpclient.HTTPRequest(
            url,
            method='POST',
            body=data,
            headers={'content-type': 'text/xml'}
        )
        try:
            logger.debug('Posting to ' + urllib.unquote(url))
            response = yield http_client.fetch(request)
            logger.debug('Post to ' + urllib.unquote(url) + ' completed.')
            raise tornado.gen.Return(response)
        except tornado.httpclient.HTTPError as e:
            logger.error('Error doing authorized post: ' + str(e))
            return

    @tornado.gen.coroutine
    def fetch_config_xml(self, job_name):
        '''Get jenkins config.xml for a job; None if the job is missing.'''
        url = '{}/job/{}/config.xml'.format(self.jenkins_server,
                                            urllib.quote(job_name))
        response = yield self.fetch(url)
        if response is None:
            raise tornado.gen.Return(None)
        config_xml = response.body
        raise tornado.gen.Return(config_xml)

    def process_config_xml(self, gitlab, data, config_xml):
        '''Replace jenkins.xml variables with data from GitLab'''
        job_name = self.get_job_name(data)
        user = gitlab.User(id=data['user_id'])
        repo = data['repository']['url'].replace(':', '/').replace(
            'git@', 'http://')
        ref, branch = data['ref'].split('/')[1:3]
        vars = {
            'USER_EMAIL': user.email,
            'GIT_REPOSITORY': repo,
            'GIT_BRANCH': branch,
            'GIT_URL': data['repository']['homepage'],
            'PROJECT_NAME': job_name
        }
        for k, v in vars.iteritems():
            config_xml = config_xml.replace(k, v)
        return (job_name, config_xml)

    @tornado.gen.coroutine
    def trigger_build(self, job_name):
        '''Trigger a jenkins build for the job associated with a repo'''
        url = '{}/job/{}/build'.format(self.jenkins_server,
                                       urllib.quote(job_name))
        logger.info('Triggering a build of %s', job_name)
        yield self.authorized_post(url, '')
        return

    @tornado.gen.coroutine
    def create_job(self, job_name, config_xml):
        '''Create a jenkins build job for a repo'''
        url = '{}/createItem?name={}'.format(self.jenkins_server,
                                             urllib.quote(job_name))
        yield self.authorized_post(url, config_xml)
        return

    @tornado.gen.coroutine
    def delete_job(self, job_name):
        '''Delete a jenkins build job for a repo'''
        url = '{}/job/{}/doDelete'.format(self.jenkins_server,
                                          urllib.quote(job_name))
        logger.info('Deleting Job ' + urllib.unquote(url))
        yield self.authorized_post(url, '')
        return

    @tornado.gen.coroutine
    def update_job(self, job_name, config_xml):
        '''Update settings for a jenkins build job for a repo'''
        url = '{}/job/{}/config.xml'.format(self.jenkins_server,
                                            urllib.quote(job_name))
        yield self.authorized_post(url, config_xml)
        return

    @tornado.gen.coroutine
    def create_or_update_job(self, job_name, config_xml):
        '''Update a jenkins job for a repo or build one if none exists'''
        current_config = yield self.fetch_config_xml(job_name)
        if current_config is None:
            logger.info('Creating ' + job_name)
            yield self.create_job(job_name, config_xml)
        # Bug fix: the original fell through after creating, and since
        # None != config_xml is always true it immediately issued a
        # redundant update_job (with a misleading "Updating" log line).
        # `elif` makes create and update mutually exclusive.
        # there may be a whitespace problem with this comparison
        elif current_config != config_xml:
            logger.info('Updating ' + job_name)
            yield self.update_job(job_name, config_xml)
Shopzilla-Ops/gitlab-jenkins-connector
gitlabjenkins/basehandler.py
# * Copyright 2014 Shopzilla.com
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# *   http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# *
# * http://tech.shopzilla.com
# *
"""Shared Tornado request handler wiring up GitLab and Jenkins connectors.

Bug fix: removed the leading `<reponame>...` scrape artifact that made this
module a syntax error.
"""

from szgitlab import GitLab
from jenkins import Jenkins
import json
import tornado.httpclient
import tornado.web
import yaml


class BaseHandler(tornado.web.RequestHandler):
    '''Request handler modified to abstract away gitlab web/service calls'''

    # Position of the <method> segment in /handler/<method> URIs.
    method_index = 2  # /handler/<method>

    def __init__(self, application, request, **kwargs):
        '''Setup gitlab and jenkins connectors'''
        super(BaseHandler, self).__init__(application, request, **kwargs)
        with open('/etc/gitlabjenkins/settings.yaml') as _file:
            # NOTE(review): yaml.load without a Loader is unsafe on
            # untrusted input; safe_load would do here.
            settings = yaml.load(_file)
        gitlab_server = settings.get('gitlab_server', '').rstrip('/')
        gitlab_private_token = settings.get('gitlab_private_token')
        if not gitlab_server or not gitlab_private_token:
            raise Exception('gitlab_server or gitlab_private_token not '
                            'defined in /etc/gitlabjenkins/settings.yaml')
        self.gl = GitLab(gitlab_server, gitlab_private_token)
        self.httpclient = tornado.httpclient.AsyncHTTPClient()
        self.jenkins = Jenkins()

    def _dispatch(self, m):
        '''Route requests to methods based on http verb and hook uri'''
        if self.request.uri.endswith('/'):
            # NOTE(review): 'index' here vs self._index() in the except
            # below — neither method is defined in this class; confirm
            # subclasses provide them or this path 500s.
            func = getattr(self, 'index', None)
            return func()
        path = self.request.uri.split('?')[0]
        try:
            method = path.split('/')[self.method_index].split(".")[0]
            if not method.startswith('_'):
                # e.g. GET /handler/foo -> self._get_foo()
                func = getattr(self, "_" + m + "_" + method, None)
                if func:
                    return func()
                else:
                    raise tornado.web.HTTPError(404)
            else:
                raise tornado.web.HTTPError(404)
        except:
            # NOTE(review): bare except also swallows the HTTPError(404)
            # raised above and falls back to _index(); consider narrowing.
            return self._index()

    def get(self):
        """Returns self._dispatch()"""
        return self._dispatch("get")

    def post(self):
        """Returns self._dispatch()"""
        return self._dispatch("post")

    def parse_body(self):
        """Parse the request body as JSON; empty dict on malformed input."""
        try:
            r = json.loads(self.request.body)
        except:
            r = {}
        return r
ArtellaPipe/artellapipe-tools-bugtracker
tests/test_general.py
#! /usr/bin/env python # -*- coding: utf-8 -*- """ Module that contains tests for artellapipe-tools-bugtracker """ import pytest from artellapipe.tools.bugtracker import __version__ def test_version(): assert __version__.get_version()
ArtellaPipe/artellapipe-tools-bugtracker
artellapipe/tools/bugtracker/core/bugtracker.py
<gh_stars>0 #!/usr/bin/env python # -*- coding: utf-8 -*- """ Tool to handle bugs and requests for the different tools of the pipeline """ from __future__ import print_function, division, absolute_import __author__ = "<NAME>" __license__ = "MIT" __maintainer__ = "<NAME>" __email__ = "<EMAIL>" from artellapipe.core import tool # Defines ID of the tool TOOL_ID = 'artellapipe-tools-bugtracker' class BugTrackerTool(tool.ArtellaTool, object): def __init__(self, *args, **kwargs): super(BugTrackerTool, self).__init__(*args, **kwargs) @classmethod def config_dict(cls, file_name=None): base_tool_config = tool.ArtellaTool.config_dict(file_name=file_name) tool_config = { 'name': 'Bug Tracker', 'id': 'artellapipe-tools-bugtracker', 'logo': 'bugtracker_logo', 'icon': 'bugtracker', 'tooltip': 'Tool to handle bugs and requests for the different tools of the pipeline', 'tags': ['error', 'bug', 'report'], 'sentry_id': 'https://9d2160bb725a4fbcacb4d72aa9df6eaf@sentry.io/1797903', 'is_checkable': False, 'is_checked': False, 'menu_ui': {'label': 'Bug Tracker', 'load_on_startup': False, 'color': '', 'background_color': ''}, 'menu': [ {'label': 'General', 'type': 'menu', 'children': [{'id': 'artellapipe-tools-bugtracker', 'type': 'tool'}]}], 'shelf': [ {'name': 'General', 'children': [{'id': 'artellapipe-tools-bugtracker', 'display_label': False, 'type': 'tool'}]} ] } base_tool_config.update(tool_config) return base_tool_config class BugTrackerToolset(tool.ArtellaToolset, object): ID = TOOL_ID def __init__(self, *args, **kwargs): self._tool = kwargs.pop('tool', None) self._traceback = kwargs.pop('traceback', None) super(BugTrackerToolset, self).__init__(*args, **kwargs) def contents(self): from artellapipe.tools.bugtracker.widgets import bugtracker bug_tracker = bugtracker.ArtellaBugTracker( project=self._project, config=self._config, settings=self._settings, parent=self, tool=self._tool, traceback=self._traceback) return [bug_tracker]
ArtellaPipe/artellapipe-tools-bugtracker
artellapipe/tools/bugtracker/widgets/bugtracker.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Tool that allows to detect errors and trace calls and easily them to TDs """ from __future__ import print_function, division, absolute_import __author__ = "<NAME>" __license__ = "MIT" __maintainer__ = "<NAME>" __email__ = "<EMAIL>" import os import sys import getpass import logging import platform import datetime import traceback import subprocess import webbrowser import collections try: from urllib import quote except ImportError: from urllib2 import quote from Qt.QtCore import * from Qt.QtWidgets import * import tpDcc as tp from tpDcc.libs.python import osplatform from tpDcc.libs.qt.widgets import dividers, stack from tpDcc.libs.qt.core import base, qtutils import artellapipe from artellapipe.core import tool LOGGER = logging.getLogger() class ArtellaBugTracker(tool.ArtellaToolWidget): BUG_TYPES = ['Bug', 'Request'] ATTACHER_TYPE = tool.ToolAttacher.Dialog CPU_INFO = None GPU_INFO = None def __init__(self, project, config, settings, parent, tool=None, traceback=None): self._tool = tool self._trace = traceback self._bug_data = dict() super(ArtellaBugTracker, self).__init__(project=project, config=config, settings=settings, parent=parent) def ui(self): super(ArtellaBugTracker, self).ui() top_layout = QGridLayout() type_lbl = QLabel('Type') self._types_combo = QComboBox() self._types_combo.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred) tool_lbl = QLabel('Tool: ') self._tools_combo = QComboBox() self._tools_combo.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred) department_lbl = QLabel('Department: ') self._departments_combo = QComboBox() self._departments_combo.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred) top_layout.setColumnStretch(1, 1) top_layout.addWidget(type_lbl, 0, 0) top_layout.addWidget(self._types_combo, 0, 1) top_layout.addWidget(tool_lbl, 1, 0) top_layout.addWidget(self._tools_combo, 1, 1) top_layout.addWidget(department_lbl, 2, 0) 
top_layout.addWidget(self._departments_combo, 2, 1) self._stack = stack.SlidingStackedWidget() self._stack.set_vertical_mode() self._bug_widget = BugWidget(main=self, project=self._project, traceback=self._trace) self._request_widget = RequestWidget(main=self, project=self._project) self._stack.addWidget(self._bug_widget) self._stack.addWidget(self._request_widget) self.main_layout.addLayout(top_layout) self.main_layout.addWidget(self._stack) self._fill_combos() @property def tool(self): return self._tool def has_tool(self): if self._tool and hasattr(self._tool, 'config'): return True return False def get_current_data(self): data = { 'department': self._departments_combo.currentText(), 'tool': { 'name': self._tools_combo.currentData().get('name'), 'version': self._tools_combo.currentData().get('version'), 'config': self._tools_combo.currentData().get('config'), 'data': self._tools_combo.currentData(), } } return data def _fill_combos(self): self._tools_combo.clear() self._departments_combo.clear() self._types_combo.clear() # Types for t in self.BUG_TYPES: self._types_combo.addItem(t) # Tools valid_tool = False tool_info = None if self._tool is not None: if not hasattr(self._tool, 'config'): LOGGER.warning('Given Tool is not a valid one. Specify manually the tool in the Bug Tracker ...') else: tool_name = self._tool.config.data.get('name', None) if tool_name: tool_info = artellapipe.ToolsMgr().get_tool_data_from_tool(self._tool, as_dict=True) if tool_info: valid_tool = True else: LOGGER.warning( 'Impossible to retrieve tool information. Specify manually the tool in the Bug Tracker ...') else: LOGGER.warning('Impossible to retrieve tool name. 
Specify manually the tool in the Bug Tracker ...') if valid_tool and tool_info: all_tools = tool_info else: all_tools = dict() for package_name in ['artellapipe', artellapipe.project.get_clean_name()]: package_tools = tp.ToolsMgr().get_package_tools(package_name) or list() all_tools.update(package_tools) for tool_id, tool_info in all_tools.items(): tool_name = tool_info.get('name', None) if not tool_name: continue tool_icon_name = tool_info.get('icon', None) tool_version = tool_info.get('version', None) if tool_version: tool_name = '{} - {}'.format(tool_name, tool_version) if tool_icon_name: tool_icon = tp.ResourcesMgr().icon(tool_icon_name) self._tools_combo.addItem(tool_icon, tool_name, userData=tool_info) else: self._tools_combo.addItem(tool_name, userData=tool_info) # Departments all_departents = self._project.departments for department in all_departents: self._departments_combo.addItem(department) def setup_signals(self): self._types_combo.currentIndexChanged.connect(self._on_type_index_changed) self._stack.animFinished.connect(self._on_stack_anim_finished) def _on_type_index_changed(self, index): self._types_combo.setEnabled(False) self._stack.slide_in_index(index) def _on_stack_anim_finished(self): self._types_combo.setEnabled(True) class BugWidget(base.BaseWidget, object): def __init__(self, main, project, traceback=None, parent=None): self._project = project self._main = main super(BugWidget, self).__init__(parent=parent) self.set_trace(trace=traceback) def ui(self): super(BugWidget, self).ui() bug_data_frame = QFrame() bug_data_frame.setFrameStyle(QFrame.Raised | QFrame.StyledPanel) self._bug_data_layout = QGridLayout() bug_data_frame.setLayout(self._bug_data_layout) self._trace_text = QTextEdit() self._trace_text.setMinimumHeight(100) self._trace_text.setReadOnly(True) self._trace_text.setEnabled(False) self._title_line = QLineEdit() self._title_line.setPlaceholderText('Short title for the bug ...') self._steps_area = QTextEdit() txt_msg = 'Explain with 
details how to reproduce the error ...' steps_lbl = QLabel(txt_msg) if qtutils.is_pyside2(): self._steps_area.setPlaceholderText(txt_msg) self._steps_area.setMinimumHeight(350) self._send_btn = QPushButton('Send Bug') self._send_btn.setIcon(tp.ResourcesMgr().icon('bug')) self._send_btn.setEnabled(False) self.main_layout.addWidget(dividers.Divider('Bug Data')) self.main_layout.addWidget(bug_data_frame) self.main_layout.addWidget(dividers.Divider('Error Trace')) self.main_layout.addWidget(self._trace_text) self.main_layout.addLayout(dividers.DividerLayout()) self.main_layout.addWidget(self._title_line) if qtutils.is_pyside(): self.main_layout.addWidget(steps_lbl) self.main_layout.addWidget(self._steps_area) self.main_layout.addLayout(dividers.DividerLayout()) self.main_layout.addWidget(self._send_btn) self._fill_bug_data() def setup_signals(self): self._title_line.textChanged.connect(self._update_ui) self._steps_area.textChanged.connect(self._update_ui) self._send_btn.clicked.connect(self._on_send_bug) def set_trace(self, trace): """ Sets the traceback text :param trace: str """ self._trace_text.setPlainText(str(trace)) self._update_ui() def _update_ui(self): """ Internal function that updates Artella Bug Tracker UI """ self._send_btn.setEnabled(self._steps_area.toPlainText() != '' and self._title_line.text() != '') def _get_cpu_info(self): cpu_info = dict() try: import cpuinfo cpuinfo_py = os.path.join(os.path.dirname(os.path.abspath(cpuinfo.__file__)), 'cpuinfo.py') out = subprocess.check_output('python {}'.format(cpuinfo_py), creationflags=0x08000000) cpu_data = str(out).split('\n') for inf in cpu_data: inf = inf.rstrip() inf_split = inf.split(':') if len(inf_split) != 2: continue cpu_info[inf_split[0]] = inf_split[1].lstrip() except Exception as exc: LOGGER.warning('Impossible to retrieve CPU info: {} | {}'.format(exc, traceback.format_exc())) return dict() return cpu_info def _get_gpu_info(self): gpu_info = { 'gpus': {} } try: import GPUtil 
GPUtil.showUtilization() gpus_list = GPUtil.getGPUs() for gpu in gpus_list: gpu_info['gpus'][gpu.uuid] = { 'name': gpu.name, 'driver': gpu.driver, 'memoryTotal': gpu.memoryTotal, 'memoryUsed': gpu.memoryUsed, 'memoryUtil': gpu.memoryUtil, 'load': gpu.load } except Exception as exc: LOGGER.warning('Impossible to retrieve GPU info: {} | {}'.format(exc, traceback.format_exc())) return dict() return gpu_info def _get_disk_usage(self): try: _ntuple_diskusage = collections.namedtuple('usage', 'total used free') def bytes2human(n): symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') prefix = dict() for i, s in enumerate(symbols): prefix[s] = 1 << (i + 1) * 10 for s in reversed(symbols): if n >= prefix[s]: value = float(n) / prefix[s] return '%.1f%s' % (value, s) return "%sB" % n def get_usage_and_free_percentages(total, used, free): return str(round((used / total) * 100, 2)), str(round((free / total) * 100, 2)) if hasattr(os, 'statvfs'): # POSIX def disk_usage(path): st = os.statvfs(path) free = st.f_bavail * st.f_frsize total = st.f_blocks * st.f_frsize used = (st.f_blocks - st.f_bfree) * st.f_frsize return _ntuple_diskusage(total, used, free) elif os.name == 'nt': # Windows import ctypes def disk_usage(path): _, total, free = ctypes.c_ulonglong(), ctypes.c_ulonglong(), ctypes.c_ulonglong() if sys.version_info >= (3,) or isinstance(path, unicode): fun = ctypes.windll.kernel32.GetDiskFreeSpaceExW else: fun = ctypes.windll.kernel32.GetDiskFreeSpaceExA ret = fun(path, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free)) if ret == 0: raise ctypes.WinError() used = total.value - free.value return _ntuple_diskusage(total.value, used, free.value) else: raise NotImplementedError("platform not supported") disk_usage_dict = { 'drives': {} } paths_to_get_disk_usage_of = [ os.path.dirname(sys.executable), self._project.get_project_path() ] for p in paths_to_get_disk_usage_of: path_drive = os.path.splitdrive(p)[0] usage = disk_usage(p) if not usage: continue usage_percentage, 
free_percentage = get_usage_and_free_percentages( usage.total, usage.used, usage.free ) disk_usage_dict['drives'][path_drive] = { 'total': bytes2human(usage.total), 'used': bytes2human(usage.used), 'free': bytes2human(usage.free), 'usage_percentage': usage_percentage, 'free_percentage': free_percentage } return disk_usage_dict except Exception as exc: LOGGER.warning('Impossible to retrieve Disk Usage info: {} | {}'.format(exc, traceback.format_exc())) return None def _get_base_bug_data(self): bug_data = { 'user': getpass.getuser(), 'time': str(datetime.datetime.now()), 'pythonVersion': sys.version, 'friendlyPythonVersion': "{0}.{1}.{2}.{3}.{4}".format(*sys.version_info), 'node': platform.node(), 'OSRelease': platform.release(), 'OSVersion': platform.platform(), 'processor': platform.processor(), 'machineType': platform.machine(), 'env': os.environ, 'syspaths': sys.path, 'executable': sys.executable, 'dcc_name': tp.Dcc.get_name(), 'dcc_version': tp.Dcc.get_version() } return bug_data def _get_bug_data(self): bug_data = self._get_base_bug_data() if not ArtellaBugTracker.CPU_INFO: ArtellaBugTracker.CPU_INFO = self._get_cpu_info() if ArtellaBugTracker.CPU_INFO: bug_data.update(ArtellaBugTracker.CPU_INFO) if not ArtellaBugTracker.GPU_INFO: ArtellaBugTracker.GPU_INFO = self._get_gpu_info() if ArtellaBugTracker.GPU_INFO: bug_data.update(ArtellaBugTracker.GPU_INFO) disk_usage = self._get_disk_usage() if disk_usage: bug_data.update(disk_usage) return bug_data def _fill_bug_data(self): qtutils.clear_layout(self._bug_data_layout) def _add_info(title, data, row, column): title_lbl = QLabel('{}: '.format(title)) title_lbl.setStyleSheet('font-weight: bold') title_lbl.setAlignment(Qt.AlignRight) data_text = str(bug_data.get(data, '-not found-')) data_lbl = QLabel(data_text) data_lbl.setToolTip(data_text) data_lbl.setStatusTip(data_text) data_lbl.setStyleSheet('background-color: rgba(45, 85, 45, 50);') self._bug_data_layout.setColumnStretch(column + 1, 1) 
self._bug_data_layout.addWidget(title_lbl, row, column) self._bug_data_layout.addWidget(data_lbl, row, column + 1) def _add_drives_info(row, column): drives = bug_data.get('drives') if not drives: return i = 0 for drive_letter, drive_info in drives.items(): title_lbl = QLabel('Hard Drive Usage ({})'.format(drive_letter)) title_lbl.setStyleSheet('font-weight: bold') title_lbl.setAlignment(Qt.AlignRight) data_text = '{} of {} ({}%)'.format( drive_info['used'], drive_info['total'], drive_info['usage_percentage']) data_lbl = QLabel(data_text) data_lbl.setToolTip(data_text) data_lbl.setStatusTip(data_text) data_lbl.setStyleSheet('background-color: rgba(45, 85, 45, 50);') self._bug_data_layout.setColumnStretch(column + i + 1, 1) self._bug_data_layout.addWidget(title_lbl, row + i, column) self._bug_data_layout.addWidget(data_lbl, row + i, column + 1) i += 1 def _add_gpu_info(row, column): def _add_info(gpu_id, title, data, index, row, column): title_lbl = QLabel('{}: '.format(title)) title_lbl.setStyleSheet('font-weight: bold') title_lbl.setAlignment(Qt.AlignRight) data_text = str(gpus[gpu_id].get(data, '-not found-')) data_lbl = QLabel(data_text) data_lbl.setToolTip(data_text) data_lbl.setStatusTip(data_text) data_lbl.setStyleSheet('background-color: rgba(45, 85, 45, 50);') self._bug_data_layout.setColumnStretch(column + index + 1, 1) self._bug_data_layout.addWidget(title_lbl, row + index, column) self._bug_data_layout.addWidget(data_lbl, row + index, column + 1) gpus = bug_data.get('gpus') if not gpus: return row_index = 0 for i, gpu_id in enumerate(gpus.keys()): _add_info(gpu_id, 'GPU Name ({})'.format(i), 'name', row_index, row, column) row_index += 1 _add_info(gpu_id, 'GPU Driver ({})'.format(i), 'driver', row_index, row, column) row_index += 1 title_lbl = QLabel('GPU Usage ({})'.format(i)) title_lbl.setStyleSheet('font-weight: bold') title_lbl.setAlignment(Qt.AlignRight) data_text = '{} of {} ({}%)'.format( str(round((gpus[gpu_id]['memoryUsed'] / 1000), 2)) + 'GB', 
str(round((gpus[gpu_id]['memoryTotal'] / 1000), 2)) + 'GB', str(round(float(gpus[gpu_id]['memoryUtil']) * 100, 2))) data_lbl = QLabel(data_text) data_lbl.setToolTip(data_text) data_lbl.setStatusTip(data_text) data_lbl.setStyleSheet('background-color: rgba(45, 85, 45, 50);') self._bug_data_layout.setColumnStretch(column + row_index + 1, 1) self._bug_data_layout.addWidget(title_lbl, row + row_index, column) self._bug_data_layout.addWidget(data_lbl, row + row_index, column + 1) bug_data = self._get_bug_data() _add_info('User', 'user', 0, 0) _add_info('Time', 'time', 1, 0) _add_info('Computer Type', 'machineType', 2, 0) _add_info('Platform OS', 'OSVersion', 3, 0) _add_info('Platform Version', 'OSRelease', 4, 0) _add_info('Python Version', 'friendlyPythonVersion', 5, 0) _add_drives_info(6, 0) _add_info('DCC Name', 'dcc_name', 0, 2) _add_info('DCC Version', 'dcc_version', 1, 2) _add_info('CPU Cores', 'Count', 2, 2) _add_info('CPU Bits', 'Bits', 3, 2) _add_info('CPU Vendor', 'Brand', 4, 2) _add_gpu_info(5, 2) self._bug_data = bug_data def _send_email_bug(self): if not self._project: LOGGER.warning('Impossible to send bug because there is project defined') return project_name = self._project.name.title() if not self._project.emails: LOGGER.warning( 'Impossible to send bug because there is no emails defined in the project: {}'.format(project_name)) return project_name = self._project.name.title() current_data = self._main.get_current_data() if not current_data: LOGGER.warning('No data available to send ...') return False tool_name = current_data.get('tool', {}).get('name', None) tool_version = current_data.get('tool', {}).get('version', 'unknown') department = current_data.get('department', None) steps = self._steps_area.toPlainText() user = str(osplatform.get_user()) title = self._title_line.text() current_time = str(datetime.datetime.now()) node = platform.node() os_release = platform.release() os_version = platform.platform() os_processor = platform.processor() 
os_machine = platform.machine() executable = sys.executable dcc_name = tp.Dcc.get_name() dcc_version = tp.Dcc.get_version() msg = self._trace_text.toPlainText() msg += '\n----------------------------\n' msg += 'User: {}\n'.format(user) msg += 'Time: {}\n'.format(current_time) msg += 'Tool: {}\n'.format(tool_name) msg += 'Version: {}\n'.format(tool_version) msg += 'Project: {}\n'.format(project_name) msg += 'DCC Name: {}\n'.format(dcc_name) msg += 'DCC Version: {}\n'.format(dcc_version) msg += 'Department: {}\n'.format(department) msg += 'Computer Name: {}\n'.format(node) msg += 'Platform Release: {}\n'.format(os_release) msg += 'Platform Version: {}\n'.format(os_version) msg += 'Processor: {}\n'.format(os_processor) msg += 'Machine: {}\n'.format(os_machine) msg += 'Python Executable: {}\n'.format(executable) msg += 'Steps: \n{}\n'.format(steps) if tool_name: subject = '[{}][Bug][{}]({}) - {}'.format(project_name, tool_name, user, title) else: subject = '[{}][Bug]({}) - {}'.format(project_name, user, title) webbrowser.open( "mailto:{}?subject={}&body={}".format(';'.join(self._project.emails), subject, quote(str(msg)))) def _get_bug_data_for_sentry(self): os_data = self._get_base_bug_data() cpu_data = ArtellaBugTracker.CPU_INFO if ArtellaBugTracker.CPU_INFO else dict() gpu_data = ArtellaBugTracker.GPU_INFO if ArtellaBugTracker.GPU_INFO else dict() disk_data = self._get_disk_usage() disk_data = disk_data if disk_data else dict() return os_data, cpu_data, gpu_data, disk_data def _send_sentry_bug(self): if not self._project: LOGGER.warning('Impossible to send bug because there is project defined') return False if not self._main: LOGGER.warning('No main widget defined') return False current_data = self._main.get_current_data() if not current_data: LOGGER.warning('No data available to send ...') return False tool_name = current_data.get('tool', {}).get('name', None) tool_version = current_data.get('tool', {}).get('version', 'unknown') if not tool_name: 
LOGGER.warning('Impossible to send bug because tool name ({}) is not valid ({})'.format(tool_name)) return False import sentry_sdk from sentry_sdk import push_scope, capture_message bugtracker_sentry_id = self._main.config.data.get('sentry_id') sentry_id = None if self._main.has_tool(): sentry_id = self._main.tool.config.data.get('sentry_id', None) else: tool_config = current_data.get('tool', {}).get('config', None) if tool_config and hasattr(tool_config, 'data'): sentry_id = tool_config.data.get('sentry_id', None) if not sentry_id: LOGGER.warning('No Sentry ID found for tool: "{}". Bug will be reported as a generic one ...') sentry_id = bugtracker_sentry_id if not sentry_id: LOGGER.warning("Sentry ID is not available! Sending request using email ...") return self._send_email_request() sentry_sdk.init(sentry_id) project_name = self._project.name.title() department = current_data.get('department', None) steps = self._steps_area.toPlainText() user = str(osplatform.get_user()) title = self._title_line.text() dcc_name = tp.Dcc.get_name() dcc_version = tp.Dcc.get_version() if not tool_name or not department: LOGGER.warning( 'Impossible to send request because tool name ({}) or department are not valid ({})'.format( tool_name, department)) return False msg = '[{}][Bug][{}]({}) - {}'.format(project_name, tool_name, user, title) with push_scope() as scope: scope.user = {'username': user} scope.level = 'error' scope.set_tag('type', 'bug') scope.set_tag('project', project_name) scope.set_tag('department', department) scope.set_tag('dcc', dcc_name) scope.set_tag('dcc_version', dcc_version) scope.set_tag('version', tool_version) scope.set_tag('tool', tool_name) scope.set_extra('project', project_name) scope.set_extra('department', department) scope.set_extra('tool', tool_name) scope.set_extra('version', tool_version) scope.set_extra('steps', steps) scope.set_extra('trace', self._trace_text.toPlainText()) scope.set_extra('dcc_data', {'name': dcc_name, 'version': dcc_version}) 
os_data, cpu_data, gpu_data, disk_data = self._get_bug_data_for_sentry() if os_data: scope.set_extra('os_data', os_data) if cpu_data: scope.set_extra('cpu_data', cpu_data) if gpu_data: scope.set_extra('gpu_data', gpu_data.get('gpus', dict())) if disk_data: scope.set_extra('disk_data', disk_data.get('drives', dict())) capture_message(msg) sentry_sdk.init(bugtracker_sentry_id) return True def _on_send_bug(self): """ Internal callback function that is called when the user press Send Bug button """ try: import sentry_sdk except ImportError as exc: self._send_email_bug() LOGGER.info('Bug send through email successfully!') self._main.close_tool_attacher() return self._send_sentry_bug() LOGGER.info('Bug send successfully!') self._main.close_tool_attacher() class RequestWidget(base.BaseWidget, object): def __init__(self, main, project, parent=None): self._project = project self._main = main super(RequestWidget, self).__init__(parent=parent) def ui(self): super(RequestWidget, self).ui() self._title_line = QLineEdit() self._title_line.setPlaceholderText('Short title for the request ...') self._request_area = QTextEdit() txt_msg = 'Explain with details your request ...' 
request_lbl = QLabel(txt_msg) if qtutils.is_pyside2(): self._request_area.setPlaceholderText(txt_msg) self._request_area.setMinimumHeight(100) self._send_btn = QPushButton('Send Request') self._send_btn.setIcon(tp.ResourcesMgr().icon('message')) self._send_btn.setEnabled(False) self.main_layout.addWidget(self._title_line) if qtutils.is_pyside(): self.main_layout.addWidget(request_lbl) self.main_layout.addWidget(self._request_area) self.main_layout.addLayout(dividers.DividerLayout()) self.main_layout.addWidget(self._send_btn) def setup_signals(self): self._title_line.textChanged.connect(self._update_ui) self._request_area.textChanged.connect(self._update_ui) self._send_btn.clicked.connect(self._on_send_request) def _update_ui(self): """ Internal function that updates Artella Bug Tracker UI """ self._send_btn.setEnabled(self._request_area.toPlainText() != '' and self._title_line.text() != '') def _send_sentry_request(self): if not self._project: LOGGER.warning('Impossible to send bug because there is project defined') return False if not self._main: LOGGER.warning('No main widget defined') return False current_data = self._main.get_current_data() if not current_data: LOGGER.warning('No data available to send ...') return False tool_name = current_data.get('tool', {}).get('name', None) tool_version = current_data.get('tool', {}).get('version', 'unknown') if not tool_name: LOGGER.warning('Impossible to send request because tool name ({}) is not valid ({})'.format(tool_name)) return False import sentry_sdk from sentry_sdk import push_scope, capture_message bugtracker_sentry_id = self._main.config.data.get('sentry_id') sentry_id = None if self._main.has_tool(): sentry_id = self._main.tool.config.data.get('sentry_id', None) else: tool_data = artellapipe.ToolsMgr().get_tool_data_from_name(tool_name) if not tool_data: LOGGER.warning('No data found for tool: "{}"'.format(tool_name)) return False tool_config = tool_data.get('config', None) if tool_config and 
hasattr(tool_config, 'data'): sentry_id = tool_config.data.get('sentry_id', None) if not sentry_id: LOGGER.warning('No Sentry ID found for tool: "{}". Bug will be reported as a generic one ...') sentry_id = bugtracker_sentry_id if not sentry_id: LOGGER.warning("Sentry ID is not available! Sending request using email ...") return self._send_email_request() sentry_sdk.init(sentry_id) project_name = self._project.name.title() department = current_data.get('department', None) request = self._request_area.toPlainText() user = str(osplatform.get_user()) title = self._title_line.text() dcc_name = tp.Dcc.get_name() dcc_version = tp.Dcc.get_version() if not tool_name or not department: LOGGER.warning( 'Impossible to send request because tool name ({}) or department are not valid ({})'.format( tool_name, department)) return False msg = '[{}][Request][{}]({}) - {}'.format(project_name, tool_name, user, title) with push_scope() as scope: scope.user = {'username': user} scope.level = 'info' scope.set_tag('type', 'request') scope.set_tag('project', project_name) scope.set_tag('dcc', dcc_name) scope.set_tag('dcc_version', dcc_version) scope.set_tag('version', tool_version) scope.set_tag('tool', tool_name) scope.set_tag('department', department) scope.set_tag('tool', tool_name) scope.set_extra('project', project_name) scope.set_extra('department', department) scope.set_extra('tool', tool_name) scope.set_extra('version', tool_version) scope.set_extra('request', request) scope.set_extra('dcc_data', {'name': tp.Dcc.get_name(), 'version': tp.Dcc.get_version()}) capture_message(msg) sentry_sdk.init(bugtracker_sentry_id) return True def _send_email_request(self): if not self._project: LOGGER.warning('Impossible to send bug because there is project defined') return project_name = self._project.name.title() if not self._project.emails: LOGGER.warning( 'Impossible to send bug because there is no emails defined in the project: {}'.format(project_name)) return project_name = 
self._project.name.title() current_data = self._main.get_current_data() if not current_data: LOGGER.warning('No data available to send ...') return False tool_name = current_data.get('tool', {}).get('name', None) tool_version = current_data.get('tool', {}).get('version', 'unknown') department = current_data.get('department', None) request = self._request_area.toPlainText() user = str(osplatform.get_user()) title = self._title_line.text() current_time = str(datetime.datetime.now()) node = platform.node() os_release = platform.release() os_version = platform.platform() os_processor = platform.processor() os_machine = platform.machine() executable = sys.executable dcc_name = tp.Dcc.get_name() dcc_version = tp.Dcc.get_version() msg = '' msg += 'User: {}\n'.format(user) msg += 'Time: {}\n'.format(current_time) msg += 'Tool: {}\n'.format(tool_name) msg += 'Version: {}\n'.format(tool_version) msg += 'Project: {}\n'.format(project_name) msg += 'DCC Name: {}\n'.format(dcc_name) msg += 'DCC Version: {}\n'.format(dcc_version) msg += 'Department: {}\n'.format(department) msg += 'Computer Name: {}\n'.format(node) msg += 'Platform Release: {}\n'.format(os_release) msg += 'Platform Version: {}\n'.format(os_version) msg += 'Processor: {}\n'.format(os_processor) msg += 'Machine: {}\n'.format(os_machine) msg += 'Python Executable: {}\n'.format(executable) msg += 'Request: \n{}\n'.format(request) if tool_name: subject = '[{}][Request][{}]({}) - {}'.format(project_name, tool_name, user, title) else: subject = '[{}][Request]({}) - {}'.format(project_name, user, title) webbrowser.open( "mailto:{}?subject={}&body={}".format(';'.join(self._project.emails), subject, quote(str(msg)))) def _on_send_request(self): """ Internal callback function that is called when the user press Send Request button """ try: import sentry_sdk except ImportError as exc: self._send_email_request() LOGGER.info('Request send through email successfully!') self._main.close_tool_attacher() return 
self._send_sentry_request() LOGGER.info('Request send successfully!') self._main.close_tool_attacher()
gregoiredervaux/face_recognition
apps/train_new_face.py
<filename>apps/train_new_face.py<gh_stars>0 import cv2 import numpy as np from models import face_track_server, face_describer_server, nn, camera_server, dataloader from configs import configs import os import sys ''' The demo app utilize all servers in model folder with simple business scenario/logics: I have a camera product and I need to use it to find all visitors in my store who came here before. Main logics is in the process function, where you can further customize. ''' class TrainNewFace(camera_server.CameraServer): def __init__(self, name, *args, **kwargs): super(TrainNewFace, self).__init__(*args, **kwargs) self.name = name self.face_tracker = face_track_server.FaceTrackServer() self.face_describer = face_describer_server.FDServer( model_fp=configs.face_describer_model_fp, input_tensor_names=configs.face_describer_input_tensor_names, output_tensor_names=configs.face_describer_output_tensor_names, device=configs.face_describer_device) self.nn_model = nn.Model(path_to_model="../pretrained/init.hdf5") try: os.mkdir(configs.db_custom_path + name) except: print(name + "existe déjà") def processs(self, frame): self.face_tracker.process(frame) _faces = self.face_tracker.get_faces() _face_descriptions = [] _num_faces = len(_faces) if _num_faces == 0: return for _face in _faces: dir = os.listdir(configs.db_custom_path + self.name) cv2.imwrite(configs.db_custom_path + self.name + "/" + str(len(dir) + 1) + ".jpg", frame) def run(self): print('[Camera Server] Camera is initializing ...') if self.camera_address is not None: self.cam = cv2.VideoCapture(self.camera_address) else: print('[Camera Server] Camera is not available!') return while len(os.listdir(configs.db_custom_path + self.name)) <= 100: self.in_progress = True ret, frame = self.cam.read() self.processs(frame) self.nn_model.add_class() data_custom = dataloader.DataLoader(self.face_describer) data_custom.load_class_data(configs.db_custom_path + self.name, self.nn_model.get_nb_classes() - 1, False) 
data_custom.serialyse(configs.model_pretrained_path, self.name) #data_custom.deserialyse(configs.model_pretrained_path, self.name) #data_custom.split_from_index(128) data_init = dataloader.DataLoader(self.face_describer) data_init.deserialyse(configs.model_pretrained_path) data_init.shuffle() #data_init.split_from_index(128) data_custom.X = data_init.X[:min(len(data_custom.X)*8, len(data_init.X))] + data_custom.X data_custom.Y = data_init.Y[:min(len(data_custom.Y)*8, len(data_init.X))] + data_custom.Y data_custom.shuffle() self.nn_model.train_model_from_data(np.array(data_custom.X), np.array(data_custom.Y), epoch=40) self.nn_model.save_model(configs.model_pretrained_path + "init_custom" + configs.save_model_format) with self.nn_model.new_Graph.as_default(): loss, accuray = self.nn_model.model.evaluate(np.array(data_custom.X), np.array(data_custom.Y)) print(loss) print(accuray) if __name__ == '__main__': train = TrainNewFace(sys.argv[1], camera_address=0) train.run()
gregoiredervaux/face_recognition
apps/train_init.py
import numpy as np
from models import face_describer_server, nn, dataloader, face_track_server
from configs import configs

'''
Builds the initial classifier from previously serialized face embeddings
and saves it as pretrained/init<ext>.
'''

if __name__ == '__main__':
    # Embedding backend (only needed if the commented load_data path is used).
    face_describer = face_describer_server.FDServer(
        model_fp=configs.face_describer_model_fp,
        input_tensor_names=configs.face_describer_input_tensor_names,
        output_tensor_names=configs.face_describer_output_tensor_names,
        device=configs.face_describer_device)
    face_tracker = face_track_server.FaceTrackServer()
    data = dataloader.DataLoader(face_describer)
    # Load embeddings serialized on a previous run (fast path).  Uncomment
    # load_data/serialyse to rebuild them from raw images in configs.db_path.
    data.deserialyse(configs.model_pretrained_path)
    #data.load_data(configs.db_path)
    #data.serialyse(configs.model_pretrained_path)
    data.shuffle()
    # One output unit per distinct class label found in the data.
    nn_model = nn.Model(output_shape=len(np.unique(np.array(data.Y))))
    #nn_model = nn.Model(path_to_model="../pretrained/init.hdf5")
    #data.split_from_index(128)
    nn_model.train_model_from_data(np.array(data.X), np.array(data.Y), 500)
    nn_model.save_model(configs.model_pretrained_path + "init" + configs.save_model_format)
    nn_model.model.summary()
    # NOTE(review): evaluated on (a subset of) the training data — this is a
    # sanity check, not a held-out accuracy figure.
    with nn_model.new_Graph.as_default():
        loss, test_accuracy = nn_model.model.evaluate(np.array(data.X[:1000]), np.array(data.Y[:1000]))
        print(test_accuracy)
gregoiredervaux/face_recognition
configs/configs.py
import os

# Project root: parent of the current working directory (apps/ scripts are
# run from inside their own folder).
BASE_PATH = '/'.join(os.getcwd().split('/')[:-1])  # Using ubuntu machine may require removing this -1

# Insightface describer graph: tensor names, device placement and weights.
face_describer_input_tensor_names = ['img_inputs:0', 'dropout_rate:0']
face_describer_output_tensor_names = ['resnet_v1_50/E_BN2/Identity:0']
face_describer_device = '/cpu:0'
face_describer_model_fp = '{}/pretrained/insightface.pb'.format(BASE_PATH)
# Input size (H, W) expected by the describer network.
face_describer_tensor_shape = (112, 112)
face_describer_drop_out_rate = 0.5

test_img_fp = '{}/tests/test.jpg'.format(BASE_PATH)
# Image databases: reference identities and user-enrolled ones.
db_path = '{}/db/'.format(BASE_PATH)
db_custom_path = '{}/db_custom/'.format(BASE_PATH)
# Where serialized embeddings and saved keras models live.
model_pretrained_path = '{}/pretrained/'.format(BASE_PATH)
save_model_format = ".hdf5"
# Distance threshold between two face descriptions to call them the same person.
face_similarity_threshold = 800
gregoiredervaux/face_recognition
models/nn.py
'''
Small keras classification head trained on face-embedding vectors.

Wraps the model in its own TF1 Graph/Session so it can coexist with the
frozen face-describer graph. Supports growing the softmax output layer by
one class at runtime (add_class) for incremental enrollment.
'''
import numpy as np
import keras
import tensorflow as tf
import matplotlib.pyplot as plt


class Model(object):
    def __init__(self, output_shape=None, path_to_model=None):
        """Build a fresh head with ``output_shape`` classes, or load one
        from ``path_to_model``. Exactly one of the two should be given."""
        # Dedicated graph + session keeps this model isolated from the
        # describer's TF graph.
        self.new_Graph = tf.Graph()
        with self.new_Graph.as_default():
            self.tfSess = tf.Session(graph=self.new_Graph)
            if path_to_model is None:
                self.model = keras.Sequential([
                    keras.layers.Dropout(0.4),
                    keras.layers.Dense(64, activation=tf.nn.sigmoid, name="hidden"),
                    keras.layers.Dropout(0.4),
                    keras.layers.Dense(output_shape, activation=tf.nn.softmax, name="output")
                ])
            else:
                self.model = keras.models.load_model(path_to_model)
            # Integer labels -> sparse categorical crossentropy.
            self.model.compile(optimizer=keras.optimizers.Adam(lr=0.0001),
                               loss="sparse_categorical_crossentropy",
                               metrics=['accuracy'])

    def train_model_from_weights(self, path_to_weight):
        """Load weights from ``path_to_weight`` into the compiled model.

        NOTE(review): the zero-epoch fit on empty arrays appears to be a
        trick to force keras to build the layers before load_weights —
        confirm it still works on the pinned keras version.
        """
        with self.new_Graph.as_default():
            self.model.fit(np.array([]), np.array([]), epochs=0)
            self.model.load_weights(path_to_weight)

    def train_model_from_data(self, X, Y, epoch=100):
        """Fit on (X, Y) with a 20% validation split and plot diagnostics.

        Side effect: opens blocking matplotlib windows (weight matrix,
        accuracy curve, loss curve).
        """
        with self.new_Graph.as_default():
            history = self.model.fit(X, Y, epochs=epoch, validation_split=0.2)
            weights = self.model.layers[-1].get_weights()
            plt.matshow(weights[0])
            plt.show()
            plt.figure(figsize=(10, 8))
            fig, ax = plt.subplots()
            ax.set(xlabel="Epochs", ylabel="Accuracy", title="Training")
            ax.grid()
            val = plt.plot(history.epoch, history.history['val_acc'], '--', label=' val_acc')
            plt.plot(history.epoch, history.history['acc'], color=val[0].get_color(), label='Train')
            plt.show()
            plt.figure(figsize=(10, 8))
            fig, ax = plt.subplots()
            ax.set(xlabel="Epochs", ylabel="Loss", title="Training")
            ax.grid()
            val = plt.plot(history.epoch, history.history['val_loss'], '--', label=' val_loss')
            plt.plot(history.epoch, history.history['loss'], color=val[0].get_color(), label='Train')
            plt.show()

    def add_class(self):
        """Grow the softmax output layer by one unit, keeping trained weights.

        The new unit's input weights are sampled from a normal distribution
        matching the existing weights' mean/std; its bias starts at 0. The
        old output layer is popped and replaced by a (n+1)-unit layer.
        """
        with self.new_Graph.as_default():
            weights = self.model.layers[-1].get_weights()
            mean = np.mean(weights[0])
            std = np.std(weights[0])
            # Append one random column to the kernel (one per input row).
            weights[0] = np.array([np.append(weights_ligne, np.random.normal(mean, std, 1))
                                   for weights_ligne in weights[0]])
            weights[1] = np.append(weights[1], 0)
            self.model.pop()
            new_layer = keras.layers.Dense(weights[1].shape[0], activation=tf.nn.softmax, name="output")
            self.model.add(new_layer)
            self.model.get_layer("output").set_weights(weights)
            # Re-compile: the optimizer state must match the new topology.
            self.model.compile(optimizer=keras.optimizers.Adam(lr=0.0001),
                               loss="sparse_categorical_crossentropy",
                               metrics=['accuracy'])
            self.model.summary()

    def get_nb_classes(self):
        """Number of output classes (width of the softmax layer)."""
        return self.model.layers[-1].output_shape[1]

    def save_weights(self, path):
        """Persist only the layer weights to ``path``."""
        self.model.save_weights(path)

    def save_model(self, path):
        """Persist the full model (architecture + weights) to ``path``."""
        self.model.save(path)
gregoiredervaux/face_recognition
apps/face_recognition_.py
import cv2
import numpy as np
from models import face_track_server, face_describer_server, nn, camera_server
from configs import configs

'''
The demo app utilize all servers in model folder with simple business scenario/logics:
    I have a camera product and I need to use it to find all visitors in my store who came here before.
Main logics is in the process function, where you can further customize.
'''


class Demo(camera_server.CameraServer):
    """Camera app: detect faces per frame and classify each one with the
    fine-tuned classifier head (init_custom)."""

    def __init__(self, *args, **kwargs):
        super(Demo, self).__init__(*args, **kwargs)
        self.face_tracker = face_track_server.FaceTrackServer()
        self.face_describer = face_describer_server.FDServer(
            model_fp=configs.face_describer_model_fp,
            input_tensor_names=configs.face_describer_input_tensor_names,
            output_tensor_names=configs.face_describer_output_tensor_names,
            device=configs.face_describer_device)
        self.nn_model = nn.Model(path_to_model="../pretrained/init_custom.hdf5")

    def processs(self, frame):
        """Per-frame callback: crop faces, embed them, print class scores.

        NOTE(review): the imshow/waitKey(0) pair blocks until a key is
        pressed for every face — debug aid, not suitable for live streaming.
        """
        self.face_tracker.process(frame)
        _faces = self.face_tracker.get_faces()
        _num_faces = len(_faces)
        if _num_faces == 0:
            return
        for _face in _faces:
            cv2.imshow("img", _face)
            cv2.waitKey(0)
            cv2.destroyAllWindows()
            # Resize to the describer's expected input and embed.
            _face_resize = cv2.resize(_face, configs.face_describer_tensor_shape)
            _data_feed = [np.expand_dims(_face_resize.copy(), axis=0), configs.face_describer_drop_out_rate]
            _face_description = self.face_describer.inference(_data_feed)[0][0]
            # Classify the embedding inside the classifier's own TF graph.
            with self.nn_model.new_Graph.as_default():
                Y = self.nn_model.model.predict(np.matrix(_face_description))
                print(Y[0])
                print(np.argmax(Y[0]))


if __name__ == '__main__':
    demo = Demo(camera_address=0)
    demo.run()
gregoiredervaux/face_recognition
models/face_track_server.py
import cv2
from faced import FaceDetector
from faced.utils import annotate_image

'''
This server:
    Input: Camera frame
    Output: Relative locations for each face, with [(tr_x, tr_y, bl_x, bl_y)]
    x1,y1 ------
    |          |
    |          |
    |          |
    --------x2,y2
'''


class FaceTrackServer(object):
    """Detects faces in a frame and exposes the crops and their locations.

    FIX: detection state previously lived in *class-level* mutable
    attributes, so every instance (and the class object itself) shared the
    same lists; state is now per-instance.
    """

    def __init__(self, down_scale_factor=0.25):
        assert 0 <= down_scale_factor <= 1
        # NOTE(review): down_scale_factor is stored but never applied in
        # process() — frames are detected at full resolution.
        self.down_scale_factor = down_scale_factor
        self.face_detector = FaceDetector()
        self.faces = []
        self.face_locations = []
        self.face_relative_locations = []
        self.cam_h = None
        self.cam_w = None
        self.camera_address = None

    def get_cam_info(self):
        """Return the last seen frame geometry and the camera address."""
        return {'camera': {'width': self.cam_w, 'height': self.cam_h, 'address': self.camera_address}}

    def reset(self):
        """Drop all detection results from the previous frame."""
        self.face_relative_locations = []
        self.face_locations = []
        self.faces = []

    def process(self, frame):
        """Detect faces in ``frame``; return the list of cropped face images."""
        self.reset()
        self.cam_h, self.cam_w, _ = frame.shape
        # faced expects RGB; OpenCV frames are BGR.
        rgb_img = cv2.cvtColor(frame.copy(), cv2.COLOR_BGR2RGB)
        self.face_locations = self.face_detector.predict(rgb_img)
        # Frames with more than one face are discarded entirely — the
        # enrollment pipeline only handles a single face per frame.
        if len(self.face_locations) > 1:
            self.face_locations = []
        for x, y, w, h, _ in self.face_locations:
            # Convert the detector's center-based box to corners, grown ~10%.
            x1 = int((x - int(w / 2)) * (1 - 0.1))
            y1 = int((y - int(h / 2)) * (1 - 0.1))
            x2 = int((x + int(w / 2)) * (1 + 0.1))
            y2 = int((y + int(h / 2)) * (1 + 0.1))
            _face_area = frame[y1:y2, x1:x2, :]
            if _face_area.size != 0:
                self.faces.append(_face_area)
        print('[FaceTracker Server] Found {} faces!'.format(len(self.faces)))
        return self.faces

    def get_faces_loc(self):
        """Raw detector output for the last processed frame."""
        return self.face_locations

    def get_faces(self):
        """Cropped face images from the last processed frame."""
        return self.faces
gregoiredervaux/face_recognition
models/dataloader.py
import numpy as np
import cv2
from configs import configs
import os
from keras.preprocessing.image import ImageDataGenerator
import json
from models import face_track_server


class DataLoader:
    """Builds (X, Y) training pairs of face embeddings / class indices.

    Images are read from one directory per class, faces are cropped with
    FaceTrackServer and embedded with the supplied face describer.
    """

    def __init__(self, face_describer):
        self.face_describer = face_describer
        self.face_tracker = face_track_server.FaceTrackServer()
        # Augmentation pipeline used to multiply each training image.
        self.image_generator = ImageDataGenerator(
            rotation_range=20,
            width_shift_range=0.,
            height_shift_range=0.2,
            horizontal_flip=True,
            fill_mode='nearest',
            brightness_range=(0.5, 1),
            featurewise_center=False
        )
        self.X = []           # embedding vectors
        self.Y = []           # integer class labels, parallel to X
        self.dict_Celeb = {}  # class index -> person/directory name

    def load_data(self, path_to_data):
        """Load every subdirectory of ``path_to_data`` as one class."""
        dir_list = os.listdir(path_to_data)
        for directory in dir_list:
            index = dir_list.index(directory)
            self.dict_Celeb[index] = directory
            self.load_class_data(path_to_data + directory, index)

    def load_class_data(self, db_class_path, class_index, with_generator=True):
        """Embed every image of one class; optionally add ~10 augmented
        copies of each usable image."""
        X = []
        Y = []
        for filename in os.listdir(db_class_path):
            print(db_class_path + "/" + filename)
            img = cv2.imread(db_class_path + "/" + filename)
            self.face_tracker.process(img)
            faces = self.face_tracker.get_faces()
            # Keep only unambiguous single-face images.
            if len(faces) == 1:
                X.append(self.getFeatures(faces[0]))
                Y.append(class_index)
                if with_generator:
                    img_augmented_array = self.image_generator.flow(np.array([img]))
                    i = 0
                    for img_aug in img_augmented_array:
                        # Generator yields float pixels in [0, 255]; rescale.
                        X.append(self.getFeatures(img_aug[0] / 255))
                        Y.append(class_index)
                        i += 1
                        if i > 10:
                            break
        self.X += X
        self.Y += Y

    def getFeatures(self, img):
        """Return the describer's embedding vector for one image."""
        img_resize = cv2.resize(img, configs.face_describer_tensor_shape)
        data_feed = [np.expand_dims(img_resize.copy(), axis=0), configs.face_describer_drop_out_rate]
        face_description = self.face_describer.inference(data_feed)[0][0]
        return face_description

    def serialyse(self, path, suffix=""):
        """Persist X/Y (and the class-name map, when present) under ``path``."""
        np.savetxt(path + "X" + suffix + ".txt", self.X)
        np.savetxt(path + "Y" + suffix + ".txt", self.Y)
        # BUG FIX: the condition used to be ``if not self.dict_Celeb``, which
        # wrote the name map only when it was EMPTY — clobbering an existing
        # dict_name.json with "{}" whenever per-class data was serialized
        # (as train_new_face.py does right before deserialysing it again).
        if self.dict_Celeb:
            with open(path + "dict_name.json", "w") as file:
                json.dump(self.dict_Celeb, file)

    def deserialyse(self, path, suffix=""):
        """Inverse of serialyse(): load X, Y and the class-name map."""
        self.X = np.loadtxt(path + "X" + suffix + ".txt").tolist()
        self.Y = np.loadtxt(path + "Y" + suffix + ".txt").astype(int).tolist()
        with open(path + "dict_name.json", "r") as file:
            self.dict_Celeb = json.load(file)

    def split_from_index(self, index):
        """Truncate every feature vector to its first ``index`` components."""
        self.X = [x[:index] for x in self.X]

    def shuffle(self):
        """Shuffle X and Y together, keeping pairs aligned."""
        # Stack labels as an extra column so one shuffle moves both.
        X = np.append(self.X, np.reshape(self.Y, (len(self.Y), 1)), axis=1)
        np.random.shuffle(X)
        self.Y = X[:, -1].astype(int).tolist()
        self.X = np.delete(X, -1, axis=1).tolist()
DavidTorresOcana/pointcloud_streamer
app.py
## (c) 2020 <NAME>
## This code is licensed under MIT license (see LICENSE.txt for details)
from flask import Flask, render_template
import argparse
from matplotlib import cm
from flask_socketio import SocketIO, emit
import time
import threading
import pykitti
import itertools
from utils import *

# App configuration:
app = Flask(__name__)
socketio = SocketIO(app, async_handlers=True, ping_timeout = 600)  # use gevent https://flask-socketio.readthedocs.io/en/latest/

# KITTI Velo: Configure here you sequence. Read https://github.com/utiasSTARS/pykitti
basedir = 'data'
date = '2011_09_26'
drive = '0001'
data = pykitti.raw(basedir, date, drive, frames=range(0, 107, 1))
# Cycle forever over the 107 scans so the stream never ends.
velo_iter = itertools.cycle(data.velo)

# Downsampling params: You can increase this up to Maximum points in your pointcloud (see utils.py)
# but performance may suffer because of Client's computing capability or bandwidth limitations
samples_size = 20000  # Same as in Threejs_app
# Fixed sampling mask computed once at startup and reused for every frame.
idx_downsample = get_skew_sample_idx(samples_size, np.array(range(64)))[0]


@app.route('/')
def index():
    """Serve the Three.js viewer page."""
    return render_template('points.html')


def get_cloud_packed():
    ''' Iterate through Pointcloud generator which provides data in the format
    [x,y,z,intensity] and packs (pointcloud, colors) into binaries ready for
    client's renderer'''
    velo_data = next(velo_iter)
    # Clamp the precomputed indices: scans can have fewer points than the mask.
    sampling = np.clip(idx_downsample, 0, velo_data.shape[0] - 1)
    points = velo_data[sampling, :3].astype(np.float32)
    intensities = (255. * velo_data[sampling, -1]).astype(int)
    # change to THREEjs coordinates (y up, right-handed swap of y/z).
    points[:, 1] *= -1.
    points[:, 1:] = points[:, 2:0:-1]
    # Package into binary straight to client's GPU
    points_bin = points.flatten().tobytes()
    color_map = cm.jet(intensities)[:, :3]
    # color_map = cm.jet( (127.*points[:,1] + 255.).astype(int) )[:,:3] # Z as color
    colors_bin = (255. * color_map.flatten()).astype(np.uint8).tobytes()
    return (points_bin, colors_bin)  # Return (False, False) to stop the streaming at any time


@socketio.on('socket_ready')
def confirmation_socket(in_data):
    """Handshake log: the client confirms its socket is up."""
    print(' Client says: ', in_data)


def cloud_stream_request_thread():
    ''' Thread pushing data in Async. manner: push at a set frequency'''
    time_before = time.time()
    while True:
        velo_bins = get_cloud_packed()
        if velo_bins[0] == False:
            socketio.emit('push_cloud_stream', False)  # send False to stop the streaming at any time
            return 0
        else:
            socketio.emit('push_cloud_stream', [velo_bins[0], velo_bins[1]])
        # Busy-ish wait with cooperative sleeps to cap the rate at 10 Hz.
        time_before = time.time()
        while (time.time() - time_before) < 0.1:  # 10Hz
            socketio.sleep(0.005)


# Launch Async. push
@socketio.on('cloud_stream_hungry')
def cloud_stream_request(in_data):
    """Start a background thread that pushes clouds at a fixed rate."""
    x = threading.Thread(target=cloud_stream_request_thread)
    x.start()
    return 'ok', 200


@socketio.on('cloud_hungry')
def cloud_request(in_data):
    '''Synchronous data pushing: Wait until client request (after receiving) data'''
    velo_bins = get_cloud_packed()
    emit('push_cloud', [velo_bins[0], velo_bins[1]])
    socketio.sleep(0)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(prog='Pointcloud streamer',
                                     description=" Demo of pointcloud streaming with Flask and Three.js")
    parser.add_argument("--port", type=int, help="port to launch in", default=9001)
    parser.add_argument('-d', '--debug', help="Launch in debug mode",
                        action="store_true", required = False, default=False)
    parser.add_argument('-p', '--production', help="Launch in production mode, otherwise development mode",
                        action="store_true", required = False)
    args = parser.parse_args()
    app.config['ENV'] = 'production' if args.production else 'development'
    app.config['DEBUG'] = args.debug
    socketio.run(app, host='localhost', port=args.port)
DavidTorresOcana/pointcloud_streamer
utils.py
import numpy as np


def get_skew_sample_idx(num_samples, velo_vertical_channel_shuffle):
    '''Generate non-uniform Velodyne HDL-64 pointcloud sampling indices.

    Builds a 64 x H boolean mask biased toward the lower laser channels
    (log-weighted noise), keeping roughly ``num_samples`` cells, then tops
    it up with random cells until at least ``num_samples`` are selected.

    Returns (flat_indices, sample_grid): the indices into the flattened,
    channel-reordered scan, and the raw 64 x H boolean mask.
    '''
    H_velo = int(132864 / 64)  # 132864 is aprox the maximum points for a Velodyne HDL-64
    uniform_probs = np.random.uniform(0, 1, (64, H_velo))
    x = np.linspace(0, H_velo - 1, H_velo)
    y = np.linspace(0, 1, 64)
    xv, yv = np.meshgrid(x, y)
    # Skew: weight row noise by log(3 - yv) so low channels keep more points.
    skew_probs = uniform_probs * np.log((1 - yv + 2))
    # normalize to mean 0.5 so the threshold below selects ~num_samples cells
    skew_probs = skew_probs * 0.5 / skew_probs.mean()
    sample_grid = skew_probs > (1 - num_samples / (64 * H_velo))
    while np.sum(sample_grid) < num_samples:  # Fill up to required sample size
        # BUG FIX: this used int(np.random.uniform(64)) which calls
        # uniform(low=64, high=1.0) — values in (1, 64), so row/col 0 could
        # never be picked. randint draws a proper uniform integer index.
        sample_grid[np.random.randint(0, 64), np.random.randint(0, H_velo)] = True
    return np.where(sample_grid[velo_vertical_channel_shuffle[::-1], :].T.flatten())[0], sample_grid
prakash218/Advanced-Invoice-generator
dependencies/__edit__address__.pyw
"""Tkinter dialog to edit (or append) a company's address in
dependencies/__companies.xlsx, then return to the invoice generator."""
from tkinter import *
from tkinter import messagebox
from xlrd import open_workbook
import openpyxl
import runpy
from os import path


def close():
    """Close this window and relaunch the main invoice generator script."""
    window.destroy()
    basepath = path.dirname(__file__)
    filepath = path.abspath(path.join(basepath, "..", "__invoice__generator__.pyw"))
    file_globals = runpy.run_path(filepath)


def new_entry(n = 5):
    """Validate the form and write the company row into the workbook.

    ``n`` is unused; it lets the function double as a <Return> key handler.
    """
    short = short_name.get()
    comp = company_name.get()
    add1 = add_1.get()
    add2 = add_2.get()
    add3 = add_3.get()
    gst = gst_num.get()
    # All six fields are mandatory.
    if len(short) == 0:
        messagebox.showinfo("Empty Field","Fill all the fields"); return
    if len(comp) == 0:
        messagebox.showinfo("Empty Field","Fill all the fields"); return
    if len(add1) == 0:
        messagebox.showinfo("Empty Field","Fill all the fields"); return
    if len(add2) == 0:
        messagebox.showinfo("Empty Field","Fill all the fields"); return
    if len(add3) == 0:
        messagebox.showinfo("Empty Field","Fill all the fields"); return
    if len(gst) == 0:
        messagebox.showinfo("Empty Field","Fill all the fields"); return
    # Find the row whose short name matches; if none matches, ``i`` ends up
    # at the first empty row (IndexError) and the entry is appended.
    loc = ("dependencies\__companies.xlsx")
    workb = open_workbook(loc)
    sheet1 = workb.sheet_by_index(0)
    for i in range(10000):
        try:
            val = sheet1.cell_value(i,0)
            if val == short:
                break
            else:
                continue
        except IndexError:
            break
    # Build the A..F cell addresses for row i+1 (openpyxl is 1-based).
    to_write = []
    xfile = openpyxl.load_workbook("dependencies\__companies.xlsx")
    sheet = xfile.get_sheet_by_name('companies')
    alpha = 'A'
    gst_no = "GST NO : " + str(gst)
    for j in range(6):
        val = alpha + str(i+1)
        nxt = ord(alpha)
        nxt += 1
        alpha = chr(nxt)
        to_write.append(val)
    # Re-scan to decide whether the short name already existed.
    loc = ("dependencies\__companies.xlsx")
    workb = open_workbook(loc)
    sheet1 = workb.sheet_by_index(0)
    print(i)
    show = False
    for k in range(i+1):
        value = sheet1.cell_value(k,0)
        print(value)
        if value == short:
            show = True
    # NOTE(review): the row is written and saved BEFORE the ``show`` check
    # below — when the company is not found, a new row has already been
    # appended and "Saved Successfully." shown before "Company Not Found".
    sheet[to_write[0]] = short
    sheet[to_write[1]] = comp
    sheet[to_write[2]] = add1
    sheet[to_write[3]] = add2
    sheet[to_write[4]] = add3
    sheet[to_write[5]] = gst_no
    xfile.save("dependencies\__companies.xlsx")
    Label(frame1,text = "Saved Successfully.",bg = '#00bfff').grid(row = 10,column = 5,padx = 5, pady = 10,ipady = 7)
    Button(frame1,text = "Go back",command = close,bg = '#4d4dff',activebackground = 'blue').grid(row = 10,column = 6,padx = 15, pady = 10)
    if not show:
        messagebox.showinfo("Not Found","Company Not Found"); return


# ---------------- window / form construction ----------------
window = Tk()
window.title("Edit Adress..")
window.geometry('500x400+100+100')
window.iconbitmap(r'dependencies\icon.ico')
window.configure(bg = '#4d4dff')
window.focus_set()
Label(window,text = "KKR Engineering", font = ('Calibri',16),bg = '#4d4dff',fg = 'white').grid()
frame1 = Frame(window,relief = RAISED,bd = 10, bg = '#00bfff')
window.bind('<Return>',new_entry)
frame1.grid(row = 5,padx = 10,pady = 10)
short_name = StringVar()
company_name = StringVar()
add_1 = StringVar()
add_2 = StringVar()
add_3 = StringVar()
gst_num = StringVar()
Label(frame1,text = "Short name:").grid(row = 1,column = 1,padx = 5,pady = 10)
entry1 = Entry(frame1,textvariable = short_name)
entry1.grid(row = 1,column = 4,padx = 5,pady = 10)
entry1.focus_set()
Label(frame1,text = "Company Name:").grid(row = 2,column = 1,padx = 5,pady = 10)
entry1 = Entry(frame1,textvariable = company_name).grid(row = 2,column = 4,padx = 5,pady = 10)
Label(frame1,text = "Address line 1:").grid(row = 3,column = 1,padx = 5,pady = 10)
entry1 = Entry(frame1,textvariable = add_1).grid(row = 3,column = 4,padx = 5,pady = 10)
Label(frame1,text = "Address line 2:").grid(row = 4,column = 1,padx = 5,pady = 10)
entry1 = Entry(frame1,textvariable = add_2).grid(row = 4,column = 4,padx = 5,pady = 10)
Label(frame1,text = "Address line 3:").grid(row = 5,column = 1,padx = 5,pady = 10)
entry1 = Entry(frame1,textvariable = add_3).grid(row = 5,column = 4,padx = 5,pady = 10)
Label(frame1,text = "GST Number:").grid(row = 6,column = 1,padx = 5,pady = 10)
entry1 = Entry(frame1,textvariable = gst_num).grid(row = 6,column = 4,padx = 5,pady = 10)
button1 = Button(frame1,text = "Update",command = new_entry,bg = '#4d4dff',activebackground = 'blue')
button1.grid(row = 10,column = 5,padx = 5,pady = 10)
button2 = Button(frame1,text = "Cancel",command = close,bg = '#4d4dff',activebackground = 'blue')
button2.grid(row = 10 , column = 6,padx = 5,pady = 10)
window.mainloop()
prakash218/Advanced-Invoice-generator
__invoice__generator__.pyw
"""Main invoice-generator window: collects invoice header data, copies the
invoice template, fills company/amount cells, records the invoice in
_details.xlsx and hands off to the item-entry dialog. Sub-dialogs are
launched by destroying this window and runpy-running the sibling script."""
import time
t1 = time.time()
from tkinter import *
from tkinter import messagebox
from tkcalendar import Calendar,DateEntry
import shutil
from time import localtime,strftime
from xlrd import open_workbook
from num2words import num2words
import openpyxl
from warnings import filterwarnings
import runpy

blue = "#4d4dff"
lblue = '#00bfff'
named_tuple = localtime()  # get struct_time
time_string = strftime("%d/%m/%Y", named_tuple)
global amount,listitem,win,inp


def Search():
    """Open the invoice-search dialog (replaces this window)."""
    screen.destroy()
    file_globals = runpy.run_path('dependencies\\__search__.pyw')

#to get items for invoice
def getitem(num):
    screen.destroy()
    file_globals = runpy.run_path('dependencies\\__list__.pyw')

#to get items for invoice
def delete():
    # NOTE(review): rebound to a Button widget at the bottom of this script;
    # the name only works as a command because it is captured first.
    screen.destroy()
    file_globals = runpy.run_path('dependencies\__delete__.pyw')

# to delete an invoice
def total():
    """Popup summarizing received vs pending amounts (invoice + 18% tax)."""
    global amount
    try:
        amount.destroy()
    except:
        a = 1  # no previous popup to close
    amount = Tk()
    loc = ("dependencies\_details.xlsx")
    workb = open_workbook(loc)
    sheet2 = workb.sheet_by_index(0)
    received = 0
    pending = 0
    # Column 8 holds the paid flag ("yes"/"no"); column 5 the base amount.
    for i in range(1,1000):
        try:
            val = sheet2.cell_value(i,8)
        except:
            val = "no"
        if val == 'yes':
            try:
                received += (sheet2.cell_value(i,5) + (sheet2.cell_value(i,5))*0.18)
            except:
                received += 0
        else:
            try:
                pending += (sheet2.cell_value(i,5) + (sheet2.cell_value(i,5))*0.18)
            except:
                pending += 0
    received1 = 'Rs. ' + str(int(received))
    pending1 = 'Rs. ' + str(int(pending))
    amount.title('Amount')
    amount.geometry('180x200+700+100')
    amount.resizable(False,False)
    amount.configure(bg = blue )
    amount.iconbitmap(r'dependencies\icon.ico')
    label = Label(amount,text = "KKR Engineering",font = ('Calibri',15),bg = blue,fg = 'white')
    label.place(x = 20,y = 10)
    frame2 = Frame(amount,bg = '#00bfff',relief = RAISED , bd = 10)
    frame2.place(x = 10,y= 40)
    label = Label(frame2,text = "Received").grid(row = 1,column = 1,padx = 10,pady = 10)
    label = Label (frame2,text = received1).grid(row = 1,column = 4,padx = 10,pady = 10)
    label = Label (frame2,text = "Pending").grid(row = 2,column = 1,padx = 10,pady = 10)
    label = Label (frame2,text = pending1).grid(row = 2,column = 4,padx = 10,pady = 10)
    label = Label(frame2)
    button = Button(amount,text = "Close",bg = '#00bfff',activebackground = 'blue',command = amount.destroy)
    button.place(x = 70,y = 160)


def edit():
    """Open the edit-address dialog (replaces this window)."""
    screen.destroy()
    file_globals = runpy.run_path('dependencies\__edit__address__.pyw')


def yesorno():
    """Open the received-payment dialog (replaces this window)."""
    screen.destroy()
    file_globals = runpy.run_path('dependencies\__yes__or__no__.pyw')


def new():
    """Open the new-company dialog (replaces this window)."""
    screen.destroy()
    file_globals = runpy.run_path('dependencies\__new__entry.pyw')


def check(inv):
    """Return 0 if invoice number ``inv`` already exists in _details.xlsx,
    else 1 (rows are keyed by invoice number == row index)."""
    loc = ("dependencies\_details.xlsx")
    workb = open_workbook(loc)
    sheet1 = workb.sheet_by_index(0)
    try:
        val = sheet1.cell_value(inv,0)
        if( val == inv):
            return 0
        return 1
    except IndexError:
        return 1


def submit1(n = 5):
    """Validate the form, create the invoice workbook from the template and
    record the header row; ``n`` is unused (doubles as <Return> handler)."""
    global inp
    filterwarnings("ignore", category=DeprecationWarning)
    try:
        in_no = int(invoice_number.get())
    except:
        messagebox.showinfo("Invoice number","Please enter a valid Invoice number"); return
    if not check(in_no):
        messagebox.showinfo("Invoice number","Invoice already found"); return
    if in_no <= 0:
        messagebox.showinfo("Invoice number","Please enter a valid Invoice number"); return
    try:
        cmp = company.get()
        if cmp == "None":
            messagebox.showinfo("Company name","Please choose a valid company"); return
    except:
        messagebox.showinfo("Company name","Please enter a valid Company Name"); return
    invName = cmp
    po_num = po_number.get()
    if len(po_num) == 0:
        messagebox.showinfo("PO number","Please enter a valid PO Number"); return
    podate = po_date.get()
    try:
        invAmt = int(invoice_amount.get())
    except:
        messagebox.showinfo("Invoice Amount","Please enter a valid Invoice Amount"); return
    #------------------path creation-----------
    # Zero-pad the invoice number to 3 digits: "007<company>.xlsx".
    var = str(in_no)
    if(len(var) == 1):
        var = '00' + str(in_no)
    elif (len(var) == 2):
        var = '0'+ str(in_no)
    var = var+cmp+'.xlsx'
    #---------------number to word-----------------------
    tax = invAmt * 0.18
    tax = int(tax)
    tax_word = num2words(tax,lang = 'en_IN')
    total = invAmt + tax  # NOTE(review): shadows the total() function locally
    total = int(total)
    total_word = num2words(total,lang = 'en_IN')
    #-----------------creating new invoice-----------
    original = r'dependencies\_test_invoice.xlsx'
    target = var
    shutil.copyfile(original, target)
    #--------------------filling details in invoice----------
    loc = ("dependencies\__companies.xlsx")
    workb = open_workbook(loc)
    sheet1 = workb.sheet_by_index(0)
    write = []
    for i in range(10000):
        try:
            name = sheet1.cell_value(i,0)
            if invName == name:
                invName = name
                write.append(sheet1.cell_value(i,1))
                write.append(sheet1.cell_value(i,2))
                write.append(sheet1.cell_value(i,3))
                write.append(sheet1.cell_value(i,4))
                write.append(sheet1.cell_value(i,5))
                break
        except:
            return
            break  # NOTE(review): unreachable after return
    xfile = openpyxl.load_workbook(var)
    sheet = xfile.get_sheet_by_name('original')
    sheet['E5'] = in_no
    sheet['E7'] = po_num
    sheet['G5'] = time_string
    sheet['G7'] = podate
    sheet['A32']= 'INR '+total_word+' ONLY'
    sheet['A38']= 'INR '+tax_word+' ONLY'
    sheet['A9'] = write[0]
    sheet['A10']= write[1]
    sheet['A11']= write[2]
    sheet['A12']= write[3]
    sheet['A13']= write[4]
    xfile.save(var)
    details = [in_no,time_string,podate,po_num,invName,invAmt]
    #---------------------saving details.xlsx---------
    new = openpyxl.load_workbook('dependencies\_details.xlsx')
    first = new.get_sheet_by_name('Details')
    list = ['A','B','C','D','E','F']  # NOTE(review): shadows builtin list
    for i in range(6):
        to_write = list[i] + str(in_no+1)
        #print(to_write , details[i])
        first[to_write] = details[i]
    new.save('dependencies\_details.xlsx')
    getitem(5)


#----------------------------close button------------------
def cancel():
    """Confirm, then tear down the popup (if any) and the main window."""
    global amount
    MsgBox = messagebox.askquestion ('Exit Application','Are you sure you want to exit the application',icon = 'warning')
    if MsgBox == 'yes':
        try:
            amount.destroy()
            screen.destroy()
        except:
            screen.destroy()


#--------------screen------------------
try:
    screen.destroy()
except:
    a = 1  # first run: no previous screen exists
screen = Tk()
screen.title("Invoice generator ")
screen.configure(bg = blue)
screen.iconbitmap(r'dependencies\icon.ico')
screen.geometry('600x400+100+100')
screen.resizable(False,False)
screen.bind('<Return>',submit1)
screen.focus_set()
screen.protocol("WM_DELETE_WINDOW", cancel)
Label(text = "KKR Engineering",bg = blue,fg = 'white',font = ("Times New Roman CE",16)).grid(row = 1)
hi = Label(text = "Invoice generator",bg = blue, fg = 'black',font = ("calibri",15)).grid(row = 2)
#label(text="").pack()
#--------------------------------------
#---------------variables-------------
# Company dropdown choices come from column A of __companies.xlsx.
loc = ("dependencies\__companies.xlsx")
workb = open_workbook(loc)
sheet1 = workb.sheet_by_index(0)
companies = []
for i in range(1000):
    try:
        val = sheet1.cell_value(i,0)
        if len(val) > 0:
            companies.append(val)
        else:
            break
    except IndexError:
        break
if len(companies) == 0:
    companies.append("No companies found..")
# Default invoice number: first gap in the consecutive numbering.
loc = ("dependencies\_details.xlsx")
workb = open_workbook(loc)
sheet1 = workb.sheet_by_index(0)
for i in range(1,1000):
    try:
        val = sheet1.cell_value(i,0)
        if val == i:
            continue
        else:
            break
    except IndexError:
        break
invoice_number = StringVar()
invoice_number.set(i)
invoice_date = StringVar()
po_number = StringVar()
po_date = StringVar()
invoice_amount = StringVar()
company = StringVar()
company.set(companies[0])
#--------------------details--------------------
frame = Frame(relief = RAISED,bd = 10, bg = lblue)
frame.grid(row = 5,padx = 10,pady = 10)
invoice = Label(frame,text = " Invoice Number: ").grid(row = 1,column = 1,pady = 5,padx = 5,sticky = W)
entry1 = Entry(frame,textvariable = invoice_number)
entry1.grid(row = 1,column = 4,pady = 5,sticky = W)
invoice = Label(frame,text = "Company Name:").grid(row = 2,column = 1,pady = 5,padx = 5,sticky = W)
entry = OptionMenu(frame,company,*companies)
entry.grid(row = 2,column = 4,pady = 5,padx = 5,sticky = W)
invoice = Label(frame,text = " PO Number: ").grid(row = 3,column = 1,pady = 5,padx = 5,sticky = W)
entry1 = Entry(frame,textvariable = po_number)
entry1.grid(row = 3,column = 4,pady = 5,sticky = W)
entry1.focus_set()
invoice = Label(frame,text = " PO Date: ").grid(row = 4,column = 1,pady = 5,padx = 5,sticky = W)
cal = DateEntry(frame,locale = 'en_IN',date_pattern = "dd/mm/yyyy",width=30,bg="#4d4dff",fg="black",textvariable = po_date)
cal.grid(row = 4,column = 4,pady = 5,sticky = W)
invoice = Label(frame,text = " Invoice Amount:").grid(row = 5,column = 1,pady = 5,padx = 5,sticky = W)
entry1 = Entry(frame,textvariable = invoice_amount).grid(row = 5,column = 4,pady = 5,sticky = W)
#-------------buttons-----------
newButton = Button(frame, text="Add New Company",background = blue,activebackground = 'blue',fg = 'white',command = new,cursor = 'hand2')
newButton.grid(row = 10,column = 1,padx = 10,pady = 10,sticky = S)
newButton = Button(frame, text="Add Received Payment",background = blue,activebackground = 'blue',fg = 'white',command = yesorno,cursor = 'hand2')
newButton.grid(row = 10,column = 4,padx = 10,pady = 10,sticky = S)
cancelButton = Button(frame, text="Cancel",background = blue,activebackground = 'blue',fg = 'white',command = cancel,cursor = 'hand2')
cancelButton.grid(row = 11,column = 6,padx = 10,pady = 10,sticky = S)
okButton = Button(frame, text="Edit Address",command = edit,background = blue,activebackground = 'blue',fg = 'white',cursor = 'hand2')
okButton.grid(row = 10 ,column = 5,padx = 10,pady = 10,sticky = S)
okButton = Button(frame, text="Submit",command = submit1,background = blue,activebackground = 'blue',fg = 'white',cursor = 'hand2')
okButton.grid(row = 11 ,column = 5,pady = 10,padx = 10,sticky = S)
totalButton = Button(frame,text = "Total and Pending", command = total,background = blue,activebackground = 'blue',fg = 'white',cursor = 'hand2')
totalButton.grid(row = 10,column = 6,pady = 10, padx = 10,sticky = S)
delete = Button(frame,text = "Delete an Invoice", command = delete,background = blue,activebackground = 'blue',fg = 'white',cursor = 'hand2')
delete.grid(row = 11,column = 1,pady = 10, padx = 10,sticky = S)
search = Button(frame,text = "Search details" ,command = Search,background = blue,activebackground = 'blue',fg = 'white',cursor = 'hand2')
search.grid(row = 11,column = 4,pady = 10, padx = 10,sticky = S)
#Label(frame,text = "",bg = "#00bfff").grid(row = 11,column = 1,ipady = 10)
Label(text = "",bg = "#4d4dff").grid(row = 11,column = 1,ipady = 10)
#-----------------------------
t2 = time.time()
print('time:%.2f'%(t2-t1))
screen.mainloop()
prakash218/Advanced-Invoice-generator
dependencies/__list__.pyw
"""Dialog that asks how many line items (1-5) to add to the most recent
invoice, collects each as a comma-separated 'name,hsn,qty,price' string and
writes them into the invoice workbook, then relaunches the generator."""
from tkinter import *
from tkinter import messagebox
import openpyxl
from xlrd import open_workbook
import runpy
from os import path

global items,item,no,inputval,flag
flag = 0  # set to 1 once a valid item count has been chosen


def setval(n = 5):
    """Read the chosen item count and close the count popup.
    ``n`` is unused (doubles as <Return> handler)."""
    global flag
    flag = 1
    global items,no,item
    try:
        item = int(items.get())
    except:
        messagebox.showinfo("Warning","Please Choose a valid Number")
        return
    print(item)
    no.destroy()


def noof():
    """Popup asking for the number of items (1-5); blocks until submitted."""
    global no,items
    no = Tk()
    items = StringVar()
    no.geometry('300x100+100+100')
    no.configure(bg = '#00bfff')
    no.bind('<Return>',setval)
    no.focus_set()
    num = [1,2,3,4,5]
    items.set('none')
    label = Label(no,text = 'Enter the number of items:')
    label.place(x = 10,y = 20)
    entry = OptionMenu(no,items,*num)
    entry.focus_set()
    entry.place(x = 170,y = 20)
    button = Button(no,text = 'submit',bg = '#4d4dff',command = setval)
    button.place(x = 50,y = 50)
    no.mainloop()


def getinp(n = 5):
    """Parse the entered items and write them into the latest invoice file.

    Each entry must have 4 comma-separated fields: name, hsn, qty, price.
    Items land on alternating rows 15/17/19/21/23 of the 'original' sheet.
    """
    global inp1,inp2,inp3,inp4,inp5,item
    to_write=[]
    loc = ("dependencies\_details.xlsx")
    workb = open_workbook(loc)
    if item >= 1:
        item1 = inp1.get()
        item1 = list(item1.split(','))
        if len(item1) < 4:
            messagebox.showinfo("Warning","Please enter the details correctly"); return
        to_write.append(item1)
    if item >= 2:
        item2 = inp2.get()
        item2 = list(item2.split(','))
        if len(item2) < 4:
            messagebox.showinfo("Warning","Please enter the details correctly"); return
        to_write.append(item2)
    if item >= 3:
        item3 = inp3.get()
        item3 = list(item3.split(','))
        if len(item3) < 4:
            messagebox.showinfo("Warning","Please enter the details correctly"); return
        to_write.append(item3)
    if item >= 4:
        item4 = inp4.get()
        item4 = list(item4.split(','))
        if len(item4) < 4:
            messagebox.showinfo("Warning","Please enter the details correctly"); return
        to_write.append(item4)
    if item >= 5:
        item5 = inp5.get()
        item5 = list(item5.split(','))
        if len(item5) < 4:
            messagebox.showinfo("Warning","Please enter the details correctly"); return
        to_write.append(item5)
    # Locate the last populated row of _details.xlsx == latest invoice number.
    invno = 0
    sheet1 = workb.sheet_by_index(0)
    for i in range(1,10000):
        try:
            val = sheet1.cell_value(i,0)
            try:
                val1 = sheet1.cell_value(i+1,0)
            except:
                invno = i
                break
        except:
            messagebox.showinfo("Warning","No invoice Found...")
            win.destroy()
            basepath = path.dirname(__file__)
            filepath = path.abspath(path.join(basepath, "..", "__invoice__generator__.pyw"))
            file_globals = runpy.run_path(filepath)
    print(invno," ",to_write)
    # Rebuild the "<3-digit invno><company>.xlsx" filename of the invoice.
    cmp = str(sheet1.cell_value(invno,4))
    var = str(invno)
    if(len(var) == 1):
        var = '00' + str(invno)
    elif (len(var) == 2):
        var = '0'+ str(invno)
    var = var+cmp+'.xlsx'
    xfile = openpyxl.load_workbook(var)
    sheet = xfile.get_sheet_by_name('original')
    # Columns: B name, D hsn, G qty, F price, I line total, E fixed 18% GST.
    if item >= 1:
        sheet['A15'] = '1'
        sheet['B15'] = to_write[0][0]
        sheet['D15'] = to_write[0][1]
        sheet['E15'] = '18%'
        sheet['G15'] = int(to_write[0][2])
        sheet['F15'] = int(to_write[0][3])
        sheet['H15'] = 'no.'
        sheet['I15'] = (int(to_write[0][2]) * int(to_write[0][3]))
    if item >= 2:
        sheet['A17'] = '2'
        sheet['B17'] = to_write[1][0]
        sheet['D17'] = to_write[1][1]
        sheet['E17'] = '18%'
        sheet['G17'] = int(to_write[1][2])
        sheet['F17'] = int(to_write[1][3])
        sheet['H17'] = 'no.'
        sheet['I17'] = (int(to_write[1][2]) * int(to_write[1][3]))
    if item >= 3:
        sheet['A19'] = '3'
        sheet['B19'] = to_write[2][0]
        sheet['D19'] = to_write[2][1]
        sheet['E19'] = '18%'
        sheet['G19'] = int(to_write[2][2])
        sheet['F19'] = int(to_write[2][3])
        sheet['H19'] = 'no.'
        sheet['I19'] = (int(to_write[2][2]) * int(to_write[2][3]))
    if item >= 4:
        sheet['A21'] = '4'
        sheet['B21'] = to_write[3][0]
        sheet['D21'] = to_write[3][1]
        sheet['E21'] = '18%'
        sheet['G21'] = int(to_write[3][2])
        sheet['F21'] = int(to_write[3][3])
        sheet['H21'] = 'no.'
        sheet['I21'] = (int(to_write[3][2]) * int(to_write[3][3]))
    if item >= 5:
        sheet['A23'] = '5'
        sheet['B23'] = to_write[4][0]
        sheet['D23'] = to_write[4][1]
        sheet['E23'] = '18%'
        sheet['G23'] = int(to_write[4][2])
        sheet['F23'] = int(to_write[4][3])
        sheet['H23'] = 'no.'
        sheet['I23'] = (int(to_write[4][2]) * int(to_write[4][3]))
    xfile.save(var)
    # Hand control back to the main generator window.
    win.destroy()
    basepath = path.dirname(__file__)
    filepath = path.abspath(path.join(basepath, "..", "__invoice__generator__.pyw"))
    file_globals = runpy.run_path(filepath)


noof()
# Only build the item-entry form if a valid count was chosen above.
if flag:
    win = Tk()
    win.iconbitmap(r'dependencies\icon.ico')
    win.focus_set()
    win.title('Add items')
    win.bind('<Return>',getinp)
    inp1 = StringVar()
    inp2 = StringVar()
    inp3 = StringVar()
    inp4 = StringVar()
    inp5 = StringVar()
    win.geometry('500x300+100+100')
    win.configure(bg = '#00bfff')
    label = Label(win,text = 'Enter the item name,hsn,qty,price',fg = 'red', bg = '#00bfff',font = ('Calibri',14)).grid(row = 1, column = 4)
    if item >= 1:
        label = Label(win,text = 'List Item:').grid(row = 2,column = 1,padx = 5,pady = 10)
        entry = Entry(win,textvariable = inp1)
        entry.grid(row = 2,column = 4,pady = 10,padx = 5)
        entry.focus_set()
    if item >= 2:
        label = Label(win,text = 'List Item:').grid(row = 3,column = 1,pady = 10,padx = 5)
        entry = Entry(win,textvariable = inp2).grid(row = 3,column = 4,pady = 10,padx = 5)
    if item >= 3:
        label = Label(win,text = 'List Item:').grid(row = 4,column = 1,pady = 10,padx = 5)
        entry = Entry(win,textvariable = inp3).grid(row = 4,column = 4,pady = 10,padx = 5)
    if item >= 4:
        label = Label(win,text = 'List Item:').grid(row = 5,column = 1,pady = 10,padx = 5)
        entry = Entry(win,textvariable = inp4).grid(row = 5,column = 4,pady = 10,padx = 5)
    if item >= 5:
        label = Label(win,text = 'List Item:').grid(row = 6,column = 1,pady = 10,padx = 5)
        entry = Entry(win,textvariable = inp5).grid(row = 6,column = 4,pady = 10,padx = 5)
    button = Button(win,text = 'Submit',bg = '#4d4dff',command = getinp)
    button.grid(row = 10,column = 5)
    win.mainloop()
prakash218/Advanced-Invoice-generator
dependencies/__yes__or__no__.pyw
from tkinter import *
from tkinter import messagebox
import sys
from xlrd import open_workbook
import openpyxl
import warnings
from os import path
import runpy


def save(write):
    """Mark invoice cell `write` (e.g. 'I12') as paid ("yes") in the details workbook."""
    # Blanks the YES/NO button area by painting a label in the background colour.
    Label(text = 'YES ',bg = '#00bfff',fg = '#00bfff').place(x = 160,y = 330,height = 40)
    # NOTE(review): '\_' is not an escape sequence so the literal survives, but a raw
    # string would be safer; get_sheet_by_name() is deprecated in openpyxl.
    new = openpyxl.load_workbook('dependencies\_details.xlsx')
    first = new.get_sheet_by_name('Details')
    first[write] = "yes"
    new.save('dependencies\_details.xlsx')


def no(write):
    """Mark invoice cell `write` as unpaid ("no") in the details workbook."""
    # Same blanking trick as save(); only the stored value differs.
    Label(text = 'YES ',bg = '#00bfff',fg = '#00bfff').place(x = 160,y = 330,height = 40)
    new = openpyxl.load_workbook('dependencies\_details.xlsx')
    first = new.get_sheet_by_name('Details')
    first[write] = "no"
    new.save('dependencies\_details.xlsx')


def check(n = 5):
    """Look up the typed invoice number and display its details with YES/NO buttons.

    `n` is unused; it only absorbs the event argument from the <Return> binding.
    """
    msg = StringVar()
    val = 0
    TEXT = ""
    try:
        inv = int(inv_no.get())
    except:
        messagebox.showinfo("Invoice number","Please enter a valid Invoice number");
        return
    # Clear the previous result area before re-querying.
    entry2 = Label(root,text = TEXT).place(x = 152,y = 206,height = 115,width = 200)
    loc = ("dependencies\_details.xlsx")
    workb = open_workbook(loc)
    label = Label(root,text = "Invoice details" ,fg = 'black').place(x = 10,y = 200)
    sheet1 = workb.sheet_by_index(0)
    try:
        # Row `inv` of the sheet is assumed to hold invoice number `inv` in column 0.
        if(sheet1.cell_value(1,0) == 1):
            try:
                val = sheet1.cell_value(inv,0)
            except IndexError:
                TEXT = "Invoice not found"
            if( val == inv):
                TEXT = ''
                # Build a "header - value" line per column (invoice no, dates, PO, company, amount).
                TEXT = str(sheet1.cell_value(0,0)) + str(" - ") + str(int(sheet1.cell_value(inv,0)))+ '\n'
                TEXT = TEXT +str(sheet1.cell_value(0,1))+str(" - ")+str(sheet1.cell_value(inv,1))+'\n'
                TEXT = TEXT+str(sheet1.cell_value(0,2))+str(" - ")+str(sheet1.cell_value(inv,2))+'\n'
                TEXT = TEXT+str(sheet1.cell_value(0,3))+str(" - ")+str(sheet1.cell_value(inv,3)) +'\n'
                TEXT = TEXT+str(sheet1.cell_value(0,4))+str(" - ")+str(sheet1.cell_value(inv,4))+'\n'
                TEXT = TEXT+str(sheet1.cell_value(0,5))+str(" - ")+str(sheet1.cell_value(inv,5))+'\n'
                # Column I of row inv+1 is the "received" flag written by save()/no().
                to_write = 'I' + str(inv+1)
            else:
                TEXT = "No Invoice Found ....."+"\n"+"Please Add a Invoice..."
    except IndexError:
        TEXT = "No Invoice Found .....Please Add a Invoice..."
    if len(TEXT) == 0:
        TEXT = "Invoice not found"
    entry2 = Label(root,text = TEXT).place(x = 152,y = 206)
    # NOTE(review): to_write is only bound on the success path; the lambdas are safe
    # because the buttons are only placed when a full detail text (>100 chars) was built.
    button = Button(root,text = "YES",command = lambda:save(to_write),background = '#4d4dff',activebackground = 'blue')
    entry3 = Button(root,text = "NO",command = lambda:no(to_write),background = '#4d4dff',activebackground = 'blue')
    if len(TEXT) > 100:
        button.place(x = 160,y = 330)
        entry3.place(x = 200,y = 330)
    else:
        # Hide the button area when there is nothing to confirm.
        Label(text = 'YES ',bg = '#00bfff',fg = '#00bfff').place(x = 160,y = 330,height = 40)


def close():
    """Close this window and re-launch the main invoice generator script."""
    root.destroy()
    basepath = path.dirname(__file__)
    filepath = path.abspath(path.join(basepath, "..", "__invoice__generator__.pyw"))
    file_globals = runpy.run_path(filepath)


# ---- top-level UI: "Add received payment" search window ----
root = Tk()
root.geometry('366x450+100+100')
root.iconbitmap(r'dependencies\icon.ico')
root.title('Add received payment')
root.bind('<Return>',check)
root.configure(bg = '#00bfff')
root.focus_set()
inv_no = StringVar()
label = Label(root,text = "KKR Engineering",bg = '#00bfff',fg = 'white',font= ("calibri",16)).place(x = 102,y = 20)
label = Label(root,text = "Invoice Number:",fg = 'black').place(x = 10,y = 100)
entry = Entry(root,textvariable = inv_no)
entry.place(x = 160,y = 100)
entry.focus_set()
submit = Button(root,text = "Search",command = check,background = '#4d4dff',activebackground = 'blue').place(x = 230,y = 400)
cancel = Button(root,text = "Cancel",command = close,background = '#4d4dff',activebackground = 'blue').place(x = 290,y = 400)
mainloop()
prakash218/Advanced-Invoice-generator
dependencies/__search__.pyw
from tkinter import *
from xlrd import open_workbook
import sys
from tkinter import messagebox
from os import path
import runpy

# Shared colour constants for the whole UI.
blue = "#4d4dff"
lblue = '#00bfff'

global root


def submit(num = 5):
    """Run the selected search (invoice number / company / PO number) and show results.

    `num` is unused; it only absorbs the event argument from the <Return> binding.
    Results are accumulated as one newline-joined string per column and rendered
    side-by-side in a scrollable canvas window.
    """
    # scrollbar = Scrollbar(search)
    # scrollbar.grid(row = 0, column = 1,stick = 'ns')
    # text = 'invoice number'+ '\t' +'invoice date'+ '\t' +'PO Date'+ '\t' +'PO Number'+ '\t' +'Company name'+ '\t' +'Amount'+ '\t' +'Received'
    global root
    # Destroy any previous results window; first call has no `root` yet.
    try:
        root.destroy()
    except:
        a = 1
    root = Tk()
    root.geometry('600x400+100+100')
    text = ''
    # One accumulator string per output column; each hit appends a '\n'-separated value.
    inno = "Invoice Number:"
    indate = "Invoce Date:"
    datepo = "PO Date:"
    po = "PO Number:"
    cmpy = "Company:"
    amount = "Amount:"
    recd = "Recieved:"
    s = searchfor.get()
    if len(s) == 0:
        messagebox.showinfo("Inavlid","Please enter a valid Search term");
        return
    opt = choice.get()
    loc = ("dependencies\\_details.xlsx")
    workb = open_workbook(loc)
    sheet1 = workb.sheet_by_index(0)
    rcd = ''
    if opt == 'invoice number':
        try:
            s = int(s)
        except:
            messagebox.showinfo("Inavlid","Please enter a valid Invoice Number");
            return
        # Scan every row; the except/continue skips blank rows and the header.
        for i in range(10000):
            try:
                inv = sheet1.cell_value(i,0)
                date = sheet1.cell_value(i,1)
                podate = sheet1.cell_value(i,2)
                ponum = sheet1.cell_value(i,3)
                comp = sheet1.cell_value(i,4)
                # Column 5 is the pre-tax amount; 18% GST is added here.
                amt = sheet1.cell_value(i,5) + sheet1.cell_value(i,5) * 0.18
                try:
                    rcd = sheet1.cell_value(i,8)
                except IndexError:
                    rcd ="no"
                if inv == int(s):
                    amt = 'Rs.'+str(round(amt,2))
                    if rcd == '':
                        rcd = 'no'
                    # text = 'invoice number'+ '\t' +'invoice date'+ '\t' +'PO Date'+ '\t' +'PO Number'+ '\t' +'Company name'+ '\t' +'Amount'+ '\t' +'Received'
                    inno += '\n' + str(int(inv))
                    indate += '\n' + str(date)
                    datepo += '\n' + str(podate)
                    po += '\n' +str(ponum)
                    cmpy += '\n' + str(comp)
                    amount += '\n'+(amt)
                    recd += '\n' + str(rcd)
                    print(str(int(inv)) + '\t' + str(date) + '\t' + str(podate) + '\t' + str(ponum) + '\t' + str(comp) + '\t' +str(amt) + '\t' +str(rcd))
                    # Invoice numbers are unique, so stop at the first match.
                    break
            except Exception as e:
                print(e,i)
                continue
    if opt == 'company name':
        # Case-insensitive substring match; collects ALL matching rows (no break).
        s = s.lower()
        for i in range(10000):
            try:
                inv = sheet1.cell_value(i,0)
                date = sheet1.cell_value(i,1)
                podate = sheet1.cell_value(i,2)
                ponum = sheet1.cell_value(i,3)
                comp = sheet1.cell_value(i,4)
                amt = sheet1.cell_value(i,5) + sheet1.cell_value(i,5) * 0.18
                try:
                    rcd = sheet1.cell_value(i,8)
                except IndexError:
                    rcd ="no"
                #if found add the details
                if s in comp.lower():
                    amt = 'Rs.'+str(round(amt,2))
                    if rcd == '':
                        rcd = 'no'
                    # text = 'invoice number'+ '\t' +'invoice date'+ '\t' +'PO Date'+ '\t' +'PO Number'+ '\t' +'Company name'+ '\t' +'Amount'+ '\t' +'Received'
                    inno += '\n' + str(int(inv))
                    indate += '\n' + str(date)
                    datepo += '\n' + str(podate)
                    po += '\n' +str(ponum)
                    cmpy += '\n' + str(comp)
                    amount += '\n'+str(amt)
                    recd += '\n' + str(rcd)
                    print(str(int(inv)) + '\t' + str(date) + '\t' + str(podate) + '\t' + str(ponum) + '\t' + str(comp) + '\t' +str(amt) + '\t' +str(rcd))
            except Exception as e:
                print(e,i)
                continue
    if opt == 'PO number':
        # Exact PO-number match; stops at the first hit like the invoice search.
        for i in range(10000):
            try:
                inv = sheet1.cell_value(i,0)
                date = sheet1.cell_value(i,1)
                podate = sheet1.cell_value(i,2)
                ponum = sheet1.cell_value(i,3)
                comp = sheet1.cell_value(i,4)
                amt = sheet1.cell_value(i,5) + sheet1.cell_value(i,5) * 0.18
                try:
                    rcd = sheet1.cell_value(i,8)
                except IndexError:
                    rcd ="no"
                if ponum == s:
                    amt = 'Rs.'+str(round(amt,2))
                    if rcd == '':
                        rcd = 'no'
                    # text = 'invoice number'+ '\t' +'invoice date'+ '\t' +'PO Date'+ '\t' +'PO Number'+ '\t' +'Company name'+ '\t' +'Amount'+ '\t' +'Received'
                    inno += '\n' + str(int(inv))
                    indate += '\n' + str(date)
                    datepo += '\n' + str(podate)
                    po += '\n' +str(ponum)
                    cmpy += '\n' + str(comp)
                    amount += '\n'+ amt
                    recd += '\n' + str(rcd)
                    print(str(int(inv)) + '\t' + str(date) + '\t' + str(podate) + '\t' + str(ponum) + '\t' + str(comp) + '\t' +str(amt) + '\t' +str(rcd))
                    break
            except Exception as e:
                print(e,i)
                continue
    # Render the seven accumulated columns inside a vertically scrollable canvas.
    canvas = Canvas(root,bg = lblue)
    scroll_y = Scrollbar(root, orient="vertical", command=canvas.yview)
    frame1 = Frame(canvas,relief = RAISED, bg = lblue)
    label = Label(frame1,text = inno,bg = lblue)
    label.grid(row = 5,column =1)
    label = Label(frame1,text = indate,bg = lblue)
    label.grid(row = 5,column =2)
    label = Label(frame1,text = datepo,bg = lblue)
    label.grid(row = 5,column =3)
    label = Label(frame1,text = po,bg = lblue)
    label.grid(row = 5,column =4)
    label = Label(frame1,text = cmpy,bg = lblue)
    label.grid(row = 5,column =5)
    label = Label(frame1,text = amount,bg = lblue)
    label.grid(row = 5,column =6)
    label = Label(frame1,text = recd,bg = lblue)
    label.grid(row = 5,column =7)
    canvas.create_window(0, 0, anchor='nw', window=frame1)
    # make sure everything is displayed before configuring the scrollregion
    canvas.update_idletasks()
    canvas.configure(scrollregion=canvas.bbox('all'), yscrollcommand=scroll_y.set)
    canvas.pack(fill='both', expand=True, side='left')
    scroll_y.pack(fill='y', side='right')


def close():
    """Close the search window and re-launch the main invoice generator script."""
    search.destroy()
    basepath = path.dirname(__file__)
    filepath = path.abspath(path.join(basepath, "..", "__invoice__generator__.pyw"))
    file_globals = runpy.run_path(filepath)


# ---- top-level UI: search form ----
search = Tk()
search.title("Search")
searchfor = StringVar()
choice = StringVar()
search.iconbitmap(r'dependencies\icon.ico')
search.geometry('350x250+100+100')
search.bind('<Return>',submit)
search.configure(bg = blue)
Label(text = "KKR Engineering",bg = blue,fg = 'white',font = ("Times New Roman CE",16)).grid(row = 1,column = 8)
options = ["invoice number","company name","PO number"]
choice.set(options[0])
frame = Frame(search,bg = lblue,relief = RAISED,bd = 10)
frame.grid(row = 3 ,padx = 25,column = 8)
entry = Entry(frame,textvariable = searchfor).grid(row = 1, column = 2,padx = 10)
label = Label(frame,text = "Search:").grid(row = 1,column = 1,padx = 25)
label = Label(frame,text = "Search By:").grid(row = 2,column = 1,padx = 25,pady = 10)
menu = OptionMenu(frame,choice,*options).grid(row = 2, column = 2, padx = 25, pady = 10)
submitbutton = Button(frame,text = "Search", bg = blue,activebackground = lblue, command = submit).grid(row = 3, column = 1,padx = 10)
cancel = Button(frame,text = "Cancel", bg = blue,activebackground = lblue, command = close).grid(row = 3, column = 2,padx = 10)
search.mainloop()
prakash218/Advanced-Invoice-generator
__save__details.py
# Rebuilds rows of dependencies\_details.xlsx from the per-invoice workbook files
# found in the current directory.  Invoice files are named "NNN<company>.xlsx"
# where NNN is the zero-padded invoice number.
import glob
import openpyxl
from xlrd import open_workbook
from warnings import filterwarnings

filterwarnings("ignore", category=DeprecationWarning)

# Prompt until a valid integer range is entered.
while(1):
    try:
        start = int(input("Enter the Starting invoice number:"))
        break
    except:
        continue
while(1):
    try:
        end = int(input("Enter the Ending invoice number:"))
        break
    except:
        continue

for i in range(start ,end+1):
    # Zero-pad the invoice number to three digits to match the file-name scheme.
    if i < 10:
        a = '00' + str(i)
    elif i < 100:
        a = '0' + str(i)
    else:
        a = str(i)
    b = a
    # Glob pattern "NNN*..." matches any company suffix after the number.
    a += '*' * 20
    for fpath in glob.glob(a):
        amt = 0
        print (fpath)
        # Extract the company name: strip the ".xlsx" extension, then the NNN prefix.
        comp = fpath.split('.')
        comp = str(comp[0])
        comp = comp.split(b)
        comp = str(comp[1])
        workb = open_workbook(fpath)
        sheet1 = workb.sheet_by_index(0)
        # Fixed cell positions in the invoice template: E5=invoice no, G5=date,
        # G7=PO date, E7=PO number; line-item totals live in column I, rows 15-25.
        inv = int(sheet1.cell_value(4,4))
        date = sheet1.cell_value(4,6)
        podate = sheet1.cell_value(6,6)
        ponum = sheet1.cell_value(6,4)
        # NOTE(review): this inner loop reuses the outer loop variable `i`;
        # harmless in Python (the outer for reassigns it) but confusing.
        for i in range(14,25):
            try:
                amt += float(sheet1.cell_value(i,8))
            except:
                continue
        print(inv)
        print(date)
        print(podate)
        print(ponum)
        print(comp)
        print(amt)
        details = [inv,date,podate,ponum,comp,amt]
        #---------------------saving details.xlsx---------
        new = openpyxl.load_workbook('dependencies\_details.xlsx')
        first = new.get_sheet_by_name('Details')
        # Write the six fields to columns A-F of row inv+1 (row 1 is the header).
        list1 = ['A','B','C','D','E','F']
        for i in range(6):
            to_write = list1[i] + str(inv+1)
            #print(to_write , details[i])
            first[to_write] = details[i]
        new.save('dependencies\_details.xlsx')
prakash218/Advanced-Invoice-generator
dependencies/__delete__.pyw
from tkinter import *
from tkinter import messagebox
import sys
from xlrd import open_workbook
import openpyxl
import warnings
from os import path
from os import remove
import runpy

global passw,attempt,flag,password1,inv_no,root
flag = 1
attempt = 0


def main():
    """Open the 'Delete an Invoice' window (shown only after the password check)."""
    global root
    global inv_no
    root = Tk()
    root.geometry('366x450+100+100')
    root.focus_set()
    root.iconbitmap(r'dependencies\icon.ico')
    root.title('Delete an Invoice')
    root.configure(bg = '#00bfff')
    root.bind('<Return>',check)
    inv_no = StringVar()
    label = Label(root,text = "KKR Engineering",bg = '#00bfff',fg = 'white',font= ("calibri",16)).place(x = 102,y = 20)
    label = Label(root,text = "Invoice Number:",fg = 'black').place(x = 10,y = 100)
    entry = Entry(root,textvariable = inv_no)
    entry.place(x = 160,y = 100)
    entry.focus_set()
    submit = Button(root,text = "Search",command = check,background = '#4d4dff',activebackground = 'blue').place(x = 230,y = 400)
    cancel = Button(root,text = "Cancel",command = close,background = '#4d4dff',activebackground = 'blue').place(x = 290,y = 400)
    root.mainloop()


def enter(n=5):
    """Validate the password; three failed attempts return to the main generator.

    `n` is unused; it only absorbs the event argument from the <Return> binding.
    """
    global password1
    global attempt
    # NOTE(review): hard-coded plaintext password — a security smell worth fixing.
    check = passw.get()
    if (check == 'kkr2008'):
        password1.destroy()
        main()
    else:
        attempt += 1
        psw.delete(0,END)
        if attempt >= 3:
            password1.destroy()
            basepath = path.dirname(__file__)
            filepath = path.abspath(path.join(basepath, "..", "__invoice__generator__.pyw"))
            file_globals = runpy.run_path(filepath)


def close2():
    """Cancel the password dialog and re-launch the main invoice generator."""
    password1.destroy()
    basepath = path.dirname(__file__)
    filepath = path.abspath(path.join(basepath, "..", "__invoice__generator__.pyw"))
    file_globals = runpy.run_path(filepath)


def save(write,cmp):
    """Delete invoice row `write` from the details workbook and remove its .xlsx file.

    write -- the (1-based) details row number as a string; the invoice number is write-1.
    cmp   -- the company name, used to reconstruct the invoice file name "NNN<cmp>.xlsx".
    """
    global root
    in_no = int(write)-1
    # Blank the button area by painting a background-coloured label over it.
    Label(text = 'YES ',bg = '#00bfff',fg = '#00bfff').place(x = 160,y = 330,height = 40)
    new = openpyxl.load_workbook('dependencies\_details.xlsx')
    first = new.get_sheet_by_name('Details')
    # Zero-pad the invoice number to three digits to match the file-name scheme.
    var = str(in_no)
    if(len(var) == 1):
        var = '00' + str(in_no)
    elif (len(var) == 2):
        var = '0'+ str(in_no)
    var = var+cmp+'.xlsx'
    # Clear columns A-F and the received flag (I) for this row.
    # NOTE(review): `list` shadows the builtin; rename if this code is touched.
    list = ['A','B','C','D','E','F','I']
    for i in range(7):
        to_write = list[i] + write
        #print(to_write , details[i])
        first[to_write] = ''
    new.save('dependencies\_details.xlsx')
    basepath = path.dirname(__file__)
    filepath = path.abspath(path.join(basepath, "..", var))
    try:
        remove(filepath)
    except:
        return


def no(write):
    """'Back' action: hide the Delete/Back buttons without changing anything."""
    Label(text = 'YES ',bg = '#00bfff',fg = '#00bfff').place(x = 160,y = 330,height = 40)
    return


def check(n=5):
    """Look up the typed invoice number and display details with Delete/Back buttons.

    `n` is unused; it only absorbs the event argument from the <Return> binding.
    """
    global root
    global inv_no
    msg = StringVar()
    val = 0
    TEXT = ""
    try:
        inv = int(inv_no.get())
    except:
        messagebox.showinfo("Invoice number","Please enter a valid Invoice number");
        return
    # Clear the previous result area before re-querying.
    entry2 = Label(root,text = TEXT).place(x = 152,y = 206,height = 115,width = 200)
    loc = ("dependencies\_details.xlsx")
    workb = open_workbook(loc)
    label = Label(root,text = "Invoice details" ,fg = 'black').place(x = 10,y = 200)
    sheet1 = workb.sheet_by_index(0)
    try:
        val = sheet1.cell_value(inv,0)
    except IndexError:
        TEXT = "Invoice not found"
    if( val == inv):
        TEXT = ''
        # Build a "header - value" line per column (invoice no, dates, PO, company, amount).
        TEXT = str(sheet1.cell_value(0,0)) + str(" - ") + str(int(sheet1.cell_value(inv,0)))+ '\n'
        TEXT = TEXT +str(sheet1.cell_value(0,1))+str(" - ")+str(sheet1.cell_value(inv,1))+'\n'
        TEXT = TEXT+str(sheet1.cell_value(0,2))+str(" - ")+str(sheet1.cell_value(inv,2))+'\n'
        TEXT = TEXT+str(sheet1.cell_value(0,3))+str(" - ")+str(sheet1.cell_value(inv,3)) +'\n'
        TEXT = TEXT+str(sheet1.cell_value(0,4))+str(" - ")+str(sheet1.cell_value(inv,4))+'\n'
        TEXT = TEXT+str(sheet1.cell_value(0,5))+str(" - ")+str(sheet1.cell_value(inv,5))+'\n'
        to_write = str(inv+1)
        cmp = str(sheet1.cell_value(inv,4))
    else:
        TEXT = "No Invoice Found ....."+"\n"+"Please Add a Invoice..."
    if len(TEXT) == 0:
        TEXT = "Invoice not found"
    entry2 = Label(root,text = TEXT).place(x = 152,y = 206)
    # to_write/cmp are only bound on success; the buttons are only placed when a
    # full detail text (>100 chars) was built, so the lambdas are never called unbound.
    button = Button(root,text = "Delete",command = lambda:save(to_write,cmp),background = '#4d4dff',activebackground = 'blue')
    entry3 = Button(root,text = "Back",command = lambda:no(to_write),background = '#4d4dff',activebackground = 'blue')
    if len(TEXT) > 100:
        button.place(x = 160,y = 330)
        entry3.place(x = 210,y = 330)
    else:
        Label(text = 'YES ',bg = '#00bfff',fg = '#00bfff').place(x = 160,y = 330,height = 40)


def close():
    """Close the delete window and re-launch the main invoice generator."""
    global root
    root.destroy()
    basepath = path.dirname(__file__)
    filepath = path.abspath(path.join(basepath, "..", "__invoice__generator__.pyw"))
    file_globals = runpy.run_path(filepath)


# ---- top-level UI: password prompt ----
password1 = Tk()
passw = StringVar()
# NOTE(review): '<PASSWORD>' looks like a dataset redaction of the original
# geometry string (e.g. 'WxH+X+Y'); confirm against the upstream repository.
password1.geometry('<PASSWORD>')
password1.configure(bg = '#00bfff')
password1.bind('<Return>',enter)
password1.focus_set()
hi = Label(password1,text='',bg='#00bfff')
hi.grid(row = 1)
label = Label(password1,text = 'Password:')
label.grid(row= 2,column = 1,padx = 10)
psw = Entry(password1,textvariable = passw,show='*')
psw.grid(row=2,column = 2)
psw.focus_set()
hi1 = Label(password1,text='',bg='#00bfff')
hi1.grid(row = 3)
but1 = Button(password1,text = "Submit",command = enter,background = '#4d4dff',activebackground = 'blue')
but1.grid(row = 5,column = 2,padx = 10)
cancel = Button(password1,text = "Cancel",command = close2,background = '#4d4dff',activebackground = 'blue')
cancel.grid(row = 5,column = 3)
password1.mainloop()
jrblevin/scicomp
stat/ou_sim.py
# ou_sim.py
#
# Simulates an Ornstein-Uhlenbeck process.
#
# <NAME> <<EMAIL>>
# Durham, March 24, 2008 09:16 EDT

import numpy
from numpy import exp, sqrt

def ou_sim(T, sigma=1.0, eta=1.0, mu=0.0):
    """Ornstein-Uhlenbeck Simulator

    Draws one realization of the exact (unit-time-step) discretization of
    the OU process, started at its mean:  x[0] = mu and

        x[t] = x[t-1]*exp(-eta) + mu*(1 - exp(-eta))
               + sigma*sqrt((1 - exp(-2*eta))/(2*eta)) * N(0,1).

    Parameters
    ----------
    T -- number of periods to simulate
    sigma -- standard deviation of the Brownian Motion
    eta -- speed of mean reversion
    mu -- mean

    Returns
    -------
    x -- an array containing the realization of the process
    """
    x = numpy.ones((T)) * mu               # x[0] starts at the mean mu

    # Store a series of independent Normal(0,1) draws
    omega = numpy.random.normal(size=T)

    # Hoist the loop-invariant decay, drift and noise-scale factors out of
    # the loop (they were recomputed at every iteration).
    decay = exp(-eta)
    drift = mu * (1 - decay)
    scale = sigma * sqrt((1 - exp(-2*eta)) / (2*eta))

    for t in numpy.arange(1, T):
        x[t] = x[t-1] * decay + drift + scale * omega[t]

    return x

if __name__ == '__main__':
    import pylab

    sigma = 0.01
    eta = 0.03
    mu = 0.0
    x = ou_sim(250, sigma, eta, mu)
    pylab.plot(x, label = ("eta = %0.2f, sigma=%0.2f" % (eta, sigma)))
    pylab.title('Ornstein-Uhlenbeck process')
    pylab.legend(loc='upper left')
    pylab.show()
jrblevin/scicomp
stat/fast_sampling.py
<filename>stat/fast_sampling.py import numpy import scipy def fast_sample(w, M, u): """Generates a sample of size M of integers with weights given by w. Parameters ---------- w : array_like A vector of real weights corresponding to each of the integers ``0`` to ``(w.size - 1)``. These weights do not need to be normalized. M : integer The requested sample size. u : real A Uniform(0,1) draw, used as an offset to the CDF. Returns ------- sample : array_like An array containing the resulting sample. """ # Normalize the weights so that they sum to 1. w = w / sum(w) # Initialize storage for the cumulative weight CDF (including the ``u`` # offset) and the resulting sample. c = numpy.empty((M)) sample = numpy.empty((M), dtype=numpy.int) # Draws for the first integer (0). a and b are the beginning and ending # offsets for each integer. c[0] = M * w[0] + u a = 0 b = numpy.math.floor(c[0]) if b > a: sample[a:b] = 0 for j in numpy.arange(1, w.size): c[j] = c[j-1] + M * w[j] a = b b = numpy.math.floor(c[j]) if b > a: sample[a:b] = j return sample def _test(): seed = 274 numpy.random.seed(274) N = 10 M = 1000 u = numpy.random.rand() w = numpy.random.rand(N) sample = fast_sample(w, M, u) num, bins = scipy.histogram(sample, numpy.arange(N)) u = 0.2 print 'u = ', u print 'w = ', w / sum(w) print 'frequency = ', num / float(M) print 'w - sample frequency = ', w / sum(w) - num / float(M) u = 0.98 sample = fast_sample(w, M, u) num, bins = scipy.histogram(sample, numpy.arange(N)) print 'u = ', u print 'w = ', w / sum(w) print 'frequency = ', num / float(M) print 'w - sample frequency = ', w / sum(w) - num / float(M) if __name__ == "__main__": _test()
jrblevin/scicomp
stat/ou_gdp.py
# ou-gdp.py # # Estimates an Ornstein-Uhlenbeck process using data on U.S. GDP. # # <NAME> <<EMAIL>> # Durham, March 24, 2008 09:02 EDT from numpy import * from scipy import stats, optimize from pylab import * def prt(x): print "theta = %f\t%f" % (x[0], x[1]) ## Log-likelihood function def log_likelihood(theta, t, x): eta = theta[0] sigma = exp(theta[1]) # sigma >= 0 result = 0.0 # Calculate the likelihood, skipping the first observation. for i in arange(1, size(t)): # Time interval (here, constant) dt = t[i] - t[i-1] # Standard deviation of x(i) sd = sigma * sqrt((1 - exp(-2*eta*dt)) / (2 * eta)) # Mean of x(i) (x_bar is zero since we de-meaned already) mean = x[i-1] * exp(-eta * dt) # Accumulate the result result += log(stats.norm.pdf(x[i], loc=mean, scale=sd)) return result / size(t) # Paths data_dir = '/home/jrblevin/projects/macro-data' gdp_file = data_dir + '/us-gdp.dat' deflator_file = data_dir + '/us-gdp-deflator.dat' # Load the data yr, qtr, gdp = loadtxt(gdp_file, unpack=True) deflator = loadtxt(deflator_file, usecols=[2]) log_gdp = log(gdp) # Linear regression t = arange(size(log_gdp)) (alpha, beta) = polyfit(t, log_gdp, 1) fit = polyval([alpha, beta], t) resid = log_gdp - alpha * t - beta # Plot the data and the fitted line # plot(log_gdp, label="Log Real GDP") # plot(fit, label="Fitted line") # title('Log Real GDP Regression') # show() # Starting values (a nonnegative transformation is applied to sigma) eta = 0.1 sigma = 0.1 theta = (eta, log(sigma)) # Define a pure function which returns the negative log-likelihood # at the given parameter values. f = lambda theta: -log_likelihood(theta, t, resid) theta = optimize.fmin(f, theta) print "eta = %f" % theta[0] print "sigma = %f" % exp(theta[1]) # Plot the residuals #plot(resid) #title('GDP Residuals') #show()
sidneyfilho/capture_request_pyshark
tests/test.py
import pyshark

def print_live_notafiscal():
    """Sniff live HTTP traffic on the 'Ethernet' interface and print the body
    of requests/responses to the Correios tracking web service.

    Blocks forever; intended to be run interactively.
    """
    capture = pyshark.LiveCapture("Ethernet", display_filter="http")
    for packet in capture:
        # if hasattr(packet, 'host'):
        #     if "livingpharma.com.br" in packet.http.host:
        #         print(str(packet.http.file_data))
        # NOTE(review): packet.http is accessed unguarded — presumably safe
        # because of the display_filter, but a non-HTTP layer would raise;
        # confirm against pyshark's filter semantics.
        if hasattr(packet.http, 'host'):
            if "webservice.correios.com.br:80" in packet.http.host:
                print(str(packet.http.file_data))
        if hasattr(packet.http, 'response_for_uri'):
            if "http://webservice.correios.com.br:80/service/rastro" in packet.http.response_for_uri:
                print(str(packet.http.file_data))
        # if "DNS" in packet and not packet.dns.flags_response.int_value:
        #     print(packet.dns.qry_name)

if __name__ == "__main__":
    print_live_notafiscal()
pdalba/retro
planet_atmosphere.py
"""
Name
----
planet_atmosphere.py

Description
-----------
RETrO: Refraction in Exoplanet Transit Observations

This script contains the functions that determine the properties of the planetary
atmosphere.  Each function is called from shoulder.py at different times.  Right now,
the atmosphere is set up as described in Dalba (2017), but the user can alter the
atmosphere in any way they see fit, as long as the functions return what the
shoulder.py code is expecting.  This should be obvious by the names of the functions.

Input
-----
Various information about the planetary atmosphere.

Output
------
Various information about the planetary atmosphere.

Author
------
<NAME> --- Boston University
pdalba -at- bu -dot- edu
"""
#-----------------------------------------------------------------------------------------
#Import various math, science, and plotting packages.
import numpy
from numpy import * #Note, numpy functions will not be explicitly called out
import scipy
from scipy.interpolate import interp1d
from scipy.integrate import quad
from scipy import optimize
from scipy.optimize import minimize_scalar, fsolve
import os
import datetime
import pickle

#-----------------------------------------------------------------------------------------
#Define fundamental constants - All physical units in this script will be MKS.
#-----------------------------------------------------------------------------------------
k_B = 1.38065e-23    #mks
m_H = 1.67353e-27    #kg
G = 6.67408e-11      #mks
AU = 1.49598e11      #m
R_earth = 6.371e6    #m
M_earth = 5.9723e24  #kg
R_sun = 6.957e8      #m

#-----------------------------------------------------------------------------------------
#The first function returns the mean molecular *mass* and the reference refractivity
# based on the string qualifier of the atmosphere. This is hardcoded as described in
# Dalba (2017). Note that for the H2/He atmosphere, a solar helium mass fraction is used.
def get_mu_nuref(atm_type):
    """Return (mean molecular mass [kg], reference refractivity) for an atmosphere.

    atm_type -- one of 'H2', 'N2', 'CO2', 'H2O'.  Any other value prints a
    warning and returns nan (behavior preserved from the original).
    Refractivities are NPL (Kaye & Laby) STP values: 1.01325 bar, visible
    light, 273.15 K; they must still be corrected for the actual reference
    number density of the modeled atmosphere.
    """
    #Atmosphere type. 4 possible options: H2, N2, H2O, CO2.
    if atm_type == 'H2':
        #Set a helium mass fraction for this atmosphere.
        Y = 0.25   #solar
        #Find mu assuming only helium and H2
        mu = 1./(Y/(4.*m_H)+(1.-Y)/(2.*m_H))   #kg
        #The reference refractivity must come from the mole fraction, which can be found
        # from the mass fraction.
        f_He = 1./(2./Y - 1.)
        #Mole-fraction-weighted mix of the H2 and He STP refractivities.
        nu_ref = (1.-f_He)*1.32e-4 + f_He*3.5e-5
        return mu, nu_ref
    if atm_type == 'N2':
        #Find mu assuming only N2
        mu = 28.*m_H
        nu_ref = 2.98e-4
        return mu, nu_ref
    if atm_type == 'CO2':
        #Find mu assuming only CO2
        mu = 44.*m_H
        nu_ref = 4.49e-4
        return mu, nu_ref
    if atm_type == 'H2O':
        #Find mu assuming only H2O
        mu = 18.*m_H
        nu_ref = 2.56e-4
        return mu, nu_ref
    #Fix: Python-2 print statement converted to the (py2/py3-compatible)
    # print() call; the nan sentinel return is preserved for callers.
    print('atm_type not specified correctly')
    return nan

#The next set of functions each retrieve an individual property of the atmosphere.
#-----------------------------------------------------------------------------------------
def get_temperature(z, lat, T_atm):
    """Isothermal atmosphere: the temperature is T_atm everywhere (z, lat ignored)."""
    return T_atm

def get_number_density(z, lat, nd_ref, z_ref, H):
    """Exponential number-density profile from hydrostatic equilibrium.

    nd_ref -- number density at the reference radius z_ref; H -- scale height.
    """
    return nd_ref*exp(-(z-z_ref)/H)

def get_pressure(z, lat):
    """Ideal-gas pressure from the number density and temperature profiles.

    NOTE(review): this references module globals nd_ref, z_ref, H and T_atm
    that are NOT defined in this file — calling it as-is raises NameError.
    Presumably the driver script injects them; confirm against shoulder.py.
    """
    return get_number_density(z=z,lat=lat, nd_ref=nd_ref, z_ref=z_ref, H=H)*k_B*\
        get_temperature(z=z,lat=lat)

def get_refractivity(z, lat, z_top, nu_ref, z_ref, H):
    """Exponential refractivity inside the atmosphere (z <= z_top), 0 in free space."""
    if z > z_top:
        return 0.
    return nu_ref*exp(-(z-z_ref)/H)

def get_drefractivity_dz(z, lat, z_top, nu_ref, z_ref, H):
    """Radial derivative of the refractivity profile; 0 outside the atmosphere."""
    if z > z_top:
        return 0.
    return -(nu_ref/H)*exp(-(z-z_ref)/H)

def get_drefractivity_dlat(z, lat):
    """No latitudinal variation — the refractivity variation is entirely radial."""
    return 0.

def get_ray_curvature(z, lat, beta, dndz, dndlat, z_top, nu_ref, z_ref, H):
    """Ray curvature via Eq. 2b of van der Werf (2008); 0 (straight rays) above z_top."""
    if z > z_top:
        return 0.
    ior = 1. + get_refractivity(z=z, lat=lat, z_top=z_top, nu_ref=nu_ref, z_ref=z_ref, \
        H=H)
    return (1./ior)*(cos(beta)*dndz - (sin(beta)/z)*dndlat)
#-----------------------------------------------------------------------------------------
pdalba/retro
planet_mass_radius.py
"""
Name
----
planet_mass_radius.py

Description
-----------
RETrO: Refraction in Exoplanet Transit Observations

This script returns the radius (z_ref) of a planet given its mass.  The default is to
rely on the Chen & Kipping (2017) mass-radius relation, but the user can input any
relation they see fit.

Chen & Kipping (2017): http://adsabs.harvard.edu/abs/2017ApJ...834...17C

Input
-----
Planet mass in kg

Output
------
Planet radius in m

Author
------
<NAME> --- Boston University
pdalba -at- bu -dot- edu
"""
#-----------------------------------------------------------------------------------------
#Import various math, science, and plotting packages.
import numpy
from numpy import * #Note, numpy functions will not be explicitly called out

#-----------------------------------------------------------------------------------------
#Define fundamental constants - All physical units in this script will be MKS.
#-----------------------------------------------------------------------------------------
k_B = 1.38065e-23    #mks
m_H = 1.67353e-27    #kg
G = 6.67408e-11      #mks
AU = 1.49598e11      #m
R_earth = 6.371e6    #m
M_earth = 5.9723e24  #kg
R_sun = 6.957e8      #m

#-----------------------------------------------------------------------------------------
#Broken power law of R vs. M relation from Chen & Kipping (2017):
# R = C * M**S within each regime (terran / neptunian / jovian), with the
# C coefficients chained so the relation is continuous at the transitions.
C = array([1.008,0.,0.])
S = array([0.279, 0.589, -0.044])
mass_trans = array([2.04,0.414*317.83,0.08*333e3])   #Earth masses

#Must determine the transition radii for the terran-neptunian planets
r_trans = zeros_like(mass_trans)
r_trans[0] = C[0]*mass_trans[0]**S[0]

#Solve for C for the neptunian power law relation using this transition point and then
# get the next transition point
C[1] = r_trans[0]/(mass_trans[0]**S[1])
r_trans[1] = C[1]*mass_trans[1]**S[1]

#Solve for C for the jovian power law relation using this transition point and then get
# the next transition point
C[2] = r_trans[1]/(mass_trans[1]**S[2])
r_trans[2] = C[2]*mass_trans[2]**S[2]

#Determine the radius using the correct power law. z_ref in units of meters
def get_zref(M_p):
    """Return the planet radius [m] for mass M_p [kg] via the C&K17 broken power law.

    Masses above the deterministic (jovian) regime print a warning and return nan.
    Fix: the original used open intervals on both sides of each transition, so a
    mass exactly at a transition fell through every branch and returned nan even
    though the power laws agree there by construction; the intervals are now closed.
    """
    M = M_p/M_earth   #work in Earth masses
    if M < mass_trans[0]:
        return R_earth*C[0]*M**S[0]
    if M < mass_trans[1]:
        return R_earth*C[1]*M**S[1]
    if M <= mass_trans[2]:
        return R_earth*C[2]*M**S[2]
    #Fix: Python-2 print statement converted to a print() call.
    print('Mass outside of deterministic relation. No z_ref specified.')
    return nan
pdalba/retro
shoulder_visualization.py
"""
Name
----
shoulder_visualization.py

Description
-----------
RETrO: Refraction in Exoplanet Transit Observations

This script creates several plots to visualize the output of the shoulder.py. It can
work on a single run or multiple runs (in a loop, where the figures close after each).

Input
-----
Requires the path to the particular parameter space run and the ID(s) of the runs to
be visualized.

Output
------
Various data files and pickle files describing the refraction effect for this
particular system.

Author
------
<NAME> --- Boston University
pdalba -at- bu -dot- edu
"""
#NOTE(review): Python 2 script (print statements, raw_input); executes on import.
#-----------------------------------------------------------------------------------------
#Import various math, science, and plotting packages.
#Import various math, science, and plotting packages.
import numpy
from numpy import * #Note, numpy functions will not be explicitly called out
import scipy
from scipy.interpolate import interp1d
from scipy.integrate import quad
from scipy import optimize
from scipy.optimize import minimize_scalar, fsolve
import os
import datetime
import pickle
import matplotlib

#Plotting only occurs if plotting is turned 'on'
import matplotlib.pyplot as plt
from matplotlib.pyplot import *
from matplotlib import colors, cm
plt.ion()
close('all')

#-----------------------------------------------------------------------------------------
#Path info
#-----------------------------------------------------------------------------------------
#Provide the path to the directory with the pickle files
param_search_path = './'

#Also provide a list of IDs to run.
ID_list = [1]

#-----------------------------------------------------------------------------------------
#Begin visualization loop
#-----------------------------------------------------------------------------------------
print ''
print 'Beginning loop of '+str(size(ID_list))+' cases.'
for i in range(size(ID_list)):
    close('all')
    #Load in all pickled data from this run.  NOTE(review): the pickle layouts
    # are presumably those written by shoulder.py — confirm field order there.
    with open(param_search_path+'/ray_ID'+str(ID_list[i])+'.pickle','rb') as ray_pickle:
        ray, x_initial, y_initial, z_initial, beta_initial, lat_initial,\
            bending_angle,z_min,x_exit,y_exit,beta_x0,tau_final = \
            pickle.load(ray_pickle)
    ray_pickle.close()
    with open(param_search_path+'/crescent_ID'+str(ID_list[i])+'.pickle','rb') as \
        crescent_pickle:
        theta,a,X,signal,all_x_inner,all_x_outer,all_y_inner,all_y_outer,all_chi = \
            pickle.load(crescent_pickle)
    crescent_pickle.close()
    with open(param_search_path+'/raypath_ID'+str(ID_list[i])+'.pickle','rb') as \
        raypath_pickle:
        all_z,all_lat,all_beta,all_xi,all_tau = pickle.load(raypath_pickle)
    raypath_pickle.close()
    #Also read the param list file to recover z_top, z_ref and R_star.
    with open(param_search_path+'/param_list_ID'+str(ID_list[i])+'.txt','r') as param_file:
        while 1:
            #Skip any header lines
            line = param_file.readline()
            if '#' in line:
                continue
            line_split = line.split('=')
            if 'z_top' in line:
                z_top = float(line_split[-1])
            if 'z_ref' in line:
                z_ref = float(line_split[-1])
            if 'R_star' in line:
                R_star = float(line_split[-1])
            if not line:
                break
    param_file.close()
    #Calculate the star loc at the moment of max effect (theta0)
    star_loc = array([-sqrt(a[0]**2 - X[0]**2),-X[0]])
    #Make a plot showing the ray paths in the planetary atmosphere
    fig1=figure(1)
    circle = linspace(0,2.*pi,1000)   #radians
    ax1=fig1.add_subplot(111,aspect='equal')
    ax1.plot(z_top*sin(circle),z_top*cos(circle),c='k',lw=2)
    ax1.plot(z_ref*sin(circle),z_ref*cos(circle),c='k',ls='--')
    ax1.axhline(0.,c='k',ls='--')
    ax1.axvline(0.,c='k',ls='--')
    xticks(fontsize='large')
    yticks(fontsize='large')
    for j in range(size(all_z)):
        ax1.plot(cos(all_lat[j])*all_z[j],sin(all_lat[j])*all_z[j])
    ax1.minorticks_on()
    ax1.set_xlabel('Distance from Origin [m]',fontsize='x-large')
    ax1.set_ylabel('Distance from Origin [m]',fontsize='x-large')
    ax1.set_title('ID '+str(ID_list[i]),fontsize='x-large')
    #fig1.savefig('raypaths.png')
    #Plot the crescents from the observers perspective
    fig2 = figure(2)
    ax2=fig2.add_subplot(111,aspect='equal')
    ax2.plot(z_top*sin(circle),z_top*cos(circle),c='k',ls='--')
    ax2.plot(z_ref*sin(circle),z_ref*cos(circle),c='k')
    ax2.plot(R_star*sin(circle)+star_loc[1],R_star*cos(circle),c='r',lw=2)
    xticks(fontsize='large')
    yticks(fontsize='large')
    ax2.minorticks_on()
    ax2.set_xlabel('Distance from Origin [m]',fontsize='x-large')
    ax2.set_ylabel('Distance from Origin [m]',fontsize='x-large')
    #Each crescent is mirrored about y=0 and shifted by the separation offset.
    for k in range(shape(all_x_inner)[0]):
        x_crescent = append(append(all_x_inner[k,:],all_x_outer[k,:][::-1]),\
            append(all_x_outer[k,:],all_x_inner[k,:][::-1]))
        y_crescent = append(append(all_y_inner[k,:],all_y_outer[k,:][::-1]),\
            append(-all_y_outer[k,:],-all_y_inner[k,:][::-1]))
        ax2.plot(x_crescent+(X[k]-X[0]),y_crescent)
    ax2.set_title('ID '+str(ID_list[i]),fontsize='x-large')
    #fig2.savefig('crescents.png')
    #Make a plot of the shoulder vs. X separation
    fig3 = figure(3)
    ax3 = fig3.add_subplot(111)
    ax3.plot(X/X[0],signal*1e6,c='k',lw=2)
    xticks(fontsize='large')
    yticks(fontsize='large')
    ax3.minorticks_on()
    ax3.set_xlabel('Normalized X Separation',fontsize='x-large')
    ax3.set_ylabel('Relative Flux Increase [ppm]',fontsize='x-large')
    ax3.set_xlim(0.9,)
    ax3.set_title('ID '+str(ID_list[i]),fontsize='x-large')
    #fig3.savefig('signal_vs_X.png')
    #Make a plot of the shoulder vs. phase
    fig4 = figure(4)
    ax4 = fig4.add_subplot(111)
    ax4.plot(theta,signal*1e6,c='k',lw=2)
    xticks(fontsize='large')
    yticks(fontsize='large')
    ax4.minorticks_on()
    ax4.set_xlabel('Orbital Phase',fontsize='x-large')
    ax4.set_ylabel('Relative Flux Increase [ppm]',fontsize='x-large')
    #ax4.set_xlim(0.9,)
    ax4.set_title('ID '+str(ID_list[i]),fontsize='x-large')
    #fig4.savefig('signal_vs_phase.png')
    print ''
    #Pause between runs so each set of figures can be inspected.
    stop = raw_input('Showing ID '+str(ID_list[i])+'. Press enter to continue.')
#-----------------------------------------------------------------------------------------
# pdalba/retro
# shoulder.py
""" Name ---- shoulder.py Description ----------- RETrO: Refraction in Exoplanet Transit Observations This script models the secondary stellar image in an exoplanetary atmosphere due to refraction as a function of the angle the planet sweeps out in orbit from mid-transit. This therefore simulates the full structure of the shoulder feature. It only does this for one side (ingress or egress). Input ----- Requires input file to specify the stellar-orbital-planetary parameters. Output ------ Various data files and pickle files describing the refraction effect for this particular system. Author ------ <NAME> --- Boston University pdalba -at- bu -dot- edu """ #----------------------------------------------------------------------------------------- #Import various math, science, and plotting packages. import numpy from numpy import * #Note, numpy functions will not be explicitly called out import scipy from scipy.interpolate import interp1d from scipy.integrate import quad from scipy import optimize from scipy.optimize import minimize_scalar, fsolve import os import datetime import pickle import matplotlib #Plotting only occurs if plotting is turned 'on' import matplotlib.pyplot from matplotlib.pyplot import * from matplotlib import colors, cm #Import other RETrO modules import planet_mass_radius import planet_atmosphere from planet_atmosphere import * import retro_rk4 #----------------------------------------------------------------------------------------- #----------------------------------------------------------------------------------------- #Set path information path = './' #----------------------------------------------------------------------------------------- #Define fundamental constants - All physical units in this script will be MKS. 
#-----------------------------------------------------------------------------------------
k_B = 1.38065e-23    #Boltzmann constant, J/K (mks)
m_H = 1.67353e-27    #mass of the hydrogen atom, kg
G = 6.67408e-11      #gravitational constant, mks
AU = 1.49598e11      #astronomical unit, m
R_earth = 6.371e6    #Earth radius, m
M_earth = 5.9723e24  #Earth mass, kg
R_sun = 6.957e8      #solar radius, m

#-----------------------------------------------------------------------------------------
#Read the inputs that define this atmosphere
#-----------------------------------------------------------------------------------------
with open(path+'input.txt','r') as input_file:
    while 1:
        line = input_file.readline()
        if not line:  #end of file
            break
        #Skip any header lines
        if '#' in line:
            continue
        split_line = line.split('=')
        if 'ID' in line:
            ID = int(split_line[-1])
        if 'T_ATM' in line:
            T_atm = float(split_line[-1])
        if 'SEMI' in line:
            semimajor = float(split_line[-1])
        if 'M_P' in line:
            M_p = float(split_line[-1])
        if 'R_STAR' in line:
            R_star = float(split_line[-1])
        #BUGFIX: 'ATM' is a substring of 'T_ATM', so the original check also fired on
        # the T_ATM line and could clobber atm_type depending on the order of the
        # lines in input.txt. Exclude the T_ATM line explicitly.
        if ('ATM' in line) and ('T_ATM' not in line):
            atm_type = split_line[-1]

#-----------------------------------------------------------------------------------------
#Calculate other necessary aspects of the atmosphere based on the input parameters.
#-----------------------------------------------------------------------------------------
#Convert units of input parameters
semimajor *= AU      #AU -> m
M_p *= M_earth       #Earth masses -> kg
R_star *= R_sun      #solar radii -> m

#Get the planet radius in m
z_ref = planet_mass_radius.get_zref(M_p)

#Calculate the gravitational acceleration
g = G*M_p/z_ref**2  #m/s^2

#Get mu and nu_ref from atmosphere module
mu, nu_ref = get_mu_nuref(atm_type)

#Atmospheric pressure scale height.
H = k_B*T_atm/(mu*g)  #m

#Atmospheric 1 bar (1e5 Pa) reference number density
nd_ref = 1e5/(k_B*T_atm)  #m^-3

#STP number density (from old STP when the refractivities were measured)
nd_STP = 101325./(k_B*273.15)

#Since both the number density and nu profiles share the same exponential factor, the
# ratio of the densities will equal the ratio of the refractivities.
nu_ref *= nd_ref/nd_STP

#-----------------------------------------------------------------------------------------
#Other parameters required to run the model
#-----------------------------------------------------------------------------------------
#Other orbital parameters, either held constant or arbitrary
ecc = 0.
theta_peri = 0.*(pi/180.)  #longitude of periastron, rad
n_theta0 = 4.              #Number of theta0 angles to cover
n_orbit_steps = 50         #Number of orbital steps to take

#Other atmosphere parameters, either held constant or arbitrary.
abs_cross_sec = 0.  #m^2, ignore absorption

#Ray tracing
z_top = z_ref+20.*H  #altitude where ray path integration begins
ds = 0.1*H           #step size for ray path integration
n_rotations = 100    #number of times the ray tracing plane is rotated (half crescent)

#These parameters control the initial ray separation and how it changes. Each ray is
# some delta-y below the one above it, and if a ray experiences critical refraction,
# the delta-y is reduced by some fraction; this is one iteration. This assures that
# low altitudes are more finely sampled. The delta-y listed here is the starting
# value; it will be altered. This continues until the rays have reached bending angles
# great enough for all the interpolation to occur.
delta_y_init = H/4.
delta_y_cut = 4.

#Turn plotting on or off here.
plotting = 'off'

#For this parameter space search, always assume spherical symmetry. Much of this code
# relies upon this assumption.
sphere_sym = 'yes'

#-----------------------------------------------------------------------------------------
#Make a useful save file that has all of the parameters for this run
#-----------------------------------------------------------------------------------------
#CLEANUP: the original wrapped these writes in a dead 'while 1: ... break' loop and
# left the handle to a manual close(); a context manager does the same work safely.
with open(path+'param_list_ID'+str(ID)+'.txt','w') as save_file1:
    save_file1.write('#This file created at '+str(datetime.datetime.now())+\
        '. All units are SI.'+os.linesep)
    save_file1.write('ID\t\t='+str(ID)+os.linesep)
    save_file1.write('T_atm\t\t='+str(T_atm)+os.linesep)
    save_file1.write('semimajor\t='+str(semimajor)+os.linesep)
    save_file1.write('M_p\t\t='+str(M_p)+os.linesep)
    save_file1.write('R_star\t\t='+str(R_star)+os.linesep)
    save_file1.write('atm_type\t='+atm_type+os.linesep)
    save_file1.write('z_ref\t\t='+str(z_ref)+os.linesep)
    save_file1.write('g\t\t='+str(g)+os.linesep)
    save_file1.write('mu\t\t='+str(mu)+os.linesep)
    save_file1.write('nu_ref\t\t='+str(nu_ref)+os.linesep)
    save_file1.write('H\t\t='+str(H)+os.linesep)
    save_file1.write('nd_ref\t\t='+str(nd_ref)+os.linesep)
    save_file1.write('ecc\t\t='+str(ecc)+os.linesep)
    save_file1.write('theta_peri\t='+str(theta_peri)+os.linesep)
    save_file1.write('n_theta0\t='+str(n_theta0)+os.linesep)
    save_file1.write('n_orbit_steps\t='+str(n_orbit_steps)+os.linesep)
    save_file1.write('abs_cross_sec\t='+str(abs_cross_sec)+os.linesep)
    save_file1.write('z_top\t\t='+str(z_top)+os.linesep)
    save_file1.write('ds\t\t='+str(ds)+os.linesep)
    save_file1.write('n_rotations\t='+str(n_rotations)+os.linesep)
    save_file1.write('delta_y_init\t='+str(delta_y_init)+os.linesep)
    save_file1.write('delta_y_cut\t='+str(delta_y_cut)+os.linesep)
    save_file1.write('plotting\t='+plotting+os.linesep)
    save_file1.write('sphere_sym\t='+sphere_sym+os.linesep)

#-----------------------------------------------------------------------------------------
#Define the function to be used when determining the rays that "bracket" the star
# (determine the rays that just graze the near and far edges of the star).
# Inputs are dx: the x-axis distance between the ray exit point and the origin point on
# the stellar surface; beta_x0: impact angle with y-plane at D.
#-----------------------------------------------------------------------------------------
def minimize_edges(dx, beta_x0, x_exit, y_exit):
    """Distance from a ray's landing point in the stellar plane to the star's center.

    dx is the x-axis distance between the ray exit point and the candidate landing
    point; beta_x0 is the ray's impact angle with the y-plane. Reads the module-level
    star_loc set in the orbital loop. Minimized with scipy's minimize_scalar to find
    the rays that just graze the near and far stellar limbs.
    """
    dy = dx/tan(beta_x0)
    y_final, x_final = y_exit - dy, x_exit - dx
    #Distance from that landing point to the center of the star
    return sqrt((x_final-star_loc[0])**2 + (y_final-star_loc[1])**2)

#-----------------------------------------------------------------------------------------
#Orbit determination
#-----------------------------------------------------------------------------------------
#The orbit is measured relative to theta, which is zero at mid-transit. First determine
# the theta where the max effect occurs, i.e., where the projected star-planet center
# separation (X) equals z_top+R_star. This is an implicit equation and needs a solver.
theta_start = lambda thet: thet - arcsin((z_top+R_star)*(1.+ecc*cos(thet-theta_peri))/\
    (semimajor*(1.-ecc**2)))
theta0 = fsolve(theta_start,(z_top+R_star)/semimajor)[0]

#Theta array running from theta0 out to n_theta0 times theta0 from mid-transit
theta = linspace(theta0,n_theta0*theta0,n_orbit_steps)

#Instantaneous orbital distances at these theta values (physical units)
a = semimajor*(1.-ecc**2)/(1.+ecc*cos(theta-theta_peri))

#Projected center separations (physical units)
X = a*sin(theta)

#Flux signal at each orbital step
signal = zeros_like(X)

#Crescent-related storage for all orbit steps (one row per step, one col per rotation)
all_x_inner = zeros([n_orbit_steps,n_rotations])
all_x_outer = zeros([n_orbit_steps,n_rotations])
all_y_inner = zeros([n_orbit_steps,n_rotations])
all_y_outer = zeros([n_orbit_steps,n_rotations])
all_chi = zeros([n_orbit_steps,n_rotations])

#-----------------------------------------------------------------------------------------
#Start the loop over theta (orbital steps).
#----------------------------------------------------------------------------------------- # First define where the star is wrt the planet, which will always be at the origin for l in range(n_orbit_steps): #Instantaneous x-y position of the star assuming origin at the center of the planet. star_loc = array([-sqrt(a[l]**2 - X[l]**2),-X[l]]) #Set the the value of delta_y delta_y = delta_y_init #Set up the rotation of the ray tracing plane and begin a loop over the angle chi. # Also set up arrays to store x,y positions of the crescent. If the atmosphere is # spherically symmetric, rays only need to be traced once, since each rotation will # shrink the effective radius of the host star. #------------------------------------------------------------------------------------- chi = linspace(0,(1.-1e-10)*arcsin(R_star/X[l]),n_rotations) y_inner = zeros([n_rotations]) #Y position of edge of crescent closest to planet y_outer = zeros([n_rotations]) #Y position of edge of crescent farthest from planet x_inner = zeros([n_rotations]) #X position of edge of crescent closest to planet x_outer = zeros([n_rotations]) #X position of edge of crescent closest to planet r_star_t = zeros([n_rotations]) #Distance of tangent line dropped from star center R_star_eff = zeros([n_rotations]) #Eff. radius of star after rotation for k in range(n_rotations): #First, using chi, determine the stellar tangent radius r_star_t[k] = sin(chi[k])*X[l] #Now the effective stellar radius R_star_eff[k] = sqrt(R_star**2 - r_star_t[k]**2) #--------------------------------------------------------------------------------- #--------------------------------------------------------------------------------- #Now, initialize the ray arrays. These will be amended as more rays are traced. # This initialization process only needs to happen once if spherical symmetry is # assumed. 
The first ray will always start at z_top-delta_y if (sphere_sym =='yes') & (k==0) & (l==0): #Ray IDs and array for final bending angle, minimum approach distance, exit # angles. Also start the ray counter and set the critical refraction variable # to 0 (False) bending_angle = array([0.]) #Start this at 0. to initiate the while loop z_min = array([]) x_exit = array([]) y_exit = array([]) beta_x0 = array([]) tau_final = array([]) ray = array([0]) crit_ref = 0 #Also create a few storage lists that will save all of the low level ray # tracing details all_z, all_lat, all_beta, all_xi, all_tau = [],[],[],[],[] #----------------------------------------------------------------------------- #Begin the loop that will trace all rays. #----------------------------------------------------------------------------- #This is a while loop that forces ray tracing until the final accepted ray has # bent enough to allow all interpolations to occur. This critical bending is # approximated using the number of orbital steps, the size of the first step, # and the orbital parameter (e.g., simple bending angle is ~R_star/semi). bending_angle_thresh = (n_theta0+1.)*(2.*z_top+R_star)/semimajor n_iter = 0 while abs(bending_angle[-1]) < bending_angle_thresh: #Redefine the bending angle array to remove the starting condition if n_iter == 0: bending_angle = array([]) #Use a while loop to trace rays so long as critical refraction (or extreme # bending beyond 90 deg) is not met. while crit_ref == 0: #Very first ray, start at the z_top minus delta_y if (ray[-1]==0) & (n_iter==0): y_initial = array([z_top - delta_y]) x_initial = sqrt(z_top**2 - y_initial**2) z_initial = ones_like(y_initial)*z_top lat_initial = arctan(y_initial/x_initial) beta_initial = lat_initial - pi/2. xi_initial = zeros_like(beta_initial) tau_initial = zeros_like(beta_initial) #All other rays, based the y_initial off the previous value, and then # find the other initial values normally. 
else: y_initial = append(y_initial,y_initial[-1]-delta_y) x_initial = append(x_initial,sqrt(z_top**2 - y_initial[-1]**2)) z_initial = append(z_initial,z_top) lat_initial = append(lat_initial,arctan(y_initial[-1]/\ x_initial[-1])) beta_initial = append(beta_initial, lat_initial[-1]-pi/2.) xi_initial = append(xi_initial,0.) tau_initial = append(tau_initial,0.) #Initialize the arrays for the integrated quantities, the constants, # and the refractive invariant (which is just the impact param, or # starting y-values). z, lat, beta, xi, tau = array([z_initial[-1]]), \ array([lat_initial[-1]]),array([beta_initial[-1]]), \ array([xi_initial[-1]]), array([tau_initial[-1]]) constant = array([abs_cross_sec,nd_ref,z_ref,z_top,nu_ref,H]) #Now begin the ray stepping and continue it until some termination # condition is met. while z[-1] <= z_top: #Create the current array current=array([z[-1],lat[-1],beta[-1],xi[-1],tau[-1],ds,ray[-1]]) #Call the RK4 function next = retro_rk4.rk4_ray_trace(current,constant) #Update the integrated quantity arrays z = append(z,next[0]) lat = append(lat,next[1]) beta = append(beta,next[2]) xi = append(xi,next[3]) tau = append(tau,next[4]) #Monitor that the ray has not experienced critical refraction. If # it has, break out of the ray stepping while loop. This condition # assumes spherical symmetry. Also have the check to make sure the # overall termination condition for the ray stepping while loop # has not been met, otherwise the ray curvature will be zero since # the ray is outside of the atmosphere. if z[-1] <= z_top: if abs(1./get_ray_curvature(z=z[-1],lat=lat[-1],beta=beta[-1],\ dndz=get_drefractivity_dz(z=z[-1],lat=lat[-1],z_top=z_top,\ nu_ref=nu_ref,z_ref=z_ref,H=H), dndlat = \ get_drefractivity_dlat(z=z[-1],lat=lat[-1]),\ z_top=z_top, nu_ref=nu_ref, z_ref=z_ref, \ H=H)) < z[-1]: crit_ref = 1 break #Check to see if the bending has extended beyond 90 deg. In some # atmospheres, rays can not critically refract but bend >90 deg. 
# Such a ray will never reach the star and will not be considered. # I will assume this is essentially critical refraction, and # terminate the ray in the same fashion as above. if abs(xi[-1]) > pi/2.: crit_ref = 1 break #Only continue analyzing this ray if it did not experience critical # refraction. if crit_ref == 0: #Update the array keeping track of the bending angle, z_min, etc bending_angle = append(bending_angle,xi[-1]) tau_final = append(tau_final,tau[-1]) z_min = append(z_min,min(z)) x_exit = append(x_exit,z[-1]*cos(lat[-1])) y_exit = append(y_exit,z[-1]*sin(lat[-1])) #Also update the storage arrays that are holding all the low-level # ray path integration info. For now, only keep a subset of all # the data, otherwise the data storage could become unreasonable. # The amount to keep should correlate to a step size of around a # scale height. all_z.append(z[::int(ceil(H/ds))]) all_lat.append(lat[::int(ceil(H/ds))]) all_beta.append(beta[::int(ceil(H/ds))]) all_xi.append(xi[::int(ceil(H/ds))]) all_tau.append(tau[::int(ceil(H/ds))]) #Now for the projected geometry. It projects the ray back to the # x=0 axis to find an alternate interior angle that is then the # impact angle at the plane behind the Sun. beta_x0 = append(beta_x0,pi + beta[-1] - lat[-1]) #The following block is used for plotting the path of the rays. 
# The if statement will initialize all the plots for the first ray if (ray[-1]==0) & (n_iter==0) & (plotting=='on'): #This plot for the path of the rays fig1=figure(1) circle = linspace(0,2.*pi,1000) #radians ax1=fig1.add_subplot(111,aspect='equal') ax1.plot(z_top*sin(circle),z_top*cos(circle),c='k',lw=2) ax1.plot(z_ref*sin(circle),z_ref*cos(circle),c='k',ls='--') ax1.plot(R_star_eff[k]*sin(circle)+star_loc[0],R_star_eff[k]*\ cos(circle)+star_loc[1],c='r',lw=2) ax1.axhline(0.,c='k',ls='--') ax1.axvline(0.,c='k',ls='--') xticks(fontsize='large') yticks(fontsize='large') #Update the plots for each ray if plotting=='on': ax1.plot(cos(lat)*z,sin(lat)*z) print 'Completed ray '+str(ray[-1])+', with xi= '+str(xi[-1]) #Update the ray counter ray = append(ray,ray[-1]+1) #If critical refraction was met, the while loop that keeps ray tracing # going will break and the code will come here. # Alter the delta_y to be some fraction of what it was previously delta_y /= delta_y_cut #In some cases the delta_y becomes so small that it does not numerically # distinguish the next y_initial value from the previous one. If this # happens, the ray tracing must end. Any crescent resulting from rays at # these altitudes will have widths of ~zero. if (y_initial[-2]-delta_y) == y_initial[-2]: #Remove the most recent entries in the ray arrays. The most recent led # to critical refraction. The one before that represents the lowest # possible y_initial, and the maximally bent ray. Keep that one y_initial = delete(y_initial,-1) x_initial = delete(x_initial,-1) z_initial = delete(z_initial,-1) lat_initial = delete(lat_initial,-1) beta_initial = delete(beta_initial,-1) xi_initial = delete(xi_initial,-1) tau_initial = delete(tau_initial,-1) ray = delete(ray,-1) #At this point, the ray tracing portion of the code needs to end. A # break here will terminate the ray tracing while loop. 
print '' print 'Ending ray tracing early due to small delta_y limit issue' print '' break #Otherwise, remove the two most recent entries in the ray arrays. The most # recent led to critical refraction, and the one before that usually bends # far from the others. This prevents large gaps in sampling at the star # plane. for cut in range(2): y_initial = delete(y_initial,-1) x_initial = delete(x_initial,-1) z_initial = delete(z_initial,-1) lat_initial = delete(lat_initial,-1) beta_initial = delete(beta_initial,-1) xi_initial = delete(xi_initial,-1) tau_initial = delete(tau_initial,-1) #If this is not the final iteration, then also remove the last # successful ray to make the sampling more even if (cut==0) & (abs(bending_angle[-2])<bending_angle_thresh): bending_angle = delete(bending_angle,-1) z_min = delete(z_min,-1) x_exit = delete(x_exit,-1) y_exit = delete(y_exit,-1) beta_x0 = delete(beta_x0,-1) ray = delete(ray,-1) del all_z[-1] del all_lat[-1] del all_beta[-1] del all_xi[-1] del all_tau[-1] #If this is the final iteration, we can keep the last ray, and break # out of this for loop. The overall while loop tracing the rays will # end because its condition will be met. In this case, the small # delta y issue did not occur if abs(bending_angle[-2])>bending_angle_thresh: ray = delete(ray,-1) break #Reset the critical refraction variable crit_ref = 0 #print a message to mark the end of this iteration print '' print 'Delta_y iteration '+str(n_iter)+' has completed.' print '' #Step up the iteration count n_iter += 1 #At this point, all rays are traced given the desired number of iterations. 
n_rays = size(y_initial) #--------------------------------------------------------------------------------- #House-keeping commands for the plot #--------------------------------------------------------------------------------- if plotting=='on': ax1.minorticks_on() ax1.set_xlabel('Distance from Origin [m]',fontsize='x-large') ax1.set_ylabel('Distance from Origin [m]',fontsize='x-large') #--------------------------------------------------------------------------------- #Determine the "edge" of the crescent. #--------------------------------------------------------------------------------- #Determine the rays that are tangential to the star. These determine the # boundaries of the secondary image of the star out-of-transit. This will only # run if it is called. stellar_min_z = zeros([n_rays]) #Distance b/t closest approach and star center x_final = zeros([n_rays]) y_final = zeros([n_rays]) #The x-range to be tested here are +/- a stellar radius. The point is to find # the closest approach distance of the ray to the center of the star. For the # two rays where this distance is R_star_eff, they bound the system. for i in range(n_rays): #Yplane is the plane of the star yplane_lims = array([star_loc[0]-R_star_eff[k],star_loc[0]+R_star_eff[k]]) #dx_lims should be positive and in correct order for limits dx_lims = abs((yplane_lims - x_exit[i])[::-1]) #Now run the optimize function min_res_edge = minimize_scalar(minimize_edges, bounds=(dx_lims[0],\ dx_lims[1]),method='bounded', args=(beta_x0[i],x_exit[i],y_exit[i])) #Make sure the optimization worked if min_res_edge.message != 'Solution found.': print '' stop = input('Minimization unsuccessful - please intervene') else: x_final[i] = x_exit[i] - min_res_edge.x y_final[i] = y_exit[i] - min_res_edge.x/tan(beta_x0[i]) stellar_min_z[i] = sqrt((star_loc[0]-x_final[i])**2 + (star_loc[1]-\ y_final[i])**2) #Include the minimum points on the plot of the rays. 
This is a busy plot, but it # show where all the rays end up at the plane of the host star. Keep this off # unless diagnosing ray tracing issues. #if plotting=='on': # ax1.scatter(x_final,y_final,facecolor='none',edgecolor='r',lw=2,s=30) #With the y_final values of all the rays, there needs to be a check that the star # central yplane is fully sampled. This may very well be the case if the ray # tracing ended due to the small delta_y problem. But that may only affect distant # orbit steps and not the close in ones. If this is the very first orbital step, # assume that the interpolation will work if l==0: fully_sampled = 'yes' # Since the bending_angle_thresh was just an approximate threshold value of the # padding, this is a double check that the interpolation below won't break. # If the problem occurs on this step, then the crescent is assumed (justifiably # so) to have a width of zero. Do not use effective stellar radius, if the central # width is zero, all the rotations' widths should be zero. if (y_final[-1] > (star_loc[1]-R_star)) | (fully_sampled == 'no'): #Skip the interpolation and manually set the y_inner and outer values y_initial_outer = y_initial[-1] y_initial_inner = y_initial[-1] if k==0: print 'Forcing the next crescent width to 0 due to the small '+\ 'delta_y limit' #If this is not already set, set it now to make sure the other edges of the # crescent are not calculated. After this is set, no other rotations or # orbital steps will switch it back, which is as expected. fully_sampled = 'no' #Stellar min z is a distance, so it is positive. However, now give it a sign # whether or not the final ray point is above or below the y=star_loc[1] # plane. In other words, this stellar min z is positive if the ray finished # 'above' the equator of the star, and negative if it finished below. This way # we can find the two bounding points of the star as +/- R_star_eff. Above the # equator is the outer ray, below is the inner ray. 
Only run this interp funciton # if the problem described just above did not occur. else: y_minz_interp = interp1d(stellar_min_z*sign(y_final-star_loc[1]),y_initial) y_initial_outer = y_minz_interp(R_star_eff[k]) y_initial_inner = y_minz_interp(-R_star_eff[k]) #Final x,y calculation #Based on the y inner and outer values found in the rotated plane, we can # transform back to the unrotated plane and plot the points y_outer[k] = y_initial_outer*sin(chi[k]) y_inner[k] = y_initial_inner*sin(chi[k]) x_outer[k] = y_initial_outer*cos(chi[k]) x_inner[k] = y_initial_inner*cos(chi[k]) #Plotting #Create a new plot that gives the observers perspective if (k == 0) & (l==0) & (plotting=='on'): fig2 = figure(2) circle = linspace(0,2.*pi,1000) #radians ax2=fig2.add_subplot(111,aspect='equal') ax2.plot(z_top*sin(circle),z_top*cos(circle),c='k',ls='--') ax2.plot(z_ref*sin(circle),z_ref*cos(circle),c='k') ax2.plot(R_star_eff[k]*sin(circle)+star_loc[1],R_star_eff[k]*cos(circle),\ c='r',lw=2) xticks(fontsize='large') yticks(fontsize='large') ax2.minorticks_on() ax2.set_xlabel('Distance from Origin [m]',fontsize='x-large') ax2.set_ylabel('Distance from Origin [m]',fontsize='x-large') #This marks the end of the kth ray-tracing plane rotation. 
#--------------------------------------------------------------------------------- #Put the crescent data into its storage array #------------------------------------------------------------------------------------- all_x_inner[l,:], all_x_outer[l,:] = x_inner, x_outer all_y_inner[l,:], all_y_outer[l,:] = y_inner, y_outer all_chi[l,:] = chi #------------------------------------------------------------------------------------- #Plot the crescent by simply combining the edge points in a closed curve order #------------------------------------------------------------------------------------- x_crescent = append(append(x_inner,x_outer[::-1]),append(x_outer,x_inner[::-1])) y_crescent = append(append(y_inner,y_outer[::-1]),append(-y_outer,-y_inner[::-1])) #Add them to the plot, with the appropriate project separation if plotting=='on': ax2.plot(x_crescent+(X[l]-X[0]),y_crescent) #------------------------------------------------------------------------------------- #Determine the area of the crescent by integrating the equation for circle sector area # Do the integral for inner and outer, then subtract them to account for the switch in # integral limits. This integral will require the radius of the x, y points as a func- # tion of chi (rotation angle). Use interpolation to find this. Obviously, more rays # will yield a more precise area. 
#------------------------------------------------------------------------------------- #First, find the radii of the inner and outer points radii_inner = sqrt(x_inner**2 + y_inner**2) radii_outer = sqrt(x_outer**2 + y_outer**2) #Interpolate these as functions of chi radii_inner_interp = interp1d(chi,radii_inner) radii_outer_interp = interp1d(chi,radii_outer) #Now compute the integral of a circle section using quad from scipy using throw-away # lambda functions circle_sec_inner = lambda alpha: 0.5*radii_inner_interp(alpha)**2 circle_sec_outer = lambda alpha: 0.5*radii_outer_interp(alpha)**2 #Final run the integration, subtract, and double (symmetry about x-axis) to get area outer_area = quad(circle_sec_outer,0.,max(chi)) inner_area = quad(circle_sec_inner,0.,max(chi)) crescent_area = 2.*(outer_area[0] - inner_area[0]) signal[l] = crescent_area/(pi*R_star**2) print 'Ray tracing signal = '+str(signal[l]*1e6)+' ppm' print 'End of orbital step '+str(l+1) print '' #------------------------------------------------------------------------------------- #Make a final plot of the shoulder #----------------------------------------------------------------------------------------- if plotting=='on': fig3 = figure(3) ax3 = fig3.add_subplot(111) ax3.plot(X/X[0],signal*1e6,c='k',lw=2) xticks(fontsize='large') yticks(fontsize='large') ax3.minorticks_on() ax3.set_xlabel('Normalized X Separation',fontsize='x-large') ax3.set_ylabel('Relative Flux Increase [ppm]',fontsize='x-large') ax3.set_xlim(0.9,) #----------------------------------------------------------------------------------------- #Create save files in the form of pickles to save the information for this run #----------------------------------------------------------------------------------------- #Save quantities related to the ray tracing. 
with open(path+'ray_ID'+str(ID)+'.pickle','wb') as ray_pickle: pickle.dump((ray, x_initial, y_initial, z_initial, beta_initial, lat_initial,\ bending_angle,z_min,x_exit,y_exit,beta_x0,tau_final),ray_pickle) ray_pickle.close() #Save quantities related to the crescents with open(path+'crescent_ID'+str(ID)+'.pickle','wb') as crescent_pickle: pickle.dump((theta,a,X,signal,all_x_inner,all_x_outer,all_y_inner,all_y_outer,\ all_chi),crescent_pickle) crescent_pickle.close() #Save quantities related to ray path integration with open(path+'raypath_ID'+str(ID)+'.pickle','wb') as raypath_pickle: pickle.dump((all_z,all_lat,all_beta,all_xi,all_tau),raypath_pickle) raypath_pickle.close() #----------------------------------------------------------------------------------------- #End of script indicator #----------------------------------------------------------------------------------------- print '' print '' print 'Ray tracing script for run ID '+str(ID)+' has completed.' print '' print '' #-----------------------------------------------------------------------------------------
# pdalba/retro
# retro_rk4.py
#(Removed a stray '<gh_stars>' scraper artifact that preceded the docstring: it is a
# SyntaxError and would also have demoted the module docstring.)
"""
Name
----
retro_rk4.py

Description
-----------
RETrO: Refraction in Exoplanet Transit Observations

This script contains the RK4 integration scheme for the ray tracing portion of RETrO.
It utilizes many of the functions from the planet atmosphere module. For the optical
depth, it only uses a constant cross section for absorption (sigma). The user is free
to alter this for a more sophisticated opacity scheme (i.e. Rayleigh scattering).

Input
-----
Various information about the planetary atmosphere and present ray position.

Output
------
The next ray position.

Author
------
<NAME> --- Boston University
pdalba -at- bu -dot- edu
"""
#-----------------------------------------------------------------------------------------
#Import various math, science, and plotting packages.
import numpy
from numpy import *  #Note, numpy functions will not be explicitly called out
import scipy
from scipy.interpolate import interp1d
from scipy.integrate import quad
from scipy import optimize
from scipy.optimize import minimize_scalar, fsolve
import os
import datetime
import pickle

#Import RETrO modules here
import planet_atmosphere
from planet_atmosphere import *
#-----------------------------------------------------------------------------------------

#Define fundamental constants - All physical units in this script will be MKS.
#-----------------------------------------------------------------------------------------
k_B = 1.38065e-23     #Boltzmann constant [J/K]
m_H = 1.67353e-27     #mass of hydrogen atom [kg]
G = 6.67408e-11       #gravitational constant [mks]
AU = 1.49598e11       #astronomical unit [m]
R_earth = 6.371e6     #Earth radius [m]
M_earth = 5.9723e24   #Earth mass [kg]
R_sun = 6.957e8       #Sun radius [m]

#-----------------------------------------------------------------------------------------
#Define the 4-step Runge-Kutta ray integration procedure
#-----------------------------------------------------------------------------------------
#Pass in arrays with the current and constant parameters in this order:
#
#   Current:  [z, lat, beta, xi, tau, ds, ray]
#   Constant: [sigma, nd_ref, z_ref, z_top, nu_ref, H]
#
#Remember: z, lat, beta, xi, & tau are all evaluated through DEs. The other parameters
#  are just being passed through to be found at every point along the integration.

def _ray_derivatives(z, lat, beta, sigma, nd_ref, z_ref, z_top, nu_ref, H):
    """Evaluate one RK4 stage of the ray-path derivatives at (z, lat, beta).

    Returns the tuple (dz, dlat, dbeta, c, dtau) where
      dz    = sin(beta)            -- radial change along the path
      dlat  = cos(beta)/z          -- latitudinal change along the path
      dbeta = dlat + c             -- change in ray direction
      c     = local ray curvature  -- returned separately so xi can be integrated
      dtau  = sigma * n_d          -- optical depth accumulation (constant cross section)

    The atmosphere is queried through the planet_atmosphere module helpers.
    """
    nd = get_number_density(z=z, lat=lat, nd_ref=nd_ref, z_ref=z_ref, H=H)
    dndz = get_drefractivity_dz(z=z, lat=lat, z_top=z_top, nu_ref=nu_ref, z_ref=z_ref, H=H)
    dndlat = get_drefractivity_dlat(z=z, lat=lat)
    c = get_ray_curvature(z=z, lat=lat, beta=beta, dndz=dndz, dndlat=dndlat, z_top=z_top, \
        nu_ref=nu_ref, z_ref=z_ref, H=H)
    dz = sin(beta)
    dlat = cos(beta)/z
    return dz, dlat, dlat + c, c, sigma*nd


def rk4_ray_trace(current, constant):
    """Advance the ray one step of size ds using classical 4th-order Runge-Kutta.

    Parameters
    ----------
    current : sequence
        [z, lat, beta, xi, tau, ds, ray] -- the five integrated quantities, the
        step size ds, and the ray identifier (unused here; carried by the caller).
    constant : sequence
        [sigma, nd_ref, z_ref, z_top, nu_ref, H] -- absorption cross section and
        atmosphere parameters forwarded untouched to the planet_atmosphere helpers.

    Returns
    -------
    numpy array of [z_next, lat_next, beta_next, xi_next, tau_next].

    NOTE(review): the original evaluated get_refractivity() at every stage but never
    used the result; those calls are dropped here. The stage arithmetic is otherwise
    identical to the original (previously duplicated four times inline).
    """
    #Unpack the 'constant' array for more clarity
    sigma, nd_ref, z_ref, z_top, nu_ref, H = constant
    z, lat, beta, xi, tau, ds = current[0], current[1], current[2], current[3], \
        current[4], current[5]
    atm = (sigma, nd_ref, z_ref, z_top, nu_ref, H)

    #Stage 1 of rk4 - derivatives at the initial point.
    z1, lat1, beta1, c1, tau1 = _ray_derivatives(z, lat, beta, *atm)

    #Stage 2 of rk4 - derivatives at the midpoint using the stage-1 slopes.
    z2, lat2, beta2, c2, tau2 = _ray_derivatives(z + 0.5*z1*ds, lat + 0.5*lat1*ds, \
        beta + 0.5*beta1*ds, *atm)

    #Stage 3 of rk4 - derivatives at the midpoint using the stage-2 slopes.
    z3, lat3, beta3, c3, tau3 = _ray_derivatives(z + 0.5*z2*ds, lat + 0.5*lat2*ds, \
        beta + 0.5*beta2*ds, *atm)

    #Stage 4 of rk4 - derivatives at the end point using the stage-3 slopes.
    z4, lat4, beta4, c4, tau4 = _ray_derivatives(z + z3*ds, lat + lat3*ds, \
        beta + beta3*ds, *atm)

    #Find the "next" values of the integrated parameters with the (1,2,2,1)/6 weights.
    z_next = z + (ds/6.)*(z1 + 2.*(z2 + z3) + z4)
    lat_next = lat + (ds/6.)*(lat1 + 2.*(lat2 + lat3) + lat4)
    beta_next = beta + (ds/6.)*(beta1 + 2.*(beta2 + beta3) + beta4)
    xi_next = xi + (ds/6.)*(c1 + 2.*(c2 + c3) + c4)
    tau_next = tau + (ds/6.)*(tau1 + 2.*(tau2 + tau3) + tau4)

    #We must add a block that checks for a negative z-value. This is not realistic, but
    # can occur in the case of a radially traveling ray. In this case, the z, lat, beta,
    # and xi values must be manually adjusted due to the singularity that exists when z
    # goes to zero.
    if z_next < 0.:
        #The z-value should actually just be the step size minus the current z-value.
        z_next = ds - z
        #As the radial ray passes through zero, its latitude must jump by pi to the
        # other hemisphere. The beta value also jumps by pi (from negative pi/2 to
        # positive pi/2) as the ray passes through the center. The ray curvature and
        # therefore the value of xi is also affected by the blown up beta values, so we
        # force there to be no added curvature and just set xi_next as its previous
        # value.
        lat_next = lat + pi
        beta_next = beta + pi
        xi_next = xi

    return array([z_next, lat_next, beta_next, xi_next, tau_next])

#-----------------------------------------------------------------------------------------
12MegaA21/LG-FedAvg
models/Update.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python version: 3.6

import torch
from torch import nn
from torch.utils.data import DataLoader, Dataset
from tqdm import tqdm
import math
import pdb


class DatasetSplit(Dataset):
    """Dataset view that exposes only the samples at the given indices.

    Used to carve one client's shard out of a shared dataset in federated
    training: *idxs* is the list of sample indices belonging to that client.
    """

    def __init__(self, dataset, idxs):
        self.dataset = dataset
        # Materialize as a list so integer positions index reliably.
        self.idxs = list(idxs)

    def __len__(self):
        return len(self.idxs)

    def __getitem__(self, item):
        # Translate the shard-local position into the underlying dataset index.
        image, label = self.dataset[self.idxs[item]]
        return image, label


class LocalUpdate(object):
    """Performs local SGD epochs on a single client's data shard (FedAvg-style)."""

    def __init__(self, args, dataset=None, idxs=None, pretrain=False):
        self.args = args
        self.loss_func = nn.CrossEntropyLoss()
        self.selected_clients = []
        # Loader over this client's shard only; batch size comes from args.
        self.ldr_train = DataLoader(DatasetSplit(dataset, idxs), batch_size=self.args.local_bs, shuffle=True)
        self.pretrain = pretrain

    def train(self, net, idx=-1, lr=0.1):
        """Run the client's local training epochs on *net*.

        Returns (net.state_dict(), mean epoch loss).
        NOTE(review): *idx* is accepted but unused here; `iter` shadows the builtin.
        """
        net.train()
        # train and update
        optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=0.5)

        epoch_loss = []
        # Pretraining rounds may use a different local-epoch budget.
        if self.pretrain:
            local_eps = self.args.local_ep_pretrain
        else:
            local_eps = self.args.local_ep
        for iter in range(local_eps):
            batch_loss = []
            for batch_idx, (images, labels) in enumerate(self.ldr_train):
                images, labels = images.to(self.args.device), labels.to(self.args.device)
                net.zero_grad()
                log_probs = net(images)
                loss = self.loss_func(log_probs, labels)
                loss.backward()
                optimizer.step()
                batch_loss.append(loss.item())
            epoch_loss.append(sum(batch_loss)/len(batch_loss))
        return net.state_dict(), sum(epoch_loss) / len(epoch_loss)


class LocalUpdateMTL(object):
    """Local update with a multi-task-learning regularizer over shared weights."""

    def __init__(self, args, dataset=None, idxs=None, pretrain=False):
        self.args = args
        self.loss_func = nn.CrossEntropyLoss()
        self.selected_clients = []
        self.ldr_train = DataLoader(DatasetSplit(dataset, idxs), batch_size=self.args.local_bs, shuffle=True)
        self.pretrain = pretrain

    def train(self, net, lr=0.1, omega=None, W_glob=None, idx=None, w_glob_keys=None):
        """Train locally while penalizing deviation of shared weights.

        W_glob holds the flattened shared weights of all clients (one column per
        client); this client's column *idx* is replaced by its current weights
        before computing the regularizer. *omega* is presumably the MTL
        task-relationship matrix -- confirm against the caller.

        Returns (net.state_dict(), mean epoch loss).
        """
        net.train()
        # train and update
        optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=0.5)

        epoch_loss = []
        if self.pretrain:
            local_eps = self.args.local_ep_pretrain
        else:
            local_eps = self.args.local_ep

        for iter in range(local_eps):
            batch_loss = []
            for batch_idx, (images, labels) in enumerate(self.ldr_train):
                images, labels = images.to(self.args.device), labels.to(self.args.device)
                net.zero_grad()
                log_probs = net(images)
                loss = self.loss_func(log_probs, labels)

                # Substitute this client's current shared weights into its column.
                W = W_glob.clone()
                W_local = [net.state_dict(keep_vars=True)[key].flatten() for key in w_glob_keys]
                W_local = torch.cat(W_local)
                W[:, idx] = W_local

                loss_regularizer = 0
                loss_regularizer += W.norm() ** 2

                # Process W in chunks of k rows to bound the size of the
                # x.mm(omega).mm(x.T) intermediate.
                k = 4000
                for i in range(W.shape[0] // k):
                    x = W[i * k:(i+1) * k, :]
                    loss_regularizer += x.mm(omega).mm(x.T).trace()
                # Scale the regularizer down by roughly the magnitude of W's row count.
                f = (int)(math.log10(W.shape[0])+1) + 1
                loss_regularizer *= 10 ** (-f)

                loss = loss + loss_regularizer
                loss.backward()
                optimizer.step()

                batch_loss.append(loss.item())
            epoch_loss.append(sum(batch_loss)/len(batch_loss))

        return net.state_dict(), sum(epoch_loss) / len(epoch_loss)
npmccord/LyricsGenius
lyricsgenius/artist.py
import json, os


class Artist(object):
    """An artist from the Genius.com database.

    Attributes:
        name: (str) Artist name.
        num_songs: (int) Total number of songs listed on Genius.com
    """

    def __init__(self, json_dict):
        """Populate the Artist object with the data from *json_dict*."""
        self._body = json_dict['artist']
        self._url = self._body['url']
        self._api_path = self._body['api_path']
        self._id = self._body['id']
        self._songs = []
        self._num_songs = len(self._songs)

    def __len__(self):
        return 1

    @property
    def name(self):
        return self._body['name']

    @property
    def image_url(self):
        # Not every artist record carries an image; treat a missing key as "no image".
        # (Narrowed from a bare except, which also hid unrelated errors.)
        try:
            return self._body['image_url']
        except KeyError:
            return None

    @property
    def songs(self):
        return self._songs

    @property
    def num_songs(self):
        return self._num_songs

    def add_song(self, newsong, verbose=True):
        """Add a Song object to the Artist object.

        Returns 0 on success, 1 if the song is a duplicate or belongs to a
        different artist.
        """
        if any(song.title == newsong.title for song in self._songs):
            if verbose:
                print('{newsong.title} already in {self.name}, not adding song.'.format(newsong=newsong, self=self))
            return 1  # Failure
        if newsong.artist == self.name:
            self._songs.append(newsong)
            self._num_songs += 1
            return 0  # Success
        else:
            if verbose:
                print("Can't add song by {newsong.artist}, artist must be {self.name}.".format(newsong=newsong, self=self))
            return 1  # Failure

    def get_song(self, song_name):
        """Search Genius.com for *song_name* and add it to artist (not implemented)."""
        # Artist has no handle on the Genius client, so this cannot call
        # search_song() yet. (Dead code that followed the raise was removed.)
        raise NotImplementedError("I need to figure out how to allow Artist() to access search_song().")

    # TODO: define an export_to_json() method

    def save_lyrics(self, format='json', filename=None, overwrite=False, skip_duplicates=True, verbose=True):
        """Allows user to save all lyrics within an Artist object to a .json or .txt file.

        Returns the object written (a dict for json, a string for txt).
        """
        if format[0] == '.':
            format = format[1:]
        assert (format == 'json') or (format == 'txt'), "Format must be json or txt"

        # We want to reject songs that have already been added to artist collection
        def songsAreSame(s1, s2):
            from difflib import SequenceMatcher as sm  # For comparing similarity of lyrics
            # Idea credit: https://bigishdata.com/2016/10/25/talkin-bout-trucks-beer-and-love-in-country-songs-analyzing-genius-lyrics/
            seqA = sm(None, s1.lyrics, s2['lyrics'])
            seqB = sm(None, s2['lyrics'], s1.lyrics)
            return seqA.ratio() > 0.5 or seqB.ratio() > 0.5

        def songInArtist(new_song):
            # Only meaningful in the json branch, where lyrics_to_write is a dict.
            for song in lyrics_to_write['songs']:
                if songsAreSame(new_song, song):
                    return True
            return False

        # Determine the filename
        if filename is None:
            filename = "Lyrics_{}.{}".format(self.name.replace(" ", ""), format)
        else:
            # Bug fix: the original sliced filename[filename.rfind('.'):], which kept
            # the OLD extension and dropped the base name. splitext strips any
            # extension correctly (including dotless paths like './dir/file').
            filename = os.path.splitext(filename)[0] + '.' + format

        # Check if file already exists
        write_file = False
        if not os.path.isfile(filename):
            write_file = True
        elif overwrite:
            write_file = True
        else:
            if input("{} already exists. Overwrite?\n(y/n): ".format(filename)).lower() == 'y':
                write_file = True

        # Format lyrics in either .txt or .json format
        if format == 'json':
            lyrics_to_write = {'songs': [], 'artist': self.name}
            for song in self.songs:
                # NOTE(review): the duplicate scan is O(n^2) over lyrics similarity;
                # acceptable for typical collections but slow for very large ones.
                if skip_duplicates is False or not songInArtist(song):
                    lyrics_to_write['songs'].append({})
                    lyrics_to_write['songs'][-1]['title'] = song.title
                    lyrics_to_write['songs'][-1]['album'] = song.album
                    lyrics_to_write['songs'][-1]['year'] = song.year
                    lyrics_to_write['songs'][-1]['lyrics'] = song.lyrics
                    lyrics_to_write['songs'][-1]['image'] = song.song_art_image_url
                    lyrics_to_write['songs'][-1]['artist'] = self.name
                    lyrics_to_write['songs'][-1]['raw'] = song._body
                else:
                    if verbose:
                        print("SKIPPING \"{}\" -- already found in artist collection.".format(song.title))
        else:
            lyrics_to_write = " ".join([s.lyrics + 5*'\n' for s in self.songs])

        # Write the lyrics to either a .json or .txt file
        if write_file:
            with open(filename, 'w') as lyrics_file:
                if format == 'json':
                    json.dump(lyrics_to_write, lyrics_file)
                else:
                    lyrics_file.write(lyrics_to_write)
            if verbose:
                print('Wrote {} songs to {}.'.format(self.num_songs, filename))
        else:
            if verbose:
                print('Skipping file save.\n')

        return lyrics_to_write

    def __str__(self):
        """Return a string representation of the Artist object."""
        if self._num_songs == 1:
            return '{0}, {1} song'.format(self.name, self._num_songs)
        else:
            return '{0}, {1} songs'.format(self.name, self._num_songs)

    def __repr__(self):
        return repr((self.name, '{0} songs'.format(self._num_songs)))
npmccord/LyricsGenius
lyricsgenius/__main__.py
# Command line usage:
#  $python3 -m lyricsgenius --search-song 'Begin Again' '<NAME>'
#  $python3 -m lyricsgenius --search-artist '<NAME>' 3

import sys
import os

import lyricsgenius as genius

USAGE = "Usage: python -m lyricsgenius [--search-song song_name] [--search-artist artist_name num_songs]"


def main(args=None):
    """Command-line entry point.

    *args* is the argument list WITHOUT the program name (defaults to
    sys.argv[1:]). Bug fix: the original accepted *args* but then indexed
    sys.argv directly, so an explicit *args* was ignored and a wrong argument
    count crashed with IndexError/UnboundLocalError instead of printing usage.
    """
    if args is None:
        args = sys.argv[1:]

    client_access_token = os.environ.get("GENIUS_CLIENT_ACCESS_TOKEN", None)
    assert client_access_token is not None, "Must declare environment variable: GENIUS_CLIENT_ACCESS_TOKEN"
    api = genius.Genius(client_access_token)

    if args and args[0] == '--search-song':
        if len(args) == 3:
            song = api.search_song(args[1], args[2])
        elif len(args) == 2:
            song = api.search_song(args[1])
        else:
            print(USAGE)
            return
        print('"{title}" by {artist}:\n    {lyrics}'.format(title=song.title, artist=song.artist, lyrics=song.lyrics.replace('\n', '\n    ')))
    elif args and args[0] == '--search-artist':
        # Optional third argument caps the number of songs (default 5).
        if len(args) >= 3:
            max_songs = int(args[2])
        else:
            max_songs = 5
        artist = api.search_artist(args[1], max_songs=max_songs)
        print("Saving {} lyrics...".format(artist.name))
        api.save_artist_lyrics(artist)
    else:
        print(USAGE)
    return


if __name__ == "__main__":
    main()
npmccord/LyricsGenius
tests/test_genius.py
import os
import unittest

import lyricsgenius
from lyricsgenius.song import Song
from lyricsgenius.artist import Artist

# Import client access token from environment variable
client_access_token = os.environ.get("GENIUS_CLIENT_ACCESS_TOKEN", None)
assert client_access_token is not None, "Must declare environment variable: GENIUS_CLIENT_ACCESS_TOKEN"
api = lyricsgenius.Genius(client_access_token, sleep_time=1)


def _check_save_lyrics(test, obj, format):
    """Shared save/overwrite check used by all four test_saving_* methods.

    Saves *obj*'s lyrics in *format*, asserts the file appears, then exercises
    the overwrite path. Replaces four copy-pasted blocks that used bare
    `except:` and could double-delete the file.
    """
    print('\n')
    msg = "Could not save {} file.".format(format)
    expected_filename = 'tests/lyrics_save_test_file.' + format
    filename = expected_filename.split('.')[0]

    # Remove the test file if it already exists
    if os.path.isfile(expected_filename):
        os.remove(expected_filename)

    # Test saving the file
    obj.save_lyrics(filename=filename, format=format)
    test.assertTrue(os.path.isfile(expected_filename), msg)

    # Test overwriting the file; always clean up afterwards
    try:
        obj.save_lyrics(filename=filename, format=format, overwrite=True)
    except Exception:
        test.fail("Failed {} overwrite test".format(format))
    finally:
        if os.path.isfile(expected_filename):
            os.remove(expected_filename)


class TestArtist(unittest.TestCase):
    """Exercises Artist construction, add_song rules, and lyric saving."""

    @classmethod
    def setUpClass(cls):
        print("\n---------------------\nSetting up Artist tests...\n")
        cls.artist_name = "The Beatles"
        cls.new_song = "Paperback Writer"
        cls.max_songs = 2
        cls.artist = api.search_artist(
            cls.artist_name, max_songs=cls.max_songs)

    def test_artist(self):
        msg = "The returned object is not an instance of the Artist class."
        self.assertIsInstance(self.artist, Artist, msg)

    def test_name(self):
        msg = "The artist object name does not match the requested artist name."
        self.assertEqual(self.artist.name, self.artist_name, msg)

    def test_add_song_from_same_artist(self):
        msg = "The new song was not added to the artist object."
        self.artist.add_song(api.search_song(self.new_song, self.artist_name))
        self.assertEqual(self.artist.num_songs, self.max_songs+1, msg)

    def test_add_song_from_different_artist(self):
        msg = "A song from a different artist was incorrectly allowed to be added."
        self.artist.add_song(api.search_song("These Days", "<NAME>"))
        self.assertEqual(self.artist.num_songs, self.max_songs, msg)

    def test_saving_json_file(self):
        _check_save_lyrics(self, self.artist, 'json')

    def test_saving_txt_file(self):
        _check_save_lyrics(self, self.artist, 'txt')


class TestSong(unittest.TestCase):
    """Exercises Song metadata, lyric scraping, and lyric saving."""

    @classmethod
    def setUpClass(cls):
        print("\n---------------------\nSetting up Song tests...\n")
        cls.artist_name = '<NAME>'
        cls.song_title = 'begin again'  # Lowercase is intentional
        cls.album = 'The Party'
        cls.year = '2016-05-20'
        cls.song = api.search_song(cls.song_title, cls.artist_name)
        cls.song_trimmed = api.search_song(
            cls.song_title, cls.artist_name, remove_section_headers=True)

    def test_song(self):
        msg = "The returned object is not an instance of the Song class."
        self.assertIsInstance(self.song, Song, msg)

    def test_title(self):
        msg = "The returned song title does not match the title of the requested song."
        self.assertEqual(api._clean_str(self.song.title),
                         api._clean_str(self.song_title), msg)

    def test_artist(self):
        msg = "The returned artist name does not match the artist of the requested song."
        self.assertEqual(self.song.artist, self.artist_name, msg)

    def test_album(self):
        msg = "The returned album name does not match the album of the requested song."
        self.assertEqual(self.song.album, self.album, msg)

    def test_year(self):
        msg = "The returned year does not match the year of the requested song"
        self.assertEqual(self.song.year, self.year, msg)

    def test_lyrics_raw(self):
        lyrics = '[Verse 1: <NAME>]'
        self.assertTrue(self.song.lyrics.startswith(lyrics))

    def test_lyrics_no_section_headers(self):
        lyrics = 'Begin again\nThis time you should take a bow at the'
        self.assertTrue(self.song_trimmed.lyrics.startswith(lyrics))

    def test_media(self):
        msg = "The returned song does not have a media attribute."
        self.assertTrue(hasattr(self.song, 'media'), msg)

    def test_saving_json_file(self):
        _check_save_lyrics(self, self.song, 'json')

    def test_saving_txt_file(self):
        _check_save_lyrics(self, self.song, 'txt')
npmccord/LyricsGenius
lyricsgenius/api.py
# Usage:
#   import genius
#   api = genius.Genius('my_client_access_token_here')
#   artist = api.search_artist('<NAME>', max_songs=5)
#   print(artist)
#
#   song = api.search_song('To You', artist.name)
#   artist.add_song(song)
#   print(artist)
#   print(artist.songs[-1])

import sys
from urllib.request import Request, urlopen, quote
import os
import re
import requests
import socket
import json
from bs4 import BeautifulSoup
from string import punctuation
import time
from warnings import warn

from .song import Song
from .artist import Artist


class _API(object):
    # This is a superclass that Genius() inherits from: it handles the lower-level
    # interaction with the Genius API, while Genius() exposes the user-friendly
    # search functions.
    """Interface with the Genius.com API

    Attributes:
        base_url: (str) Top-most URL to access the Genius.com API with

    Methods:
        _load_credentials()
            OUTPUT: client_id, client_secret, client_access_token
        _make_api_request()
            INPUT:
            OUTPUT:
    """

    # Genius API constants
    _API_URL = "https://api.genius.com/"
    _API_REQUEST_TYPES = {
        'song': 'songs/',
        'artist': 'artists/',
        'artist-songs': 'artists/songs/',
        'search': 'search?q='
    }

    def __init__(self, client_access_token, client_secret='', client_id='', sleep_time=0):
        self._CLIENT_ACCESS_TOKEN = client_access_token
        self._HEADER_AUTHORIZATION = 'Bearer ' + self._CLIENT_ACCESS_TOKEN
        self._sleep_time = sleep_time  # pause between requests (rate-limiting)

    def _make_api_request(self, request_term_and_type, page=1):
        """Send a request (song, artist, or search) to the Genius API, returning a json object

        INPUT:
            request_term_and_type: (tuple) (request_term, request_type)

        *request term* is a string. If *request_type* is 'search', then
        *request_term* is just what you'd type into the search box on
        Genius.com. If you have a song ID or an artist ID, you'd do this:
        self._make_api_request(('2236', 'song'))

        Returns a json object.
        """
        # TODO: This should maybe be a generator

        # The API request URL must be formatted according to the desired request type
        api_request = self._format_api_request(request_term_and_type, page=page)

        # Add the necessary headers to the request
        request = Request(api_request)
        request.add_header("Authorization", self._HEADER_AUTHORIZATION)
        request.add_header("User-Agent", "LyricsGenius")
        while True:
            try:
                # timeout set to 4 seconds; automatically retries if it times out
                # NOTE(review): this retry loop is unbounded -- a persistently
                # unreachable host will spin forever. Consider a retry cap.
                response = urlopen(request, timeout=4)
                raw = response.read().decode('utf-8')
            except socket.timeout:
                print("Timeout raised and caught")
                continue
            break

        time.sleep(self._sleep_time)  # rate limiting
        return json.loads(raw)['response']

    def _format_api_request(self, term_and_type, page=1):
        """Format the request URL depending on the type of request"""
        request_term, request_type = str(term_and_type[0]), term_and_type[1]
        assert request_type in self._API_REQUEST_TYPES, "Unknown API request type"

        # TODO - Clean this up (might not need separate returns)
        if request_type == 'artist-songs':
            return self._API_URL + 'artists/' + quote(request_term) + '/songs?per_page=50&page=' + str(page)
        else:
            return self._API_URL + self._API_REQUEST_TYPES[request_type] + quote(request_term)

    def _scrape_song_lyrics_from_url(self, URL, remove_section_headers=False):
        """Use BeautifulSoup to scrape song info off of a Genius song URL"""
        page = requests.get(URL)
        html = BeautifulSoup(page.text, "html.parser")

        # Scrape the song lyrics from the HTML
        lyrics = html.find("div", class_="lyrics").get_text()
        if remove_section_headers:
            # Remove [Verse] and [Bridge] stuff
            lyrics = re.sub(r'(\[.*?\])*', '', lyrics)
            # Remove gaps between verses
            lyrics = re.sub(r'\n{2}', '\n', lyrics)

        return lyrics.strip('\n')

    def _clean_str(self, s):
        """Lowercase *s* and strip punctuation/zero-width spaces for loose matching."""
        return s.translate(str.maketrans('', '', punctuation)).replace('\u200b', " ").strip().lower()

    def _result_is_lyrics(self, song_title):
        """Returns False if result from Genius is not actually song lyrics"""
        regex = re.compile(
            r"(tracklist)|(track list)|(album art(work)?)|(liner notes)|(booklet)|(credits)|(remix)|(interview)|(skit)", re.IGNORECASE)
        return not regex.search(song_title)


class Genius(_API):
    """User-level interface with the Genius.com API. User can search for songs
    (getting lyrics) and artists (getting songs)"""

    def search_song(self, song_title, artist_name="", take_first_result=False, verbose=True,
                    remove_section_headers=False, remove_non_songs=True):
        # TODO: Should search_song() be a @classmethod?
        """Search Genius.com for *song_title* by *artist_name*.

        Returns a Song object, or None if no acceptable match is found.
        """
        # Perform a Genius API search for the song
        if verbose:
            if artist_name != "":
                print('Searching for "{0}" by {1}...'.format(song_title, artist_name))
            else:
                print('Searching for "{0}"...'.format(song_title))
        search_term = "{} {}".format(song_title, artist_name)
        json_search = self._make_api_request((search_term, 'search'))

        # Loop through search results, stopping as soon as title and artist of
        # result match request
        n_hits = min(10, len(json_search['hits']))
        for i in range(n_hits):
            search_hit = json_search['hits'][i]['result']
            found_song = self._clean_str(search_hit['title'])
            found_artist = self._clean_str(search_hit['primary_artist']['name'])

            # Download song from Genius.com if title and artist match the request.
            # (Parentheses added for clarity only -- same operator precedence as
            # the original expression.)
            if take_first_result or \
                    (found_song == self._clean_str(song_title) and found_artist == self._clean_str(artist_name)) or \
                    artist_name == "":

                # Remove non-song results (e.g. Linear Notes, Tracklists, etc.)
                song_is_valid = self._result_is_lyrics(found_song) if remove_non_songs else True
                if song_is_valid:
                    # Found correct song, accessing API ID
                    json_song = self._make_api_request((search_hit['id'], 'song'))

                    # Scrape the song's HTML for lyrics
                    lyrics = self._scrape_song_lyrics_from_url(json_song['song']['url'], remove_section_headers)

                    # Create the Song object
                    song = Song(json_song, lyrics)
                    if verbose:
                        print('Done.')
                    return song
                else:
                    if verbose:
                        print('Specified song does not contain lyrics. Rejecting.')
                    return None

        if verbose:
            print('Specified song was not first result :(')
        return None

    def search_artist(self, artist_name, verbose=True, max_songs=None, take_first_result=False,
                      get_full_song_info=True, remove_section_headers=False, remove_non_songs=True):
        """Allow user to search for an artist on the Genius.com database by supplying an artist name.
        Returns an Artist() object containing all songs for that particular artist."""
        if verbose:
            print('Searching for songs by {0}...\n'.format(artist_name))

        # Perform a Genius API search for the artist
        json_search = self._make_api_request((artist_name, 'search'))
        first_result, artist_id = None, None
        for hit in json_search['hits']:
            found_artist = hit['result']['primary_artist']
            if first_result is None:
                first_result = found_artist
            artist_id = found_artist['id']
            if take_first_result or self._clean_str(found_artist['name'].lower()) == self._clean_str(artist_name.lower()):
                artist_name = found_artist['name']
                break
            else:
                # Check for searched name in the artist's alternate names
                json_artist = self._make_api_request((artist_id, 'artist'))['artist']
                if artist_name.lower() in [s.lower() for s in json_artist['alternate_names']]:
                    if verbose:
                        print("Found alternate name. Changing name to {}.".format(json_artist['name']))
                    artist_name = json_artist['name']
                    break
            # This hit did not match; forget its ID before trying the next one.
            artist_id = None

        # Fall back to asking the user about the first hit interactively.
        if first_result is not None and artist_id is None and verbose:
            if input("Couldn't find {}. Did you mean {}? (y/n): ".format(artist_name, first_result['name'])).lower() == 'y':
                artist_name, artist_id = first_result['name'], first_result['id']
        assert (artist_id is not None), "Could not find artist. Check spelling?"

        # Make Genius API request for the determined artist ID
        json_artist = self._make_api_request((artist_id, 'artist'))
        # Create the Artist object
        artist = Artist(json_artist)

        if max_songs is None or max_songs > 0:
            # Access the api_path found by searching
            artist_search_results = self._make_api_request((artist_id, 'artist-songs'))

            # Download each song by artist, store as Song objects in Artist object
            keep_searching = True
            next_page = 0
            n = 0
            while keep_searching:
                for json_song in artist_search_results['songs']:
                    # TODO: Shouldn't I use self.search_song() here?

                    # Songs must have a title
                    if 'title' not in json_song:
                        json_song['title'] = 'MISSING TITLE'

                    # Remove non-song results (e.g. Linear Notes, Tracklists, etc.)
                    song_is_valid = self._result_is_lyrics(json_song['title']) if remove_non_songs else True

                    if song_is_valid:
                        # Scrape song lyrics from the song's HTML
                        lyrics = self._scrape_song_lyrics_from_url(json_song['url'], remove_section_headers)

                        # Create song object for current song
                        if get_full_song_info:
                            song = Song(self._make_api_request((json_song['id'], 'song')), lyrics)
                        else:
                            song = Song({'song': json_song}, lyrics)  # Faster, less info from API

                        # Add song to the Artist object
                        if artist.add_song(song, verbose=False) == 0:
                            n += 1
                            if verbose:
                                print('Song {0}: "{1}"'.format(n, song.title))
                    else:  # Song does not contain lyrics
                        if verbose:
                            print('"{title}" does not contain lyrics. Rejecting.'.format(title=json_song['title']))

                    # Check if user specified a max number of songs for the artist
                    if max_songs is not None:
                        if artist.num_songs >= max_songs:
                            keep_searching = False
                            if verbose:
                                print('\nReached user-specified song limit ({0}).'.format(max_songs))
                            break

                # Move on to the next page of search results
                next_page = artist_search_results['next_page']
                if next_page is None:
                    break
                else:
                    # Get next page of artist song results
                    artist_search_results = self._make_api_request((artist_id, 'artist-songs'), page=next_page)

            if verbose:
                print('Found {n_songs} songs.'.format(n_songs=artist.num_songs))

        if verbose:
            print('Done.')
        return artist

    def save_artists(self, artists, filename="artist_lyrics", overwrite=False):
        """Pass a list of Artist objects to save multiple artists"""
        if isinstance(artists, Artist):
            artists = [artists]
        assert isinstance(artists, list), "Must pass in list of Artist objects."

        # Create a temporary directory for lyrics
        start = time.time()
        tmp_dir = 'tmp_lyrics'
        if not os.path.isdir(tmp_dir):
            os.mkdir(tmp_dir)
            tmp_count = 0
        else:
            tmp_count = len(os.listdir('./' + tmp_dir))

        # Check if the combined file already exists; prompt before clobbering.
        # (The original also assigned a write_file flag here that was never read.)
        if not os.path.isfile(filename + ".json"):
            pass
        elif overwrite:
            pass
        else:
            if input("{} already exists. Overwrite?\n(y/n): ".format(filename)).lower() != 'y':
                print("Leaving file in place. Exiting.")
                os.rmdir(tmp_dir)
                return

        # Extract each artist's lyrics in json format
        all_lyrics = {'artists': []}
        for n, artist in enumerate(artists):
            if isinstance(artist, Artist):
                all_lyrics['artists'].append({})
                tmp_file = "./{dir}/tmp_{num}_{name}".format(dir=tmp_dir, num=n+tmp_count, name=artist.name.replace(" ", ""))
                print(tmp_file)
                all_lyrics['artists'][-1] = artist.save_lyrics(filename=tmp_file, overwrite=True)
            else:
                warn("Item #{} was not of type Artist. Skipping.".format(n))

        # Save all of the lyrics
        with open(filename + '.json', 'w') as outfile:
            json.dump(all_lyrics, outfile)

        end = time.time()
        print("Time elapsed: {} hours".format((end-start)/60.0/60.0))
MadhaviRockandla/Py-BasicStats
statzcw/stats.py
from typing import List import math def zcount(list: List[float]) -> float: return len(list) def zmean(list: List[float]) -> float: return sum(list) / zcount(list) def zmode(list: List[float]) -> float: return max(set(list), key=list.count) def zmedian(list: List[float]) -> float: sorted_list = sorted(list) n = len(list) index = (n - 1) // 2 return sorted_list[index] def zvariance(list: List[float]) -> float: n = zcount(list) - 1 mean = zmean(list) # mean of data mean=sum(data) / n deviations = [abs(mean - xi) ** 2 for xi in list] # square deviations variance = sum(deviations) / n # varience return variance def zstddev(list: List[float]) -> float: return math.sqrt(zvariance(list)) def zstderr(list: List[float]) -> float: std_err = zstddev(list) / (math.sqrt(zcount(list))) return std_err def zcorr(listx: List[float], listy: List[float]) -> float: statx = zstddev(listx) staty = zstddev(listy) cov = zcov(listx, listy) return cov / (statx * staty) def cov(listx: List[float], listy: List[float]) -> float: sum = 0 if len(listx) == len(listy): for i in range(len(listx)): sum += ((listx[i] - zmean(listx)) * (listy[i] - zmean(listy))) cov = sum / (len(listx) - 1) return cov def readDataSets(files): data = {} for file in files: twoLists = readDataFile(file) data[file] = twoLists return data def readDataFile(file_name): x, y = [], [] with open(file_name) as f: first_line = f.readline() for line in f: row = line.split(',') x.append(float(row[0])) y.append(float(row[1])) return x, y
mriedem/qiskit-experiments
qiskit_experiments/library/characterization/cr_hamiltonian.py
# This code is part of Qiskit. # # (C) Copyright IBM 2021. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ Cross resonance Hamiltonian tomography. """ from typing import List, Tuple, Iterable, Optional, Type import warnings import numpy as np from qiskit import pulse, circuit, QuantumCircuit from qiskit.circuit.parameterexpression import ParameterValueType from qiskit.exceptions import QiskitError from qiskit.providers import Backend from qiskit_experiments.framework import BaseExperiment, Options from qiskit_experiments.library.characterization.analysis import CrossResonanceHamiltonianAnalysis class CrossResonanceHamiltonian(BaseExperiment): r"""Cross resonance Hamiltonian tomography experiment. # section: overview This experiment assumes the two qubit Hamiltonian in the form .. math:: H = \frac{I \otimes A}{2} + \frac{Z \otimes B}{2} where :math:`A` and :math:`B` are linear combinations of the Pauli operators :math:`\in {X, Y, Z}`. The coefficient of each Pauli term in the Hamiltonian can be estimated with this experiment. This experiment is performed by stretching the pulse duration of a cross resonance pulse and measuring the target qubit by projecting onto the x, y, and z bases. The control qubit state dependent (controlled-) Rabi oscillation on the target qubit is observed by repeating the experiment with the control qubit both in the ground and excited states. The fit for the oscillations in the three bases with the two control qubit preparations tomographically reconstructs the Hamiltonian in the form shown above. See Ref. [1] for more details. 
More specifically, the following circuits are executed in this experiment. .. parsed-literal:: (X measurement) ┌───┐┌────────────────────┐ q_0: ┤ P ├┤0 ├──────── └───┘│ cr_tone(duration) │┌───┐┌─┐ q_1: ─────┤1 ├┤ H ├┤M├ └────────────────────┘└───┘└╥┘ c: 1/═════════════════════════════════╩═ 0 (Y measurement) ┌───┐┌────────────────────┐ q_0: ┤ P ├┤0 ├─────────────── └───┘│ cr_tone(duration) │┌─────┐┌───┐┌─┐ q_1: ─────┤1 ├┤ Sdg ├┤ H ├┤M├ └────────────────────┘└─────┘└───┘└╥┘ c: 1/════════════════════════════════════════╩═ 0 (Z measurement) ┌───┐┌────────────────────┐ q_0: ┤ P ├┤0 ├─── └───┘│ cr_tone(duration) │┌─┐ q_1: ─────┤1 ├┤M├ └────────────────────┘└╥┘ c: 1/════════════════════════════╩═ 0 The ``P`` gate on the control qubit (``q_0``) indicates the state preparation. Since this experiment requires two sets of sub experiments with the control qubit in the excited and ground state, ``P`` will become ``X`` gate or just be omitted, respectively. Here ``cr_tone`` is implemented by a single cross resonance tone driving the control qubit at the frequency of the target qubit. The pulse envelope is the flat-topped Gaussian implemented by the parametric pulse :py:class:`~qiskit.pulse.library.parametric_pulses.GaussianSquare`. This experiment scans the flat-top width of the :py:class:`~qiskit.pulse.library.\ parametric_pulses.GaussianSquare` envelope with the fixed rising and falling edges. The total pulse duration is implicitly computed to meet the timing constraints of the target backend. The edge duration is usually computed as .. math:: \tau_{\rm edges} = 2 r \sigma, where the :math:`r` is the ratio of the actual edge duration to :math:`\sigma` of the Gaussian rising and falling edges. Note that actual edge duration is not identical to the net duration because of the smaller pulse amplitude of the edges. The net edge duration is an extra fitting parameter with initial guess .. 
math:: \tau_{\rm edges}' = \sqrt{2 \pi} \sigma, which is derived by assuming a square edges with the full pulse amplitude. # section: analysis_ref :py:class:`CrossResonanceHamiltonianAnalysis` # section: reference .. ref_arxiv:: 1 1603.04821 # section: tutorial .. ref_website:: Qiskit Textbook 6.7, https://qiskit.org/textbook/ch-quantum-hardware/hamiltonian-tomography.html """ # Number of CR pulses. The flat top duration per pulse is divided by this number. num_pulses = 1 class CRPulseGate(circuit.Gate): """A pulse gate of cross resonance. Definition should be provided via calibration.""" def __init__(self, width: ParameterValueType): super().__init__("cr_gate", 2, [width]) def __init__( self, qubits: Tuple[int, int], flat_top_widths: Iterable[float], backend: Optional[Backend] = None, cr_gate: Optional[Type[circuit.Gate]] = None, **kwargs, ): """Create a new experiment. Args: qubits: Two-value tuple of qubit indices on which to run tomography. The first index stands for the control qubit. flat_top_widths: The total duration of the square part of cross resonance pulse(s) to scan, in units of dt. The total pulse duration including Gaussian rising and falling edges is implicitly computed with experiment parameters ``sigma`` and ``risefall``. backend: Optional, the backend to run the experiment on. cr_gate: Optional, circuit gate instruction of cross resonance pulse. kwargs: Pulse parameters. See :meth:`experiment_options` for details. Raises: QiskitError: When ``qubits`` length is not 2. """ super().__init__(qubits, analysis=CrossResonanceHamiltonianAnalysis(), backend=backend) if len(qubits) != 2: raise QiskitError( "Length of qubits is not 2. Please provide index for control and target qubit." 
) self.set_experiment_options(flat_top_widths=flat_top_widths, **kwargs) self._cr_gate = cr_gate # backend parameters required to run this experiment # random values are populated here but these are immediately updated after backend is set # this is to keep capability of generating circuits just for checking self._dt = 1 self._cr_channel = 0 self._granularity = 1 @classmethod def _default_experiment_options(cls) -> Options: """Default experiment options. Experiment Options: flat_top_widths (np.ndarray): The total duration of the square part of cross resonance pulse(s) to scan, in units of dt. This can start from zero and take positive real values representing the durations. Pulse edge effect is considered as an offset to the durations. amp (complex): Amplitude of the cross resonance tone. amp_t (complex): Amplitude of the cancellation or rotary drive on target qubit. sigma (float): Sigma of Gaussian rise and fall edges, in units of dt. risefall (float): Ratio of edge durations to sigma. """ options = super()._default_experiment_options() options.flat_top_widths = None options.amp = 0.2 options.amp_t = 0.0 options.sigma = 64 options.risefall = 2 return options def _set_backend(self, backend: Backend): super()._set_backend(backend) if self._cr_gate is None: # This falls into CRPulseGate which requires pulse schedule # Extract control channel index try: cr_channels = backend.configuration().control(self.physical_qubits) self._cr_channel = cr_channels[0].index except AttributeError: warnings.warn( f"{backend.name()} doesn't provide cr channel mapping. " "Cannot find proper channel index to play the cross resonance pulse.", UserWarning, ) # Extract pulse granularity try: self._granularity = backend.configuration().timing_constraints["granularity"] except (AttributeError, KeyError): # Probably no chunk size restriction on waveform memory. 
pass # Extract time resolution, this is anyways required for xvalue conversion try: self._dt = backend.configuration().dt except AttributeError: warnings.warn( f"{backend.name()} doesn't provide system time resolution dt. " "Cannot estimate Hamiltonian coefficients in SI units.", UserWarning, ) def _build_cr_circuit(self, pulse_gate: circuit.Gate) -> QuantumCircuit: """Single tone cross resonance. Args: pulse_gate: A pulse gate to represent a single cross resonance pulse. Returns: A circuit definition for the cross resonance pulse to measure. """ cr_circuit = QuantumCircuit(2) cr_circuit.append(pulse_gate, [0, 1]) return cr_circuit def _build_cr_schedule(self, flat_top_width: float) -> pulse.ScheduleBlock: """GaussianSquared cross resonance pulse. Args: flat_top_width: Total length of flat top part of the pulse in units of dt. Returns: A schedule definition for the cross resonance pulse to measure. """ opt = self.experiment_options # Compute valid integer duration duration = flat_top_width + 2 * opt.sigma * opt.risefall valid_duration = int(self._granularity * np.floor(duration / self._granularity)) with pulse.build(default_alignment="left", name="cr") as cross_resonance: # add cross resonance tone pulse.play( pulse.GaussianSquare( duration=valid_duration, amp=opt.amp, sigma=opt.sigma, width=flat_top_width, ), pulse.ControlChannel(self._cr_channel), ) # add cancellation tone if not np.isclose(opt.amp_t, 0.0): pulse.play( pulse.GaussianSquare( duration=valid_duration, amp=opt.amp_t, sigma=opt.sigma, width=flat_top_width, ), pulse.DriveChannel(self.physical_qubits[1]), ) else: pulse.delay(valid_duration, pulse.DriveChannel(self.physical_qubits[1])) # place holder for empty drive channels. this is necessary due to known pulse gate bug. pulse.delay(valid_duration, pulse.DriveChannel(self.physical_qubits[0])) return cross_resonance def circuits(self) -> List[QuantumCircuit]: """Return a list of experiment circuits. Returns: A list of :class:`QuantumCircuit`. 
Raises: AttributeError: When the backend doesn't report the time resolution of waveforms. """ opt = self.experiment_options expr_circs = [] for flat_top_width in opt.flat_top_widths: if self._cr_gate is None: # default pulse gate execution cr_schedule = self._build_cr_schedule(flat_top_width) cr_gate = self.CRPulseGate(flat_top_width) else: cr_schedule = None cr_gate = self._cr_gate(flat_top_width) for control_state in (0, 1): for meas_basis in ("x", "y", "z"): tomo_circ = QuantumCircuit(2, 1) if control_state: tomo_circ.x(0) tomo_circ.compose( other=self._build_cr_circuit(cr_gate), qubits=[0, 1], inplace=True, ) if meas_basis == "x": tomo_circ.h(1) elif meas_basis == "y": tomo_circ.sdg(1) tomo_circ.h(1) tomo_circ.measure(1, 0) tomo_circ.metadata = { "experiment_type": self.experiment_type, "qubits": self.physical_qubits, "xval": flat_top_width * self._dt, # in units of sec "control_state": control_state, "meas_basis": meas_basis, } if isinstance(cr_gate, self.CRPulseGate): # Attach calibration if this is bare pulse gate tomo_circ.add_calibration( gate=cr_gate, qubits=self.physical_qubits, schedule=cr_schedule, ) expr_circs.append(tomo_circ) # Set analysis option for initial guess that depends on experiment option values. edge_duration = np.sqrt(2 * np.pi) * self.experiment_options.sigma * self.num_pulses init_guess = self.analysis.options.p0.copy() init_guess["t_off"] = edge_duration * self._dt self.analysis.set_options(p0=init_guess) return expr_circs class EchoedCrossResonanceHamiltonian(CrossResonanceHamiltonian): r"""Echoed cross resonance Hamiltonian tomography experiment. # section: overview This is a variant of :py:class:`CrossResonanceHamiltonian` for which the experiment framework is identical but the cross resonance operation is realized as an echoed sequence to remove unwanted single qubit rotations. The cross resonance circuit looks like: .. 
parsed-literal:: ┌────────────────────┐ ┌───┐ ┌────────────────────┐ q_0: ┤0 ├──┤ X ├──┤0 ├────────── │ cr_tone(duration) │┌─┴───┴─┐│ cr_tone(duration) │┌────────┐ q_1: ┤1 ├┤ Rz(π) ├┤1 ├┤ Rz(-π) ├ └────────────────────┘└───────┘└────────────────────┘└────────┘ Here two ``cr_tone``s are applied where the latter one is with the control qubit state flipped and with a phase flip of the target qubit frame. This operation is equivalent to applying the ``cr_tone`` with a negative amplitude. The Hamiltonian for this decomposition has no IX and ZI interactions, and also a reduced IY interaction to some extent (not completely eliminated) [1]. Note that the CR Hamiltonian tomography experiment cannot detect the ZI term. However, it is sensitive to the IX and IY terms. # section: reference .. ref_arxiv:: 1 2007.02925 """ num_pulses = 2 def _build_cr_circuit(self, pulse_gate: circuit.Gate) -> QuantumCircuit: """Single tone cross resonance. Args: pulse_gate: A pulse gate to represent a single cross resonance pulse. Returns: A circuit definition for the cross resonance pulse to measure. """ cr_circuit = QuantumCircuit(2) cr_circuit.append(pulse_gate, [0, 1]) cr_circuit.x(0) cr_circuit.rz(np.pi, 1) cr_circuit.append(pulse_gate, [0, 1]) cr_circuit.rz(-np.pi, 1) return cr_circuit
mriedem/qiskit-experiments
qiskit_experiments/test/utils.py
<gh_stars>1-10 # This code is part of Qiskit. # # (C) Copyright IBM 2021. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """Test utility functions.""" import uuid from typing import Optional, Union, Dict from datetime import datetime, timezone from qiskit.providers.job import JobV1 as Job from qiskit.providers.jobstatus import JobStatus from qiskit.providers.backend import BackendV1 as Backend from qiskit.providers import BaseBackend from qiskit.result import Result class FakeJob(Job): """Fake job.""" def __init__(self, backend: Union[Backend, BaseBackend], result: Optional[Result] = None): """Initialize FakeJob.""" if result: job_id = result.job_id else: job_id = uuid.uuid4().hex super().__init__(backend, job_id) self._result = result def result(self): """Return job result.""" return self._result def submit(self): """Submit the job to the backend for execution.""" pass @staticmethod def time_per_step() -> Dict[str, datetime]: """Return the completion time.""" return {"COMPLETED": datetime.now(timezone.utc)} def status(self) -> JobStatus: """Return the status of the job, among the values of ``JobStatus``.""" if self._result: return JobStatus.DONE return JobStatus.RUNNING
mriedem/qiskit-experiments
test/base.py
# This code is part of Qiskit. # # (C) Copyright IBM 2021. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ Qiskit Experiments test case class """ import dataclasses import json import pickle import warnings from typing import Any, Callable, Optional import numpy as np import uncertainties from qiskit.test import QiskitTestCase from qiskit_experiments.data_processing import DataAction, DataProcessor from qiskit_experiments.database_service.db_experiment_data import ExperimentStatus from qiskit_experiments.framework import ( ExperimentDecoder, ExperimentEncoder, ExperimentData, BaseExperiment, BaseAnalysis, ) class QiskitExperimentsTestCase(QiskitTestCase): """Qiskit Experiments specific extra functionality for test cases.""" def assertExperimentDone( self, experiment_data: ExperimentData, timeout: float = 120, ): """Blocking execution of next line until all threads are completed then checks if status returns Done. Args: experiment_data: Experiment data to evaluate. timeout: The maximum time in seconds to wait for executor to complete. """ experiment_data.block_for_results(timeout=timeout) self.assertEqual( experiment_data.status(), ExperimentStatus.DONE, msg="All threads are executed but status is not DONE. " + experiment_data.errors(), ) def assertRoundTripSerializable(self, obj: Any, check_func: Optional[Callable] = None): """Assert that an object is round trip serializable. Args: obj: the object to be serialized. check_func: Optional, a custom function ``check_func(a, b) -> bool`` to check equality of the original object with the decoded object. 
If None the ``__eq__`` method of the original object will be used. """ try: encoded = json.dumps(obj, cls=ExperimentEncoder) except TypeError: self.fail("JSON serialization raised unexpectedly.") try: decoded = json.loads(encoded, cls=ExperimentDecoder) except TypeError: self.fail("JSON deserialization raised unexpectedly.") if check_func is None: self.assertEqual(obj, decoded) else: self.assertTrue(check_func(obj, decoded), msg=f"{obj} != {decoded}") def assertRoundTripPickle(self, obj: Any, check_func: Optional[Callable] = None): """Assert that an object is round trip serializable using pickle module. Args: obj: the object to be serialized. check_func: Optional, a custom function ``check_func(a, b) -> bool`` to check equality of the original object with the decoded object. If None the ``__eq__`` method of the original object will be used. """ try: encoded = pickle.dumps(obj) except TypeError: self.fail("pickle raised unexpectedly.") try: decoded = pickle.loads(encoded) except TypeError: self.fail("pickle deserialization raised unexpectedly.") if check_func is None: self.assertEqual(obj, decoded) else: self.assertTrue(check_func(obj, decoded), msg=f"{obj} != {decoded}") @classmethod def json_equiv(cls, data1, data2) -> bool: """Check if two experiments are equivalent by comparing their configs""" # pylint: disable = too-many-return-statements configrable_type = (BaseExperiment, BaseAnalysis) compare_repr = (DataAction, DataProcessor) list_type = (list, tuple, set) skipped = tuple() if isinstance(data1, skipped) and isinstance(data2, skipped): warnings.warn(f"Equivalence check for data {data1.__class__.__name__} is skipped.") return True elif isinstance(data1, configrable_type) and isinstance(data2, configrable_type): return cls.json_equiv(data1.config(), data2.config()) elif dataclasses.is_dataclass(data1) and dataclasses.is_dataclass(data2): # not using asdict. this copies all objects. 
return cls.json_equiv(data1.__dict__, data2.__dict__) elif isinstance(data1, dict) and isinstance(data2, dict): if set(data1) != set(data2): return False return all(cls.json_equiv(data1[k], data2[k]) for k in data1.keys()) elif isinstance(data1, np.ndarray) or isinstance(data2, np.ndarray): return np.allclose(data1, data2) elif isinstance(data1, list_type) and isinstance(data2, list_type): return all(cls.json_equiv(e1, e2) for e1, e2 in zip(data1, data2)) elif isinstance(data1, uncertainties.UFloat) and isinstance(data2, uncertainties.UFloat): return cls.ufloat_equiv(data1, data2) elif isinstance(data1, compare_repr) and isinstance(data2, compare_repr): # otherwise compare instance representation return repr(data1) == repr(data2) return data1 == data2 @staticmethod def ufloat_equiv(data1: uncertainties.UFloat, data2: uncertainties.UFloat) -> bool: """Check if two values with uncertainties are equal. No correlation is considered.""" return data1.n == data2.n and data1.s == data2.s @classmethod def analysis_result_equiv(cls, result1, result2): """Test two analysis results are equivalent""" # Check basic attributes skipping service which is not serializable for att in [ "name", "value", "extra", "device_components", "result_id", "experiment_id", "chisq", "quality", "verified", "tags", "auto_save", "source", ]: if not cls.json_equiv(getattr(result1, att), getattr(result2, att)): return False return True @classmethod def experiment_data_equiv(cls, data1, data2): """Check two experiment data containers are equivalent""" # Check basic attrbiutes # Skip non-compatible backend for att in [ "experiment_id", "experiment_type", "parent_id", "tags", "job_ids", "figure_names", "share_level", "metadata", ]: if not cls.json_equiv(getattr(data1, att), getattr(data2, att)): return False # Check length of data, results, child_data # check for child data attribute so this method still works for # DbExperimentData if hasattr(data1, "child_data"): child_data1 = data1.child_data() else: 
child_data1 = [] if hasattr(data2, "child_data"): child_data2 = data2.child_data() else: child_data2 = [] if ( len(data1.data()) != len(data2.data()) or len(data1.analysis_results()) != len(data2.analysis_results()) or len(child_data1) != len(child_data2) ): return False # Check data if not cls.json_equiv(data1.data(), data2.data()): return False # Check analysis resultsx for result1, result2 in zip(data1.analysis_results(), data2.analysis_results()): if not cls.analysis_result_equiv(result1, result2): return False # Check child data for child1, child2 in zip(child_data1, child_data2): if not cls.experiment_data_equiv(child1, child2): return False return True
mriedem/qiskit-experiments
qiskit_experiments/framework/composite/composite_experiment.py
<reponame>mriedem/qiskit-experiments # This code is part of Qiskit. # # (C) Copyright IBM 2021. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ Composite Experiment abstract base class. """ from typing import List, Sequence, Optional, Union from abc import abstractmethod import warnings from qiskit.providers.backend import Backend from qiskit_experiments.framework import BaseExperiment, ExperimentData from qiskit_experiments.framework.base_analysis import BaseAnalysis from .composite_analysis import CompositeAnalysis class CompositeExperiment(BaseExperiment): """Composite Experiment base class""" def __init__( self, experiments: List[BaseExperiment], qubits: Sequence[int], backend: Optional[Backend] = None, experiment_type: Optional[str] = None, ): """Initialize the composite experiment object. Args: experiments: a list of experiment objects. qubits: list of physical qubits for the experiment. backend: Optional, the backend to run the experiment on. experiment_type: Optional, composite experiment subclass name. """ self._experiments = experiments self._num_experiments = len(experiments) analysis = CompositeAnalysis([exp.analysis for exp in self._experiments]) super().__init__( qubits, analysis=analysis, backend=backend, experiment_type=experiment_type, ) @abstractmethod def circuits(self): pass @property def num_experiments(self): """Return the number of sub experiments""" return self._num_experiments def component_experiment(self, index=None) -> Union[BaseExperiment, List[BaseExperiment]]: """Return the component Experiment object. 
Args: index (int): Experiment index, or ``None`` if all experiments are to be returned. Returns: BaseExperiment: The component experiment(s). """ if index is None: return self._experiments return self._experiments[index] def component_analysis(self, index=None) -> Union[BaseAnalysis, List[BaseAnalysis]]: """Return the component experiment Analysis object""" warnings.warn( "The `component_analysis` method is deprecated as of " "qiskit-experiments 0.3.0 and will be removed in the 0.4.0 release." " Use `analysis.component_analysis` instead.", DeprecationWarning, stacklevel=2, ) return self.analysis.component_analysis(index) def copy(self) -> "BaseExperiment": """Return a copy of the experiment""" ret = super().copy() # Recursively call copy of component experiments ret._experiments = [exp.copy() for exp in self._experiments] # Check if the analysis in CompositeAnalysis was a reference to the # original component experiment analyses and if so update the copies # to preserve this relationship if isinstance(self.analysis, CompositeAnalysis): for i, orig_exp in enumerate(self._experiments): if orig_exp.analysis is self.analysis._analyses[i]: # Update copies analysis with reference to experiment analysis ret.analysis._analyses[i] = ret._experiments[i].analysis return ret def _set_backend(self, backend): super()._set_backend(backend) for subexp in self._experiments: subexp._set_backend(backend) def _initialize_experiment_data(self): """Initialize the return data container for the experiment run""" experiment_data = ExperimentData(experiment=self) # Initialize child experiment data for sub_exp in self._experiments: sub_data = sub_exp._initialize_experiment_data() experiment_data.add_child_data(sub_data) experiment_data.metadata["component_child_index"] = list(range(self.num_experiments)) return experiment_data def _additional_metadata(self): """Add component experiment metadata""" return { "component_metadata": [sub_exp._metadata() for sub_exp in self.component_experiment()] 
} def _add_job_metadata(self, metadata, jobs, **run_options): super()._add_job_metadata(metadata, jobs, **run_options) # Add sub-experiment options for sub_metadata, sub_exp in zip( metadata["component_metadata"], self.component_experiment() ): # Run and transpile options are always overridden if sub_exp.run_options != sub_exp._default_run_options(): warnings.warn( "Sub-experiment run options" " are overridden by composite experiment options." ) sub_exp._add_job_metadata(sub_metadata, jobs, **run_options)
mriedem/qiskit-experiments
qiskit_experiments/curve_analysis/visualization/fit_result_plotters.py
<filename>qiskit_experiments/curve_analysis/visualization/fit_result_plotters.py # This code is part of Qiskit. # # (C) Copyright IBM 2021. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ A collection of functions that draw formatted curve analysis results. For example, this visualization contains not only fit curves and raw data points, but also some extra fitting information, such as fit values of some interesting parameters and goodness of the fitting represented by chi-squared. These extra information can be also visualized as a fit report. Note that plotter is a class that only has a class method to draw the image. This is just like a function, but allows serialization via Enum. 
""" from collections import defaultdict from typing import List, Dict, Optional import uncertainties import numpy as np from matplotlib.ticker import FuncFormatter from qiskit.utils import detach_prefix from qiskit_experiments.curve_analysis.curve_data import SeriesDef, FitData, CurveData from qiskit_experiments.framework import AnalysisResultData from qiskit_experiments.framework.matplotlib import get_non_gui_ax from .curves import plot_scatter, plot_errorbar, plot_curve_fit from .style import PlotterStyle class MplDrawSingleCanvas: """A plotter to draw a single canvas figure for fit result.""" @classmethod def draw( cls, series_defs: List[SeriesDef], raw_samples: List[CurveData], fit_samples: List[CurveData], tick_labels: Dict[str, str], fit_data: FitData, result_entries: List[AnalysisResultData], style: Optional[PlotterStyle] = None, axis: Optional["matplotlib.axes.Axes"] = None, ) -> "pyplot.Figure": """Create a fit result of all curves in the single canvas. Args: series_defs: List of definition for each curve. raw_samples: List of raw sample data for each curve. fit_samples: List of formatted sample data for each curve. tick_labels: Dictionary of axis label information. Axis units and label for x and y value should be explained. fit_data: fit data generated by the analysis. result_entries: List of analysis result data entries. style: Optional. A configuration object to modify the appearance of the figure. axis: Optional. A matplotlib Axis object. Returns: A matplotlib figure of the curve fit result. 
    """
    if axis is None:
        axis = get_non_gui_ax()

        # update image size to experiment default
        figure = axis.get_figure()
        figure.set_size_inches(*style.figsize)
    else:
        figure = axis.get_figure()

    # draw all curves on the same canvas
    for series_def, raw_samp, fit_samp in zip(series_defs, raw_samples, fit_samples):
        draw_single_curve_mpl(
            axis=axis,
            series_def=series_def,
            raw_sample=raw_samp,
            fit_sample=fit_samp,
            fit_data=fit_data,
            style=style,
        )

    # add legend
    if len(series_defs) > 1:
        axis.legend(loc=style.legend_loc)

    # get axis scaling factor
    # For each axis, rescale tick values by an SI prefix (e.g. 1e-6 s -> 1 us)
    # so that the largest absolute data value reads as a small number.
    for this_axis in ("x", "y"):
        sub_axis = getattr(axis, this_axis + "axis")
        unit = tick_labels[this_axis + "val_unit"]
        label = tick_labels[this_axis + "label"]
        if unit:
            maxv = np.max(np.abs(sub_axis.get_data_interval()))
            scaled_maxv, prefix = detach_prefix(maxv, decimal=3)
            prefactor = scaled_maxv / maxv
            # pylint: disable=cell-var-from-loop
            sub_axis.set_major_formatter(FuncFormatter(lambda x, p: f"{x * prefactor: .3g}"))
            sub_axis.set_label_text(f"{label} [{prefix}{unit}]", fontsize=style.axis_label_size)
        else:
            sub_axis.set_label_text(label, fontsize=style.axis_label_size)
            axis.ticklabel_format(axis=this_axis, style="sci", scilimits=(-3, 3))

    if tick_labels["xlim"]:
        axis.set_xlim(tick_labels["xlim"])

    if tick_labels["ylim"]:
        axis.set_ylim(tick_labels["ylim"])

    # write analysis report
    if fit_data:
        report_str = write_fit_report(result_entries)
        report_str += r"Fit $\chi^2$ = " + f"{fit_data.reduced_chisq: .4g}"

        report_handler = axis.text(
            *style.fit_report_rpos,
            report_str,
            ha="center",
            va="top",
            size=style.fit_report_text_size,
            transform=axis.transAxes,
        )

        bbox_props = dict(boxstyle="square, pad=0.3", fc="white", ec="black", lw=1, alpha=0.8)
        report_handler.set_bbox(bbox_props)

    axis.tick_params(labelsize=style.tick_label_size)
    axis.grid(True)

    return figure


class MplDrawMultiCanvasVstack:
    """A plotter to draw a vertically stacked multi canvas figure for fit result."""

    @classmethod
    def draw(
        cls,
        series_defs: List[SeriesDef],
        raw_samples: List[CurveData],
        fit_samples: List[CurveData],
        tick_labels: Dict[str, str],
        fit_data: FitData,
        result_entries: List[AnalysisResultData],
        style: Optional[PlotterStyle] = None,
        axis: Optional["matplotlib.axes.Axes"] = None,
    ) -> "pyplot.Figure":
        """Create a fit result of all curves in the single canvas.

        Args:
            series_defs: List of definition for each curve.
            raw_samples: List of raw sample data for each curve.
            fit_samples: List of formatted sample data for each curve.
            tick_labels: Dictionary of axis label information. Axis units and label
                for x and y value should be explained.
            fit_data: fit data generated by the analysis.
            result_entries: List of analysis result data entries.
            style: Optional. A configuration object to modify the appearance of the figure.
            axis: Optional. A matplotlib Axis object.

        Returns:
            A matplotlib figure of the curve fit result.
        """
        if axis is None:
            axis = get_non_gui_ax()

            # update image size to experiment default
            figure = axis.get_figure()
            figure.set_size_inches(*style.figsize)
        else:
            figure = axis.get_figure()

        # get canvas number
        # NOTE(review): assumes SeriesDef.canvas indices are contiguous from 0 —
        # a gap would create an empty inset axis; confirm against callers.
        n_subplots = max(series_def.canvas for series_def in series_defs) + 1

        # use inset axis. this allows us to draw multiple canvases on a given single axis object
        inset_ax_h = (1 - (0.05 * (n_subplots - 1))) / n_subplots
        inset_axes = [
            axis.inset_axes(
                [0, 1 - (inset_ax_h + 0.05) * n_axis - inset_ax_h, 1, inset_ax_h],
                transform=axis.transAxes,
                zorder=1,
            )
            for n_axis in range(n_subplots)
        ]

        # show x label only in the bottom canvas
        for inset_axis in inset_axes[:-1]:
            inset_axis.set_xticklabels([])
        # NOTE(review): Axes.get_shared_x_axes().join(...) is deprecated in newer
        # matplotlib (3.6+) — confirm the pinned matplotlib version supports it.
        inset_axes[-1].get_shared_x_axes().join(*inset_axes)

        # remove original axis frames
        axis.spines.right.set_visible(False)
        axis.spines.left.set_visible(False)
        axis.spines.top.set_visible(False)
        axis.spines.bottom.set_visible(False)
        axis.set_xticks([])
        axis.set_yticks([])

        # collect data source per canvas
        plot_map = defaultdict(list)
        for curve_ind, series_def in enumerate(series_defs):
            plot_map[series_def.canvas].append(curve_ind)

        # a single y label is shared by all canvases; a comma-separated list maps
        # one label per canvas
        y_labels = tick_labels["ylabel"].split(",")
        if len(y_labels) == 1:
            y_labels = y_labels * n_subplots

        for ax_ind, curve_inds in plot_map.items():
            inset_axis = inset_axes[ax_ind]

            for curve_ind in curve_inds:
                draw_single_curve_mpl(
                    axis=inset_axis,
                    series_def=series_defs[curve_ind],
                    raw_sample=raw_samples[curve_ind],
                    fit_sample=fit_samples[curve_ind],
                    fit_data=fit_data,
                    style=style,
                )

            # add legend to each inset axis
            if len(curve_inds) > 1:
                inset_axis.legend(loc=style.legend_loc)

            # format y axis tick value of each inset axis
            yaxis = getattr(inset_axis, "yaxis")
            unit = tick_labels["yval_unit"]
            label = y_labels[ax_ind]
            if unit:
                maxv = np.max(np.abs(yaxis.get_data_interval()))
                scaled_maxv, prefix = detach_prefix(maxv, decimal=3)
                prefactor = scaled_maxv / maxv
                # pylint: disable=cell-var-from-loop
                yaxis.set_major_formatter(FuncFormatter(lambda x, p: f"{x * prefactor: .3g}"))
                yaxis.set_label_text(f"{label} [{prefix}{unit}]", fontsize=style.axis_label_size)
            else:
                inset_axis.ticklabel_format(axis="y", style="sci", scilimits=(-3, 3))
                yaxis.set_label_text(label, fontsize=style.axis_label_size)

            if tick_labels["ylim"]:
                inset_axis.set_ylim(tick_labels["ylim"])

        # format x axis
        xaxis = getattr(inset_axes[-1], "xaxis")
        unit = tick_labels["xval_unit"]
        label = tick_labels["xlabel"]
        if unit:
            maxv = np.max(np.abs(xaxis.get_data_interval()))
            scaled_maxv, prefix = detach_prefix(maxv, decimal=3)
            prefactor = scaled_maxv / maxv
            # pylint: disable=cell-var-from-loop
            xaxis.set_major_formatter(FuncFormatter(lambda x, p: f"{x * prefactor: .3g}"))
            xaxis.set_label_text(f"{label} [{prefix}{unit}]", fontsize=style.axis_label_size)
        else:
            axis.ticklabel_format(axis="x", style="sci", scilimits=(-3, 3))
            xaxis.set_label_text(label, fontsize=style.axis_label_size)

        if tick_labels["xlim"]:
            inset_axes[-1].set_xlim(tick_labels["xlim"])

        # write analysis report
        if fit_data:
            report_str = write_fit_report(result_entries)
            report_str += r"Fit $\chi^2$ = " + f"{fit_data.reduced_chisq: .4g}"

            report_handler = axis.text(
                *style.fit_report_rpos,
                report_str,
                ha="center",
                va="top",
                size=style.fit_report_text_size,
                transform=axis.transAxes,
            )

            bbox_props = dict(boxstyle="square, pad=0.3", fc="white", ec="black", lw=1, alpha=0.8)
            report_handler.set_bbox(bbox_props)

        axis.tick_params(labelsize=style.tick_label_size)
        axis.grid(True)

        return figure


def draw_single_curve_mpl(
    axis: "matplotlib.axes.Axes",
    series_def: SeriesDef,
    raw_sample: CurveData,
    fit_sample: CurveData,
    fit_data: FitData,
    style: PlotterStyle,
):
    """A function that draws a single curve on the given plotter canvas.

    Args:
        axis: Drawer canvas.
        series_def: Definition of the curve to draw.
        raw_sample: Raw sample data.
        fit_sample: Formatted sample data.
        fit_data: Fitting parameter collection.
        style: Style sheet for plotting.
    """
    # plot raw data if data is formatted
    # (raw scatter is only shown when formatting actually changed the y values)
    if not np.array_equal(raw_sample.y, fit_sample.y):
        plot_scatter(xdata=raw_sample.x, ydata=raw_sample.y, ax=axis, zorder=0)

    # plot formatted data
    if np.all(np.isnan(fit_sample.y_err)):
        sigma = None
    else:
        sigma = np.nan_to_num(fit_sample.y_err)

    plot_errorbar(
        xdata=fit_sample.x,
        ydata=fit_sample.y,
        sigma=sigma,
        ax=axis,
        label=series_def.name,
        marker=series_def.plot_symbol,
        color=series_def.plot_color,
        zorder=1,
        linestyle="",
    )

    # plot fit curve
    if fit_data:
        plot_curve_fit(
            func=series_def.fit_func,
            result=fit_data,
            ax=axis,
            color=series_def.plot_color,
            zorder=2,
            fit_uncertainty=style.plot_sigma,
        )


def write_fit_report(result_entries: List[AnalysisResultData]) -> str:
    """A function that generates fit reports documentation from list of data.

    Args:
        result_entries: List of data entries.

    Returns:
        Documentation of fit reports.
    """
    analysis_description = ""

    def format_val(float_val: float) -> str:
        # scientific notation outside [1e-3, 1e3], plain otherwise
        if np.abs(float_val) < 1e-3 or np.abs(float_val) > 1e3:
            return f"{float_val: .4e}"
        return f"{float_val: .4g}"

    for res in result_entries:
        # only UFloat (value with uncertainty) entries are reported
        if isinstance(res.value, uncertainties.UFloat):
            fitval = res.value
            unit = res.extra.get("unit", None)
            if unit:
                # unit is defined. do detaching prefix, i.e. 1000 Hz -> 1 kHz
                try:
                    val, val_prefix = detach_prefix(fitval.nominal_value, decimal=3)
                except ValueError:
                    # Value is too small or too big
                    val = fitval.nominal_value
                    val_prefix = ""
                val_unit = val_prefix + unit
                value_repr = f"{val: .3g}"

                # write error bar if it is finite value
                if fitval.std_dev is not None and np.isfinite(fitval.std_dev):
                    # with stderr
                    try:
                        err, err_prefix = detach_prefix(fitval.std_dev, decimal=3)
                    except ValueError:
                        # Value is too small or too big
                        err = fitval.std_dev
                        err_prefix = ""
                    err_unit = err_prefix + unit
                    if val_unit == err_unit:
                        # same value scaling, same prefix
                        value_repr += f" \u00B1 {err: .2f} {val_unit}"
                    else:
                        # different value scaling, different prefix
                        value_repr += f" {val_unit} \u00B1 {err: .2f} {err_unit}"
                else:
                    # without stderr, just append unit
                    value_repr += f" {val_unit}"
            else:
                # unit is not defined. raw value formatting is performed.
                value_repr = format_val(fitval.nominal_value)
                if np.isfinite(fitval.std_dev):
                    # with stderr
                    value_repr += f" \u00B1 {format_val(fitval.std_dev)}"

            analysis_description += f"{res.name} = {value_repr}\n"

    return analysis_description
mriedem/qiskit-experiments
qiskit_experiments/test/t2hahn_backend.py
<reponame>mriedem/qiskit-experiments<gh_stars>1-10 # This code is part of Qiskit. # # (C) Copyright IBM 2022. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ T2HahnBackend class. Temporary backend to be used for t2hahn experiment """ from typing import List import numpy as np from numpy import isclose from qiskit import QiskitError from qiskit.providers import BackendV1 from qiskit.providers.models import QasmBackendConfiguration from qiskit.result import Result from qiskit_experiments.framework import Options from qiskit_experiments.test.utils import FakeJob # Fix seed for simulations SEED = 9000 class T2HahnBackend(BackendV1): """ A simple and primitive backend, to be run by the T2Hahn tests """ def __init__( self, t2hahn=None, frequency=None, initialization_error=None, readout0to1=None, readout1to0=None, ): """ Initialize the T2Hahn backend """ configuration = QasmBackendConfiguration( backend_name="T2Hahn_simulator", backend_version="0", n_qubits=int(1e6), basis_gates=["barrier", "rx", "delay", "measure"], gates=[], local=True, simulator=True, conditional=False, open_pulse=False, memory=False, max_shots=int(1e6), coupling_map=None, ) self._t2hahn = t2hahn self._frequency = frequency self._initialization_error = initialization_error self._readout0to1 = readout0to1 self._readout1to0 = readout1to0 self._rng = np.random.default_rng(seed=SEED) super().__init__(configuration) @classmethod def _default_options(cls): """Default options of the test backend.""" return Options(shots=1024) def _qubit_initialization(self, nqubits: int) -> List[dict]: """ Initialize the list of qubits state. 
If initialization error is provided to the backend it will use it to determine the initialized state. Args: nqubits(int): the number of qubits in the circuit. Returns: List[dict]: A list of dictionary which each dictionary contain the qubit state in the format {"XY plane": (bool), "ZX plane": (bool), "Theta": float} Raises: QiskitError: Raised if initialization_error type isn't 'None'', 'float' or a list of 'float' with length of number of the qubits. ValueError: Raised if the initialization error is negative. """ qubits_sates = [{} for _ in range(nqubits)] # Making an array with the initialization error for each qubit. initialization_error = self._initialization_error if isinstance(initialization_error, float) or initialization_error is None: initialization_error_arr = [initialization_error for _ in range(nqubits)] elif isinstance(initialization_error, list): if len(initialization_error) == 1: initialization_error_arr = [initialization_error[0] for _ in range(nqubits)] elif len(initialization_error) == nqubits: initialization_error_arr = initialization_error else: raise QiskitError( f"The length of the list {initialization_error} isn't the same as the number " "of qubits." ) else: raise QiskitError("Initialization error type isn't a list or float") for err in initialization_error_arr: if not isinstance(err, float): raise QiskitError("Initialization error type isn't a list or float") if err < 0: raise ValueError("Initialization error value can't be negative.") for qubit in range(nqubits): if initialization_error_arr[qubit] is not None and ( self._rng.random() < initialization_error_arr[qubit] ): qubits_sates[qubit] = {"XY plane": False, "ZX plane": True, "Theta": np.pi} else: qubits_sates[qubit] = { "XY plane": False, "ZX plane": True, "Theta": 0, } return qubits_sates def _delay_gate(self, qubit_state: dict, delay: float, t2hahn: float, frequency: float) -> dict: """ Apply delay gate to the qubit. 
From the delay time we can calculate the probability that an error has accrued. Args: qubit_state(dict): The state of the qubit before operating the gate. delay(float): The time in which there are no operation on the qubit. t2hahn(float): The T2 parameter of the backhand for probability calculation. frequency(float): The frequency of the qubit for phase calculation. Returns: dict: The state of the qubit after operating the gate. Raises: QiskitError: Raised if the frequency is 'None' or if the qubit isn't in the XY plane. """ if frequency is None: raise QiskitError("Delay gate supported only if the qubit is on the XY plane.") new_qubit_state = qubit_state if qubit_state["XY plane"]: prob_noise = 1 - (np.exp(-delay / t2hahn)) if self._rng.random() < prob_noise: if self._rng.random() < 0.5: new_qubit_state = { "XY plane": False, "ZX plane": True, "Theta": 0, } else: new_qubit_state = { "XY plane": False, "ZX plane": True, "Theta": np.pi, } else: phase = frequency * delay new_theta = qubit_state["Theta"] + phase new_theta = new_theta % (2 * np.pi) new_qubit_state = {"XY plane": True, "ZX plane": False, "Theta": new_theta} else: if not isclose(qubit_state["Theta"], np.pi) and not isclose(qubit_state["Theta"], 0): raise QiskitError("Delay gate supported only if the qubit is on the XY plane.") return new_qubit_state def _rx_gate(self, qubit_state: dict, angle: float) -> dict: """ Apply Rx gate. Args: qubit_state(dict): The state of the qubit before operating the gate. angle(float): The angle of the rotation. Returns: dict: The state of the qubit after operating the gate. Raises: QiskitError: if angle is not ±π/2 or ±π. Those are the only supported angles. 
""" if qubit_state["XY plane"]: if isclose(angle, np.pi): new_theta = -qubit_state["Theta"] new_theta = new_theta % (2 * np.pi) new_qubit_state = { "XY plane": True, "ZX plane": False, "Theta": new_theta, } elif isclose(angle, np.pi / 2): new_theta = (np.pi / 2) - qubit_state["Theta"] new_theta = new_theta % (2 * np.pi) new_qubit_state = { "XY plane": False, "ZX plane": True, "Theta": new_theta, } elif isclose(angle, -np.pi / 2): new_theta = np.abs((-np.pi / 2) - qubit_state["Theta"]) new_theta = new_theta % (2 * np.pi) new_qubit_state = { "XY plane": False, "ZX plane": True, "Theta": new_theta, } else: raise QiskitError( f"Error - the angle {angle} isn't supported. We only support multiplications of pi/2" ) else: if isclose(angle, np.pi): new_theta = qubit_state["Theta"] + np.pi new_theta = new_theta % (2 * np.pi) new_qubit_state = { "XY plane": False, "ZX plane": True, "Theta": new_theta, } elif isclose(angle, np.pi / 2): new_theta = ( qubit_state["Theta"] + 3 * np.pi / 2 ) # its theta -pi/2 but we added 2*pi new_theta = new_theta % (2 * np.pi) new_qubit_state = { "XY plane": True, "ZX plane": False, "Theta": new_theta, } elif isclose(angle, -np.pi / 2): new_theta = np.pi / 2 - qubit_state["Theta"] new_theta = new_theta % (2 * np.pi) new_qubit_state = { "XY plane": True, "ZX plane": False, "Theta": new_theta, } else: raise QiskitError( f"Error - The angle {angle} isn't supported. We only support multiplication of pi/2" ) return new_qubit_state def _measurement_gate(self, qubit_state: dict) -> int: """ implementing measurement on qubit with read-out error. Args: qubit_state(dict): The state of the qubit at the end of the circuit. Returns: int: The result of the measurement after applying read-out error. """ # Here we are calculating the probability for measurement result depending on the # location of the qubit on the Bloch sphere. 
if qubit_state["XY plane"]: meas_res = self._rng.random() < 0.5 else: # Since we are not in the XY plane, we need to calculate the probability for # measuring output. First, we calculate the probability and later we are # tossing to see if the event did happen. z_projection = np.cos(qubit_state["Theta"]) probability = z_projection**2 if self._rng.random() > probability: meas_res = self._rng.random() < 0.5 else: meas_res = z_projection < 0 # Measurement error implementation if meas_res and self._readout1to0 is not None: if self._rng.random() < self._readout1to0[0]: meas_res = 0 elif not meas_res and self._readout0to1 is not None: if self._rng.random() < self._readout0to1[0]: meas_res = 1 return meas_res # pylint: disable = arguments-differ def run(self, run_input, **options): """ Run the T2Hahn backend """ self.options.update_options(**options) shots = self.options.get("shots") result = { "backend_name": "T2Hahn backend", "backend_version": "0", "qobj_id": "0", "job_id": "0", "success": True, "results": [], } for circ in run_input: nqubits = circ.num_qubits qubit_indices = {bit: idx for idx, bit in enumerate(circ.qubits)} clbit_indices = {bit: idx for idx, bit in enumerate(circ.clbits)} counts = dict() for _ in range(shots): qubit_state = self._qubit_initialization( nqubits=nqubits ) # for parallel need to make an array clbits = np.zeros(circ.num_clbits, dtype=int) for op, qargs, cargs in circ.data: qubit = qubit_indices[qargs[0]] # The noise will only be applied if we are in the XY plane. 
if op.name == "delay": delay = op.params[0] t2hahn = self._t2hahn[qubit] freq = self._frequency[qubit] qubit_state[qubit] = self._delay_gate( qubit_state=qubit_state[qubit], delay=delay, t2hahn=t2hahn, frequency=freq, ) elif op.name == "rx": qubit_state[qubit] = self._rx_gate(qubit_state[qubit], op.params[0]) elif op.name == "measure": meas_res = self._measurement_gate(qubit_state[qubit]) clbit = clbit_indices[cargs[0]] clbits[clbit] = meas_res clstr = "" for clbit in clbits[::-1]: clstr = clstr + str(clbit) if clstr in counts: counts[clstr] += 1 else: counts[clstr] = 1 result["results"].append( { "shots": shots, "success": True, "header": {"metadata": circ.metadata}, "data": {"counts": counts}, } ) return FakeJob(self, Result.from_dict(result))
mriedem/qiskit-experiments
qiskit_experiments/library/tomography/fitters/cvxpy_lstsq.py
<reponame>mriedem/qiskit-experiments<filename>qiskit_experiments/library/tomography/fitters/cvxpy_lstsq.py
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Constrained convex least-squares tomography fitter.
"""

from typing import Optional, Dict, List, Tuple
import numpy as np

from qiskit_experiments.library.tomography.basis import (
    BaseFitterMeasurementBasis,
    BaseFitterPreparationBasis,
)
from . import cvxpy_utils
from .cvxpy_utils import cvxpy
from . import fitter_utils


@cvxpy_utils.requires_cvxpy
def cvxpy_linear_lstsq(
    outcome_data: List[np.ndarray],
    shot_data: np.ndarray,
    measurement_data: np.ndarray,
    preparation_data: np.ndarray,
    measurement_basis: Optional[BaseFitterMeasurementBasis] = None,
    preparation_basis: Optional[BaseFitterPreparationBasis] = None,
    psd: bool = True,
    trace_preserving: bool = False,
    trace: Optional[float] = None,
    weights: Optional[np.ndarray] = None,
    **kwargs,
) -> Tuple[np.ndarray, Dict]:
    r"""Constrained weighted linear least-squares tomography fitter.

    Overview
        This fitter reconstructs the maximum-likelihood estimate by using
        ``cvxpy`` to minimize the constrained least-squares negative log
        likelihood function

        .. math::
            \hat{\rho}
                &= -\mbox{argmin }\log\mathcal{L}{\rho} \\
                &= \mbox{argmin }\sum_i w_i^2(\mbox{Tr}[E_j\rho] - \hat{p}_i)^2 \\
                &= \mbox{argmin }\|W(Ax - y) \|_2^2

        subject to

        - *Positive-semidefinite* (``psd=True``): :math:`\rho \gg 0` is
          constrained to be a positive-semidefinite matrix.
        - *Trace* (``trace=t``): :math:`\mbox{Tr}(\rho) = t` is constrained to
          have the specified trace.
        - *Trace preserving* (``trace_preserving=True``): When performing process
          tomography the Choi-state :math:`\rho` represents is constrained to be
          trace preserving.

        where

        - :math:`A` is the matrix of measurement operators
          :math:`A = \sum_i |i\rangle\!\langle\!\langle M_i|`
        - :math:`y` is the vector of expectation value data for each projector
          corresponding to estimates of :math:`b_i = Tr[M_i \cdot x]`.
        - :math:`x` is the vectorized density matrix (or Choi-matrix) to be
          fitted :math:`x = |\rho\rangle\!\rangle`.

    .. note::

        Various solvers can be called in CVXPY using the ``solver`` keyword
        argument. When ``psd=True`` the optimization problem is a case of a
        *semidefinite program* (SDP) and requires a SDP compatible solver for
        CVXPY. CVXPY includes an SDP compatible solver ``SCS`` but it is
        recommended to install the open-source ``CVXOPT`` solver or one of the
        supported commercial solvers. See the `CVXPY documentation
        <https://www.cvxpy.org/tutorial/advanced/index.html#solve-method-options>`_
        for more information on solvers.

    .. note::

        Linear least-squares constructs the full basis matrix :math:`A` as a
        dense numpy array so should not be used for more than 5 or 6 qubits. For
        larger number of qubits try the
        :func:`~qiskit_experiments.library.tomography.fitters.linear_inversion`
        fitter function.

    Args:
        outcome_data: list of outcome frequency data.
        shot_data: basis measurement total shot data.
        measurement_data: measurement basis index data.
        preparation_data: preparation basis index data.
        measurement_basis: measurement matrix basis.
        preparation_basis: Optional, preparation matrix basis.
        psd: If True rescale the eigenvalues of fitted matrix to be positive
             semidefinite (default: True)
        trace_preserving: Enforce the fitted matrix to be
            trace preserving when fitting a Choi-matrix in quantum process
            tomography (default: False).
        trace: trace constraint for the fitted matrix (default: None).
        weights: Optional array of weights for least squares objective.
        kwargs: kwargs for cvxpy solver.

    Raises:
        QiskitError: If CVXPY is not installed on the current system.
        AnalysisError: If analysis fails.

    Returns:
        The fitted matrix rho that maximizes the least-squares likelihood
        function, and a dictionary of solver metadata.
    """
    basis_matrix, probability_data = fitter_utils.lstsq_data(
        outcome_data,
        shot_data,
        measurement_data,
        preparation_data,
        measurement_basis=measurement_basis,
        preparation_basis=preparation_basis,
    )

    if weights is not None:
        basis_matrix = weights[:, None] * basis_matrix
        probability_data = weights * probability_data

    # Since CVXPY only works with real variables we must specify the real
    # and imaginary parts of rho seperately: rho = rho_r + 1j * rho_i
    dim = int(np.sqrt(basis_matrix.shape[1]))
    rho_r, rho_i, cons = cvxpy_utils.complex_matrix_variable(
        dim, hermitian=True, psd=psd, trace=trace
    )

    # Trace preserving constraint when fitting Choi-matrices for
    # quantum process tomography. Note that this adds an implicity
    # trace constraint of trace(rho) = sqrt(len(rho)) = dim
    # if a different trace constraint is specified above this will
    # cause the fitter to fail.
    if trace_preserving:
        cons += cvxpy_utils.trace_preserving_constaint(rho_r, rho_i)

    # OBJECTIVE FUNCTION
    # The function we wish to minimize is || arg ||_2 where
    #   arg =  bm * vec(rho) - data
    # Since we are working with real matrices in CVXPY we expand this as
    #   bm * vec(rho) = (bm_r + 1j * bm_i) * vec(rho_r + 1j * rho_i)
    #                 = bm_r * vec(rho_r) - bm_i * vec(rho_i)
    #                   + 1j * (bm_r * vec(rho_i) + bm_i * vec(rho_r))
    #                 = bm_r * vec(rho_r) - bm_i * vec(rho_i)
    # where we drop the imaginary part since the expectation value is real
    bm_r = np.real(basis_matrix)
    bm_i = np.imag(basis_matrix)
    arg = bm_r @ cvxpy.vec(rho_r) - bm_i @ cvxpy.vec(rho_i) - probability_data
    obj = cvxpy.Minimize(cvxpy.norm(arg, p=2))
    prob = cvxpy.Problem(obj, cons)

    # Solve SDP
    cvxpy_utils.set_default_sdp_solver(kwargs)
    cvxpy_utils.solve_iteratively(prob, 5000, **kwargs)

    # Return optimal values and problem metadata
    rho_fit = rho_r.value + 1j * rho_i.value
    metadata = {
        "cvxpy_solver": prob.solver_stats.solver_name,
        "cvxpy_status": prob.status,
    }
    return rho_fit, metadata


@cvxpy_utils.requires_cvxpy
def cvxpy_gaussian_lstsq(
    outcome_data: List[np.ndarray],
    shot_data: np.ndarray,
    measurement_data: np.ndarray,
    preparation_data: np.ndarray,
    measurement_basis: Optional[BaseFitterMeasurementBasis] = None,
    preparation_basis: Optional[BaseFitterPreparationBasis] = None,
    psd: bool = True,
    trace_preserving: bool = False,
    trace: Optional[float] = None,
    **kwargs,
) -> Tuple[np.ndarray, Dict]:
    r"""Constrained Gaussian linear least-squares tomography fitter.

    .. note::

        This function calls :func:`cvxpy_linear_lstsq` with a Gaussian weights
        vector. Refer to its documentation for additional details.

    Overview
        This fitter reconstructs the maximum-likelihood estimate by using
        ``cvxpy`` to minimize the constrained least-squares negative log
        likelihood function

        .. math::
            \hat{\rho}
                &= \mbox{argmin} (-\log\mathcal{L}{\rho}) \\
                &= \mbox{argmin }\|W(Ax - y) \|_2^2 \\
            -\log\mathcal{L}(\rho)
                &= |W(Ax -y) \|_2^2 \\
                &= \sum_i \frac{1}{\sigma_i^2}(\mbox{Tr}[E_j\rho] - \hat{p}_i)^2

    Additional Details
        The Gaussian weights are estimated from the observed frequency and shot
        data using

        .. math::

            \sigma_i &= \sqrt{\frac{q_i(1 - q_i)}{n_i}} \\
            q_i &= \frac{f_i + \beta}{n_i + K \beta}

        where :math:`q_i` are hedged probabilities which are rescaled to avoid
        0 and 1 values using the "add-beta" rule, with :math:`\beta=0.5`, and
        :math:`K=2^m` the number of measurement outcomes for each basis
        measurement.

    Args:
        outcome_data: list of outcome frequency data.
        shot_data: basis measurement total shot data.
        measurement_data: measurement basis index data.
        preparation_data: preparation basis index data.
        measurement_basis: measurement matrix basis.
        preparation_basis: Optional, preparation matrix basis.
        psd: If True rescale the eigenvalues of fitted matrix to be positive
             semidefinite (default: True)
        trace_preserving: Enforce the fitted matrix to be
            trace preserving when fitting a Choi-matrix in quantum process
            tomography (default: False).
        trace: trace constraint for the fitted matrix (default: None).
        kwargs: kwargs for cvxpy solver.

    Raises:
        QiskitError: If CVXPY is not installed on the current system.
        AnalysisError: If analysis fails.

    Returns:
        The fitted matrix rho that maximizes the least-squares likelihood
        function, and a dictionary of solver metadata.
    """
    if measurement_basis is None:
        num_outcomes = None
    else:
        num_outcomes = [measurement_basis.num_outcomes(i) for i in measurement_data]
    weights = fitter_utils.binomial_weights(outcome_data, shot_data, num_outcomes, beta=0.5)
    return cvxpy_linear_lstsq(
        outcome_data,
        shot_data,
        measurement_data,
        preparation_data,
        measurement_basis,
        preparation_basis=preparation_basis,
        psd=psd,
        trace=trace,
        trace_preserving=trace_preserving,
        weights=weights,
        **kwargs,
    )
mriedem/qiskit-experiments
qiskit_experiments/library/tomography/basis/base_basis.py
<filename>qiskit_experiments/library/tomography/basis/base_basis.py
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Fitter basis classes for tomography analysis.
"""

from abc import ABC, abstractmethod
from typing import Iterable, Optional
import numpy as np
from qiskit import QuantumCircuit


class BaseFitterMeasurementBasis(ABC):
    """Abstract base class for fitter measurement bases.

    This class defines the POVM element matrix for each measurement outcome of
    a basis index. It is used by tomography fitters during tomography analysis.
    """

    def __init__(self, name: Optional[str] = None):
        """Initialize a fitter measurement basis.

        Args:
            name: Optional, name for the basis. If None the class name will be used.
        """
        # bases compare and hash by (type, name), so two instances of the same
        # class with the same name are interchangeable
        self._name = name if name else type(self).__name__

    def __hash__(self):
        return hash((type(self), self._name))

    def __eq__(self, value):
        tup1 = (type(self), self.name)
        tup2 = (type(value), getattr(value, "name", None))
        return tup1 == tup2

    @property
    def name(self) -> str:
        """Return the basis name"""
        return self._name

    @abstractmethod
    def __len__(self) -> int:
        """Return the number of indices for subsystems."""

    @abstractmethod
    def num_outcomes(self, index: Iterable[int]) -> int:
        """Return the number of outcomes for basis index"""

    @abstractmethod
    def matrix(self, index: Iterable[int], outcome: int) -> np.ndarray:
        """Return the POVM matrix for the basis index and outcome.

        Args:
            index: a list of subsystem basis indices.
            outcome: the composite system count outcome.

        Returns:
            The POVM matrix for the bases and outcome.
        """


class BaseTomographyMeasurementBasis(BaseFitterMeasurementBasis):
    """Abstract base class for tomography experiment measurement bases.

    This class extends BaseFitterMeasurementBasis to include a circuit
    definition of each basis index. It can be used to construct tomography
    circuits for execution, as well as for tomography analysis of measurement
    data.
    """

    @abstractmethod
    def circuit(self, index: Iterable[int]) -> QuantumCircuit:
        """Return a composite rotation circuit to measure in basis

        Args:
            index: a list of basis elements to tensor together.

        Returns:
            the rotation circuit for the specified basis
        """


class BaseFitterPreparationBasis(ABC):
    """Abstract base class for fitter preparation bases.

    This class defines the density matrix for each preparation basis index.
    It is used by tomography fitters during tomography analysis.
    """

    def __init__(self, name: Optional[str] = None):
        """Initialize a fitter preparation basis.

        Args:
            name: Optional, name for the basis. If None the class name will be used.
        """
        # bases compare and hash by (type, name), mirroring the measurement basis
        self._name = name if name else type(self).__name__

    def __hash__(self):
        return hash((type(self), self._name))

    def __eq__(self, value):
        tup1 = (type(self), self.name)
        tup2 = (type(value), getattr(value, "name", None))
        return tup1 == tup2

    @property
    def name(self) -> str:
        """Return the basis name"""
        return self._name

    @abstractmethod
    def __len__(self) -> int:
        """Return the number of indices for subsystems."""

    @abstractmethod
    def matrix(self, index: Iterable[int]) -> np.ndarray:
        """Return the density matrix for the basis index.

        Args:
            index: a list of subsystem basis indices.

        Returns:
            The density matrix for the given basis index.
        """


class BaseTomographyPreparationBasis(BaseFitterPreparationBasis):
    """Abstract base class for tomography experiment preparation bases.

    This class extends BaseFitterPreparationBasis to include a circuit
    definition of each basis index. It can be used to construct tomography
    circuits for execution, as well as for tomography analysis of measurement
    data.
    """

    @abstractmethod
    def circuit(self, index: Iterable[int]) -> QuantumCircuit:
        """Return the basis preparation circuit.

        Args:
            index: a list of basis elements to tensor together.

        Returns:
            the preparation circuit for the specified basis
        """
mriedem/qiskit-experiments
test/test_tphi.py
<filename>test/test_tphi.py
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021, 2022.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Test Tphi experiment
"""
from test.base import QiskitExperimentsTestCase
from qiskit_experiments.library import Tphi
from qiskit_experiments.test.tphi_backend import TphiBackend
from qiskit_experiments.library.characterization.analysis.t1_analysis import T1Analysis
from qiskit_experiments.library.characterization.analysis.t2ramsey_analysis import T2RamseyAnalysis
from qiskit_experiments.library.characterization.analysis.tphi_analysis import TphiAnalysis


class TestTphi(QiskitExperimentsTestCase):
    """Test Tphi experiment"""

    # relative tolerance used when comparing the fitted T_phi to its analytic value
    __tolerance__ = 0.1

    def test_tphi_end_to_end(self):
        """
        Run a complete Tphi experiment on a fake Tphi backend
        """
        delays_t1 = list(range(1, 40, 3))
        delays_t2 = list(range(1, 51, 2))
        exp = Tphi(qubit=0, delays_t1=delays_t1, delays_t2=delays_t2, osc_freq=0.1)

        t1 = 20
        t2ramsey = 25
        backend = TphiBackend(t1=t1, t2ramsey=t2ramsey, freq=0.1)
        analysis = TphiAnalysis([T1Analysis(), T2RamseyAnalysis()])
        expdata = exp.run(backend=backend, analysis=analysis)
        self.assertExperimentDone(expdata)
        self.assertRoundTripSerializable(expdata, check_func=self.experiment_data_equiv)
        self.assertRoundTripPickle(expdata, check_func=self.experiment_data_equiv)
        result = expdata.analysis_results("T_phi")
        # analytic relation: 1/T_phi = 1/T2* - 1/(2*T1)
        estimated_tphi = 1 / ((1 / t2ramsey) - (1 / (2 * t1)))
        self.assertAlmostEqual(
            result.value.nominal_value,
            estimated_tphi,
            delta=TestTphi.__tolerance__ * result.value.nominal_value,
        )
        self.assertEqual(result.quality, "good", "Result quality bad")

    def test_tphi_with_changing_params(self):
        """
        Run Tphi experiment, then set new delay values in set_experiment_options, and check
        that the new experiment has the correct delay values.
        """
        delays_t1 = list(range(1, 40, 3))
        delays_t2 = list(range(1, 50, 2))
        exp = Tphi(qubit=0, delays_t1=delays_t1, delays_t2=delays_t2, osc_freq=0.1)

        t1 = 20
        t2ramsey = 25
        backend = TphiBackend(t1=t1, t2ramsey=t2ramsey, freq=0.1)
        analysis = TphiAnalysis([T1Analysis(), T2RamseyAnalysis()])
        expdata = exp.run(backend=backend, analysis=analysis)
        self.assertExperimentDone(expdata)

        # Extract x values from metadata
        data_t1 = expdata.child_data(0).data()
        x_values_t1 = [datum["metadata"]["xval"] for datum in data_t1]
        data_t2 = expdata.child_data(1).data()
        x_values_t2 = [datum["metadata"]["xval"] for datum in data_t2]
        self.assertListEqual(x_values_t1, delays_t1, "Incorrect delays_t1")
        self.assertListEqual(x_values_t2, delays_t2, "Incorrect delays_t2")

        new_delays_t1 = list(range(1, 45, 3))
        new_delays_t2 = list(range(1, 55, 2))
        new_osc_freq = 0.2

        exp.set_experiment_options(
            delays_t1=new_delays_t1, delays_t2=new_delays_t2, osc_freq=new_osc_freq
        )
        expdata = exp.run(backend=backend, analysis=analysis)
        self.assertExperimentDone(expdata)

        # Extract x values from metadata
        data_t1 = expdata.child_data(0).data()
        x_values_t1 = [datum["metadata"]["xval"] for datum in data_t1]
        data_t2 = expdata.child_data(1).data()
        x_values_t2 = [datum["metadata"]["xval"] for datum in data_t2]
        self.assertListEqual(x_values_t1, new_delays_t1, "Option delays_t1 not set correctly")
        self.assertListEqual(x_values_t2, new_delays_t2, "Option delays_t2 not set correctly")
        new_freq_t2 = data_t2[0]["metadata"]["osc_freq"]
        self.assertEqual(new_freq_t2, new_osc_freq, "Option osc_freq not set correctly")

    def test_roundtrip_serializable(self):
        """Test round trip JSON serialization"""
        exp = Tphi(0, [1], [2], 3)
        self.assertRoundTripSerializable(exp, self.json_equiv)

    def test_analysis_config(self):
        """Test converting analysis to and from config works"""
        analysis = TphiAnalysis([T1Analysis(), T2RamseyAnalysis()])
        loaded_analysis = analysis.from_config(analysis.config())
        # distinct instances are expected, but configs must round-trip equal
        self.assertNotEqual(analysis, loaded_analysis)
        self.assertEqual(analysis.config(), loaded_analysis.config())
mriedem/qiskit-experiments
qiskit_experiments/framework/__init__.py
<reponame>mriedem/qiskit-experiments # This code is part of Qiskit. # # (C) Copyright IBM 2021. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """ ========================================================== Experiment Framework (:mod:`qiskit_experiments.framework`) ========================================================== .. currentmodule:: qiskit_experiments.framework .. note:: This page provides useful information for developers to implement new experiments. Overview ======== The experiment framework broadly defines an experiment as the execution of 1 or more circuits on a device, and analysis of the resulting measurement data to return 1 or more derived results. The interface for running an experiment is through the *Experiment* classes, such as those contained in the :mod:`qiskit_experiments.library` The following pseudo-code illustrates the typical workflow in Qiskit Experiments for - Initializing a new experiment - Running the experiment on a backend - Saving result to an online database (for compatible providers) - Viewing analysis results .. code-block:: python # Import an experiment from qiskit_experiments.library import SomeExperiment # Initialize with desired qubits and options exp = SomeExperiment(qubits, **options) # Run on a backend exp_data = exp.run(backend) # Wait for execution and analysis to finish exp_data.block_for_results() # Optionally save results to database exp_data.save() # View analysis results for result in exp_data.analysis_results(): print(result) The experiment class contains information for generating circuits and analysis of results. 
These can typically be configured with a variety of options. Once all options are set, you can call :meth:`BaseExperiment.run` method to run the experiment on a Qiskit compatible ``backend``. The steps of running an experiment involves generation experimental circuits according to the options you set and submission of a job to the specified ``backend``. Once the job has finished executing an analysis job performs data analysis of the experiment execution results. The result of running an experiment is an :class:`ExperimentData` container which contains the analysis results, any figures generated during analysis, and the raw measurement data. These can each be accessed using the :meth:`ExperimentData.analysis_results`, :meth:`ExperimentData.figure` and :meth:`ExperimentData.data` methods respectively. Analysis/plotting is done in a separate child thread, so it doesn't block the main thread. Since matplotlib doesn't support GUI mode in a child threads, the figures generated during analysis need to use a non-GUI canvas. The default is :class:`~matplotlib.backends.backend_svg.FigureCanvasSVG`, but you can change it to a different `non-interactive backend <https://matplotlib.org/stable/tutorials/introductory/usage.html#the-builtin-backends>`_ by setting the ``qiskit_experiments.framework.matplotlib.default_figure_canvas`` attribute. For example, you can set ``default_figure_canvas`` to :class:`~matplotlib.backends.backend_agg.FigureCanvasAgg` to use the ``AGG`` backend. For experiments run through a compatible provider such as the `IBMQ provider <https://github.com/Qiskit/qiskit-ibmq-provider>`_ the :class:`ExperimentData` object can be saved to an online experiment database by calling the :meth:`ExperimentData.save` method. This data can later be retrieved by its unique :attr:`~ExperimentData.experiment_id`* string using :meth:`ExperimentData.load`. 
Composite Experiments ===================== The experiment classes :class:`ParallelExperiment` and :class:`BatchExperiment` provide a way of combining separate component experiments for execution as a single composite experiment. - A :class:`ParallelExperiment` combines all the sub experiment circuits into circuits which run the component gates in parallel on the respective qubits. The marginalization of measurement data for analysis of each sub-experiment is handled automatically. To run as a parallel experiment each sub experiment must be defined on a independent subset of device qubits. - A :class:`BatchExperiment` combines the sub-experiment circuits into a single large job that runs all the circuits for each experiment in series. Filtering the batch result data for analysis for each sub-experiment is handled automatically. Creating Custom Experiments =========================== Qiskit experiments provides a framework for creating custom experiments which can be through Qiskit and stored in the online database when run through the IBMQ provider. You may use this framework to release your own module of experiments subject to the requirements of the Apache 2.0 license. Creating a custom experiment is done by subclassing the :class:`BaseExperiment` and :class:`BaseAnalysis` classes. - The *experiment* class generates the list of circuits to be executed on the backend and any corresponding metadata that is required for the analysis of measurement results. - The *analysis* class performs post-processing of the measurement results after execution. Analysis classes can be re-used between experiments so you can either use one of the included analysis classes if appropriate or implement your own. Experiment Subclasses ********************* To create an experiment subclass - Implement the abstract :meth:`BaseExperiment.circuits` method. This should return a list of ``QuantumCircuit`` objects defining the experiment payload. 
- Call the :meth:`BaseExperiment.__init__` method during the subclass constructor with a list of physical qubits. The length of this list must be equal to the number of qubits in each circuit and is used to map these circuits to this layout during execution. Arguments in the constructor can be overridden so that a subclass can be initialized with some experiment configuration. Optionally the following methods can also be overridden in the subclass to allow configuring various experiment and execution options - :meth:`BaseExperiment._default_experiment_options` to set default values for configurable option parameters for the experiment. - :meth:`BaseExperiment._default_transpile_options` to set custom default values for the ``qiskit.transpile`` used to transpile the generated circuits before execution. - :meth:`BaseExperiment._default_run_options` to set default backend options for running the transpiled circuits on a backend. - :meth:`BaseExperiment._default_analysis_options` to set default values for configurable options for the experiments analysis class. Note that these should generally be set by overriding the :class:`BaseAnalysis` method :meth:`BaseAnalysis._default_options` instead of this method except in the case where the experiment requires different defaults to the used analysis class. - :meth:`BaseExperiment._transpiled_circuits` to override the default transpilation of circuits before execution. - :meth:`BaseExperiment._additional_metadata` to add any experiment metadata to the result data. Analysis Subclasses ******************* To create an analysis subclass one only needs to implement the abstract :meth:`BaseAnalysis._run_analysis` method. This method takes a :class:`ExperimentData` container and kwarg analysis options. If any kwargs are used the :meth:`BaseAnalysis._default_options` method should be overriden to define default values for these options. 
The :meth:`BaseAnalysis._run_analysis` method should return a pair ``(results, figures)`` where ``results`` is a list of :class:`AnalysisResultData` and ``figures`` is a list of :class:`matplotlib.figure.Figure`. The :mod:`qiskit_experiments.data_processing` module contains classes for building data processor workflows to help with advanced analysis of experiment data. Classes ======= Experiment Data Classes *********************** .. autosummary:: :toctree: ../stubs/ ExperimentData ExperimentStatus JobStatus AnalysisStatus AnalysisResultData ExperimentConfig AnalysisConfig ExperimentEncoder ExperimentDecoder .. _composite-experiment: Composite Experiment Classes **************************** .. autosummary:: :toctree: ../stubs/ ParallelExperiment BatchExperiment CompositeAnalysis Base Classes ************ .. autosummary:: :toctree: ../stubs/ BaseExperiment BaseAnalysis .. _create-experiment: """ from qiskit.providers.options import Options from qiskit_experiments.database_service.db_analysis_result import DbAnalysisResultV1 from qiskit_experiments.database_service.db_experiment_data import ( ExperimentStatus, JobStatus, AnalysisStatus, ) from .base_analysis import BaseAnalysis from .base_experiment import BaseExperiment from .configs import ExperimentConfig, AnalysisConfig from .analysis_result_data import AnalysisResultData from .experiment_data import ExperimentData from .composite import ( ParallelExperiment, BatchExperiment, CompositeAnalysis, ) from .json import ExperimentEncoder, ExperimentDecoder
mriedem/qiskit-experiments
qiskit_experiments/calibration_management/control_channel_map.py
# This code is part of Qiskit.
#
# (C) Copyright IBM 2022.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""A qubit to control channel map."""

from typing import Any, Dict, List, Tuple

from qiskit.pulse import ControlChannel


class ControlChannelMap:
    """Serialization helper for qubit-tuple -> control-channel mappings."""

    def __init__(self, control_channel_map: Dict[Tuple[int, ...], List[ControlChannel]]):
        """Setup the control channel map.

        Args:
            control_channel_map: A configuration dictionary of any control channels. The
                keys are tuples of qubits and the values are a list of ControlChannels
                that correspond to the qubits in the keys. A falsy value (``None`` or an
                empty dict) results in an empty mapping.
        """
        if control_channel_map:
            self._map = control_channel_map
        else:
            self._map = {}

    @property
    def chan_map(self):
        """The qubits to control channel map."""
        return self._map

    def config(self) -> Dict[str, Any]:
        """Return the settings used to initialize the mapping."""
        entries = []
        for qubits, channels in self._map.items():
            # Channels are stored by index only; they are rebuilt in from_config.
            entries.append({"key": qubits, "value": [channel.index for channel in channels]})
        return {
            "class": self.__class__.__name__,
            "map": entries,
        }

    @classmethod
    def from_config(cls, config: Dict) -> "ControlChannelMap":
        """Deserialize the control channel map given the input dictionary."""
        restored = {}
        for entry in config["map"]:
            restored[tuple(entry["key"])] = [ControlChannel(index) for index in entry["value"]]
        return cls(restored)

    def __json_encode__(self):
        """Convert to a format that can be JSON serialized."""
        return self.config()

    @classmethod
    def __json_decode__(cls, value: Dict[str, Any]) -> "ControlChannelMap":
        """Load from the JSON compatible format produced by __json_encode__."""
        return cls.from_config(value)
mriedem/qiskit-experiments
qiskit_experiments/test/tphi_backend.py
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021, 2022.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""
TphiBackend class.
Temporary backend to be used to test the Tphi experiment.

The backend delegates to an internal T1Backend and T2RamseyBackend: incoming
circuits are routed to one or the other based on their composite metadata, and
the two result lists are merged into a single Result.
"""

from qiskit.providers import BackendV1
from qiskit.providers.models import QasmBackendConfiguration
from qiskit.result import Result
from qiskit_experiments.framework import Options
from qiskit_experiments.test.utils import FakeJob
from qiskit_experiments.test.t1_backend import T1Backend
from qiskit_experiments.test.t2ramsey_backend import T2RamseyBackend

# Fix seed for simulations
SEED = 9000


class TphiBackend(BackendV1):
    """
    A simple and primitive backend, to be run by the Tphi tests
    """

    def __init__(
        self,
        t1=None,
        t2ramsey=None,
        freq=None,
        initial_prob1=None,
        readout0to1=None,
        readout1to0=None,
    ):
        """
        Initialize the Tphi backend.

        # t1: T1 relaxation time handed to the internal T1Backend.
        # t2ramsey / freq: dephasing time and oscillation frequency for the
        #   internal T2RamseyBackend's fit-parameter dictionary (``_p0``).
        # initial_prob1 / readout0to1 / readout1to0: state-preparation and
        #   readout error parameters forwarded to both internal backends.
        """
        self._configuration = QasmBackendConfiguration(
            backend_name="tphi_simulator",
            backend_version="0",
            n_qubits=int(1e6),
            basis_gates=["barrier", "h", "p", "delay", "measure", "x"],
            gates=[],
            local=True,
            simulator=True,
            conditional=False,
            open_pulse=False,
            memory=False,
            max_shots=int(1e6),
            coupling_map=None,
        )
        # T1Backend expects a per-qubit list, hence the wrapping in a list.
        self._t1 = [t1]
        self._t2ramsey = t2ramsey
        self._freq = freq
        self._initial_prob1 = initial_prob1
        self._readout0to1 = readout0to1
        self._readout1to0 = readout1to0
        # Maps experiment kind ("T1" / "T2*") to the backend that simulates it.
        self._internal_backends = {}
        super().__init__(self._configuration)
        self._internal_backends["T1"] = T1Backend(
            self._t1, self._initial_prob1, self._readout0to1, self._readout1to0
        )
        if self._initial_prob1 is None:
            self._initial_prob_plus = None
        else:
            self._initial_prob_plus = 0.5  # temporary
        # Initial guess / ground-truth fit parameters for the Ramsey decay model.
        self._p0 = {
            "A": [0.5],
            "T2star": [t2ramsey],
            "f": [freq],
            "phi": [0.0],
            "B": [0.5],
        }
        self._internal_backends["T2*"] = T2RamseyBackend(
            self._p0, self._initial_prob_plus, self._readout0to1, self._readout1to0
        )

    def configuration(self):
        """Return the backend configuration.

        Returns:
            BackendConfiguration: the configuration for the backend.
        """
        return self._configuration

    @classmethod
    def _default_options(cls):
        """Default options of the test backend."""
        return Options()

    def run(self, run_input, **options):
        """
        Run the Tphi backend.

        Splits the input circuits by the ``experiment_type`` recorded in their
        composite metadata, runs each group on the matching internal backend,
        and merges the two result lists into one Result wrapped in a FakeJob.
        """
        self.options.update_options(**options)
        shots = 1000
        t1_circuits = []
        t2ramsey_circuits = []
        for circ in run_input:
            if circ.metadata["composite_metadata"][0]["experiment_type"] == "T1":
                t1_circuits.append(circ)
            elif circ.metadata["composite_metadata"][0]["experiment_type"] == "T2Ramsey":
                t2ramsey_circuits.append(circ)
            else:
                raise ValueError("Illegal name for circuit in Tphi")
        job_t1 = self._internal_backends["T1"].run(run_input=t1_circuits, shots=shots)
        job_t2ramsey = self._internal_backends["T2*"].run(run_input=t2ramsey_circuits, shots=shots)
        # Concatenate the per-circuit results of both sub-jobs into one Result.
        final_results = job_t1.result().results + job_t2ramsey.result().results
        result_for_fake = Result(
            backend_name="Tphi backend",
            backend_version="0",
            qobj_id="0",
            job_id="0",
            success=True,
            results=final_results,
            status="JobStatus.DONE",
        )
        return FakeJob(self, result_for_fake)
AsherManangan/beginner-projects
beginner-projects-solutions/pythagoreantriple.py
def is_pythagorean_triple(a, b, c):
    """Return True if sides a, b and hypotenuse c form a Pythagorean triple.

    That is, whether c**2 == a**2 + b**2.
    """
    return c * c == a * a + b * b


def main():
    """Read the three sides from the user and report the verdict."""
    a = int(input("Enter side A: "))
    b = int(input("Enter side B: "))
    c = int(input("Enter the size of the hypotenuse: "))
    print(" \n")
    if is_pythagorean_triple(a, b, c):
        print("Pythagorean Triple")
    else:
        print("Not a Pythagorean Triple")


# Guarded so the module can be imported (e.g. by tests) without prompting.
if __name__ == "__main__":
    main()
AsherManangan/beginner-projects
beginner-projects-solutions/99Bottles.py
def song_lyrics(start=99):
    """Return the full "99 Bottles of Beer" lyric as a single string.

    ``start`` is the initial bottle count (99 for the classic song); the
    wording of each verse matches the original script's output exactly.
    """
    lines = [f"Lyrics of the Song {start} Bottles of Beer"]
    # Verses counting down while more than one bottle remains.
    for count in range(start, 1, -1):
        lines.append(f"{count} bottles of beer on the wall, {count} bottles of beer. \n")
        lines.append(f"Take one down and pass it around, {count - 1} bottles of beer in the wall \n\n")
    # Closing verses: the last bottle, then none left.
    lines.append("1 bottle of beer on the wall, 1 bottle of beer. \n")
    lines.append("Take one down and pass it around, no more bottle of beer in the wall \n\n")
    lines.append("No more bottles of beer on the wall, no more bottle of beer. \n")
    lines.append(f"Go to store and buy some more, {start} bottles of beer.")
    return "\n".join(lines)


# Guarded so importing the module does not print; running it still does.
if __name__ == "__main__":
    print(song_lyrics())
AsherManangan/beginner-projects
beginner-projects-solutions/Fibonacci.py
def fib_sequence(n):
    """Return the first *n* Fibonacci numbers, starting 0, 1, 1, 2, ...

    n <= 0 yields an empty list.
    """
    terms = []
    a, b = 0, 1
    for _ in range(n):
        terms.append(a)
        a, b = b, a + b
    return terms


def main():
    """Prompt for a term count and print that many Fibonacci numbers."""
    upto = int(input("How many terms? "))
    print(f"Fibonacci Sequence up to {upto}th term: ")
    # Bug fix: the original printed the first two terms unconditionally and
    # then looped, emitting upto + 1 numbers for an input of upto.
    for term in fib_sequence(upto):
        print(term)


if __name__ == "__main__":
    main()
AsherManangan/beginner-projects
beginner-projects-solutions/ArmstrongNumber.py
def is_armstrong(number):
    """Return True if *number* is an Armstrong (narcissistic) number.

    A number is Armstrong when it equals the sum of its digits each raised
    to the power of the digit count. The original script always cubed the
    digits, which is only correct for three-digit candidates; using the
    digit count generalizes it (e.g. 9474 is now recognized).
    Negative numbers are never Armstrong numbers.
    """
    if number < 0:
        return False
    digits = str(number)
    power = len(digits)
    return number == sum(int(d) ** power for d in digits)


def main():
    """Prompt for a number, print its digit-power sum and the verdict."""
    number = int(input("Enter a number: "))
    power = len(str(number))
    # Renamed from 'sum' in the original to avoid shadowing the builtin.
    digit_power_sum = sum(int(d) ** power for d in str(number))
    print(digit_power_sum)
    if is_armstrong(number):
        print(f"{number} is an Armstrong Number")
    else:
        print(f"{number} is not an Armstrong Number")


if __name__ == "__main__":
    main()
prefpkg21/DAMM-arm
scripts/opencr_control.py
#!/usr/bin/env python
"""Joystick teleoperation node for the DAMM arm (OpenCR variant).

Translates /bluetooth_teleop/joy messages into Servos goal messages and a
Light status message consumed by the OpenCR-side controller.
"""
import rospy
import time
from sensor_msgs.msg import Joy
from std_msgs.msg import Float32MultiArray, MultiArrayDimension
from damm_msgs.msg import Servos, Light

# Joystick-to-servo gains.
move_strength0 = 300
move_strength1 = 100
move_strength_linear = 1.0

# Dynamixel IDs and travel limits (raw position units).
DXL0_ID = 1
DXL0_MIN = 730
DXL0_MAX = 3360

DXL1_ID = 2
DXL1_MIN = 727
DXL1_MAX = 1400

DXL2_ID = 3
DXL2_MIN = 730
DXL2_MAX = 3360

# Cutter positions.
CUT_LEFT = 800
CUT_RIGHT = 3000

# Home positions.
DXL0_HOME = 2040
DXL1_HOME = 2040
CUT_HOME = 1530

# Default setting
BAUDRATE = 1000000
# Serial port of the controller.
# ex) Windows: "COM1"  Linux: "/dev/ttyUSB0"
DEVICENAME = "/dev/ttyACM0".encode('utf-8')


class MoveArm():
    """Bridges joystick input to servo-goal and status-light topics."""

    def __init__(self):
        self.cmd_subscriber = rospy.Subscriber(
            '/bluetooth_teleop/joy', Joy, self.handle_joystick, queue_size=2)
        self.goal_pub = rospy.Publisher('damm_goal', Servos, queue_size=1)
        self.light_pub = rospy.Publisher('damm_light', Light, queue_size=1)

        self.ctrl_c = False
        rospy.on_shutdown(self.shutdownhook)
        self.rate = rospy.Rate(30)
        self.prev_stamp = 0
        # Last known servo positions (9999 == unknown until first feedback).
        self.dxl0_pos = 9999
        self.dxl1_pos = 9999
        self.dxl2_pos = 9999
        self.prev_dxl0 = 2050
        self.prev_dxl1 = 2040
        self.prev_dxl2 = 1530
        # Current goals, updated by handle_joystick and published periodically.
        self.dxl1_goal = 2050
        self.dxl2_goal = 2040
        self.dxl3_goal = 1530
        self.linsrvo_chng = 0
        self.led1_state = False
        self.led2_state = False
        self.manual_mode = False
        self.stale_light = False

    def shutdownhook(self):
        self.ctrl_c = True

    def handle_joystick(self, data):
        """Translate a Joy message into goal values (published by the main loop)."""
        # Read joystick message values.
        dxl0_scale = data.axes[2]    # right thumb horiz
        right = data.axes[4] - 1     # right trigger
        left = data.axes[3] - 1      # left trigger
        dxl1_scale = left - right

        # Any motion on the left stick switches the status light to manual mode.
        if data.axes[0] != 0 or data.axes[1] != 0:
            self.manual_mode = True
        else:
            self.manual_mode = False

        dxl2_cut = data.axes[6]  # normally 0 or 1/-1 pressed left/right d-pad

        # Right thumb vertical drives the linear servo with a small dead zone.
        if data.axes[5] > 0.2:
            self.linsrvo_chng = 1
        elif data.axes[5] < -0.2:
            self.linsrvo_chng = -1
        else:
            self.linsrvo_chng = 0

        self.dxl1_goal = dxl0_scale
        self.dxl2_goal = dxl1_scale
        self.dxl3_goal = dxl2_cut

    def publish_goals(self):
        """Publish the latest servo goals as a Servos message."""
        msg = Servos()
        msg.dxl1 = self.dxl1_goal
        msg.dxl2 = self.dxl2_goal
        msg.dxl3 = self.dxl3_goal
        msg.linear = self.linsrvo_chng
        self.goal_pub.publish(msg)

    def update_light(self):
        """Publish the status light: yellow in manual mode, red otherwise."""
        msg = Light()
        if self.manual_mode:
            # Yellow
            msg.led1 = True
            msg.led2 = True
        else:
            # Red
            msg.led1 = True
            msg.led2 = False
        # Bug fix: a rospy Publisher is not callable; the original
        # ``self.light_pub(msg)`` raised a TypeError — publish() must be used.
        self.light_pub.publish(msg)

    def update_current_position(self, msg):
        """Callback recording the present servo positions from feedback."""
        self.dxl0_pos = msg.dynamixel_state[0].present_position
        self.dxl1_pos = msg.dynamixel_state[1].present_position


if __name__ == "__main__":
    rospy.init_node('turret_aim', anonymous=True)
    arm_ctl = MoveArm()
    while not rospy.is_shutdown():
        arm_ctl.publish_goals()
        arm_ctl.update_light()
        arm_ctl.rate.sleep()
prefpkg21/DAMM-arm
scripts/control_arm.py
#!/usr/bin/env python
"""Direct Dynamixel/PWM arm control driven by a PS4 joystick.

Sends position goals to the dynamixel_workbench command service for the
rotary servos and drives a linear servo through a Jetson PWM pin.
"""
import rospy
import RPi.GPIO as GPIO
import time
from sensor_msgs.msg import Joy
from dynamixel_workbench_msgs.srv import DynamixelCommand
from dynamixel_workbench_msgs.msg import MXExt, MX, DynamixelStateList

# Joystick-to-servo gains.
move_strength0 = 300
move_strength1 = 100
move_strength_linear = 1.0

# Dynamixel IDs and travel limits (raw position units).
DXL0_ID = 0
DXL0_MIN = 730
DXL0_MAX = 3360

DXL1_ID = 1
DXL1_MIN = 727
DXL1_MAX = 1400

DXL2_ID = 2
DXL2_MIN = 730
DXL2_MAX = 3360

# Cutter positions.
CUT_LEFT = 800
CUT_RIGHT = 3000

# Home positions.
DXL0_HOME = 2040
DXL1_HOME = 2040
CUT_HOME = 1530

# Data Byte Length
LEN_GOAL_POSITION = 4
LEN_PRESENT_POSITION = 4

# Protocol version
PROTOCOL_VERSION = 2  # See which protocol version is used in the Dynamixel

# Register name used by the dynamixel_workbench command service.
position_goal = 'Goal_Position'

# Default setting
BAUDRATE = 1000000
# Serial port of the controller.
# ex) Windows: "COM1"  Linux: "/dev/ttyUSB0"
DEVICENAME = "/dev/ttyACM0".encode('utf-8')

# PWM-capable output pin per Jetson board model.
output_pins = {
    'JETSON_XAVIER': 18,
    'JETSON_NANO': 33,
    'JETSON_NX': 32,
}
output_pin = output_pins.get(GPIO.model, None)
if output_pin is None:
    raise Exception('PWM not supported on this board')


class MoveArm():
    """Joystick-driven controller for the arm's servos and linear actuator."""

    def __init__(self):
        self.cmd_subscriber = rospy.Subscriber(
            '/bluetooth_teleop/joy', Joy, self.handle_joystick, queue_size=2)
        self.servo_position_updater = rospy.Subscriber(
            '/dynamixel_workbench/dynamixel_state', DynamixelStateList,
            self.update_current_position, queue_size=1)
        self.dyna_cmd_srvproxy = rospy.ServiceProxy(
            '/dynamixel_workbench/dynamixel_command', DynamixelCommand)

        self.ctrl_c = False
        rospy.on_shutdown(self.shutdownhook)
        self.rate = rospy.Rate(1)
        # Last known servo positions (9999 == unknown until first feedback).
        self.dxl0_pos = 9999
        self.dxl1_pos = 9999
        self.dxl2_pos = 9999
        self.prev_dxl0 = 2050
        self.prev_dxl1 = 2040
        self.prev_dxl2 = 1530

        # Bug fix: wait for the command service to exist *before* sending the
        # home-position commands; the original called the proxy first and only
        # waited afterwards, so the initial homing could fail at startup.
        rospy.wait_for_service('/dynamixel_workbench/dynamixel_command')
        self.dyna_cmd_srvproxy('', 0, position_goal, DXL0_HOME)
        self.dyna_cmd_srvproxy('', 1, position_goal, DXL1_HOME)
        self.dyna_cmd_srvproxy('', 2, position_goal, CUT_HOME)

        # PWM setup for the linear servo (50% duty cycle, 50 Hz).
        self.linsrvo_val = 50
        self.p = GPIO.PWM(output_pin, 50)
        self.p.start(self.linsrvo_val)

    def shutdownhook(self):
        # Stop the PWM output and release the GPIO pin on shutdown.
        self.p.stop()
        GPIO.cleanup()
        self.ctrl_c = True

    def handle_joystick(self, data):
        """Translate a Joy message directly into servo commands."""
        # Ignore input until the feedback callback has reported real positions.
        if self.dxl0_pos == 9999 or self.dxl1_pos == 9999:
            return

        # Simple check for the PS4 driver: the triggers report 0 until first
        # touched, so skip messages sent before they are initialized.
        if data.axes[2] == 0 and data.axes[4] == 0:
            return

        # Read joystick message values.
        dxl0_scale = data.axes[2]    # right thumb horiz
        right = data.axes[4] - 1     # right trigger
        left = data.axes[3] - 1     # left trigger
        dxl1_scale = left - right
        dxl2_cut = data.axes[6]      # normally 0 or 1/-1 pressed left/right d-pad
        linsrvo_chng = data.axes[5]  # right thumb vertical

        # Bug fix: the original clamped the out-of-range value into a local
        # and then discarded it, so the duty cycle could approach but never
        # actually reach the 0/100 limits. Clamp and store it instead.
        val = self.linsrvo_val + (move_strength_linear * linsrvo_chng)
        self.linsrvo_val = max(0.0, min(100.0, val))

        # Change linear PWM signal (Jetson PWM expects 0-100 percent).
        self.p.ChangeDutyCycle(self.linsrvo_val)

        # Set the new positions within the Max/Min boundaries.
        new_dxl0 = max(min((self.dxl0_pos + (dxl0_scale * move_strength0)), DXL0_MAX), DXL0_MIN)
        new_dxl1 = max(min(self.dxl1_pos + (dxl1_scale * move_strength1), DXL1_MAX), DXL1_MIN)

        # Only command the servos on a meaningful change to reduce service load.
        dxl0_diff = abs(self.dxl0_pos - new_dxl0)
        dxl1_diff = abs(self.dxl1_pos - new_dxl1)
        if dxl0_diff >= 10:
            self.dyna_cmd_srvproxy.call('', DXL0_ID, position_goal, new_dxl0)
        if dxl1_diff >= 10:
            self.dyna_cmd_srvproxy.call('', DXL1_ID, position_goal, new_dxl1)

        # Cutter: d-pad left/right swings it; released returns it home.
        if dxl2_cut == 1:    # left
            self.dyna_cmd_srvproxy.call('', DXL2_ID, position_goal, CUT_LEFT)
        elif dxl2_cut == -1:  # right
            self.dyna_cmd_srvproxy.call('', DXL2_ID, position_goal, CUT_RIGHT)
        else:
            self.dyna_cmd_srvproxy.call('', DXL2_ID, position_goal, CUT_HOME)

    def update_current_position(self, msg):
        """Callback recording the present servo positions from feedback."""
        self.dxl0_pos = msg.dynamixel_state[0].present_position
        self.dxl1_pos = msg.dynamixel_state[1].present_position


if __name__ == "__main__":
    rospy.init_node('turret_aim', anonymous=True)
    # Pin Setup: board pin-numbering scheme.
    GPIO.setmode(GPIO.BOARD)
    # Set pin as an output pin with optional initial state of HIGH.
    GPIO.setup(output_pin, GPIO.OUT, initial=GPIO.HIGH)
    turret = MoveArm()
    while not rospy.is_shutdown():
        rospy.spin()
by09115/Flask-URLshortener
Tests/get_url_test.py
from Tests import TestCaseBase, check_status_code


class GetUrlTest(TestCaseBase):
    """Tests for resolving a short token back to its original URL."""

    def setUp(self):
        super(GetUrlTest, self).setUp()
        # Create a short URL up front so the GET tests have data to resolve.
        self.short_url = self.save_url_request()

    @check_status_code(302)
    def test_success_get_url(self):
        """A known token redirects (302) to the stored original URL."""
        rv = self.get_url_request('b')
        # Bug fix: the redirect target lives in the response *headers*;
        # Response objects have no 'handlers' attribute.
        self.assertEqual(rv.headers['location'], 'http://blog.jaehoon.kim')
        return rv

    @check_status_code(204)
    def test_wrong_url(self):
        """An unknown token yields 204 No Content."""
        return self.get_url_request('Pizza')
by09115/Flask-URLshortener
Tests/save_url_test.py
from Tests import TestCaseBase, check_status_code


class SaveUrlTest(TestCaseBase):
    """Tests for creating shortened URLs."""

    @check_status_code(201)
    def test_success_save_url(self):
        """Saving a URL returns 201 with the shortened link."""
        rv = self.save_url_request()
        # Bug fix: the endpoint responds with the key 'output_url'
        # (see Server.view), and the expected host was misspelled
        # ('lovalhost').
        self.assertEqual(rv.json['output_url'], 'http://localhost/b')
        return rv

    @check_status_code(201)
    def test_exist_url(self):
        """Saving the same URL twice returns the same short link."""
        rv = self.save_url_request()
        url = rv.json['output_url']
        self.assertEqual(rv.status_code, 201)
        rv2 = self.save_url_request()
        # Bug fix: the original compared rv to itself; the second response
        # must be checked to prove no new token was minted.
        self.assertEqual(rv2.json['output_url'], url)
        return rv2
by09115/Flask-URLshortener
Tests/__init__.py
from functools import wraps
from unittest import TestCase

from mongoengine import connect
from mongoengine.connection import _get_db

from Server.app import create_app


class TestCaseBase(TestCase):
    """Common fixture: app + test client + throw-away MongoDB database."""

    def setUp(self):
        self.app = create_app()
        self.client = self.app.test_client()
        self.mongodb = self.app.config['MONGODB_URI']
        connect(self.mongodb)

    def tearDown(self):
        # Bug fix: _get_db is a function and was never called, so the test
        # database was never actually dropped between tests.
        db = _get_db()
        db.connection.drop_database(self.mongodb)

    def save_url_request(self, url='http://blog.jaehoon.kim'):
        """POST *url* to the shortener endpoint.

        Bug fix: the view reads request.json['input_url'], so that is the
        key that must be sent (the original posted 'output_url').
        """
        return self.client.post('/', json={'input_url': url})

    def get_url_request(self, url):
        """GET a previously shortened token."""
        return self.client.get('/' + url)


def check_status_code(status_code):
    """Decorator asserting the wrapped test's returned response status."""

    def decorator(fn):
        @wraps(fn)
        def wrapper(self, *args, **kwargs):
            rv = fn(self, *args, **kwargs)
            self.assertEqual(rv.status_code, status_code)

        return wrapper

    return decorator
by09115/Flask-URLshortener
Server/view.py
from flask import request, redirect, Response, current_app
from flask_restful import Resource
from base62 import encode, decode

from Server.model import URLModel


class URLHandling(Resource):
    """Shorten URLs (POST) and resolve short tokens back to them (GET)."""

    def get(self, input_url):
        """Redirect a short token to its stored original URL.

        Returns 204 No Content when the token is unknown
        (see Tests/get_url_test.py).
        """
        # The token is the base62 encoding of the record's numeric id.
        data = URLModel.objects(id=decode(input_url)).first()
        if data is None:
            return '', 204
        # Bug fix: the redirect response was built but never returned, and it
        # must point at the stored original URL, not back at this server.
        return redirect(data.link)

    def post(self):
        """Store the submitted URL (idempotently) and return its short link."""
        # Read the original URL to shorten from the JSON body.
        input_url = request.json['input_url']
        # Bug fix: the record was instantiated but never persisted. Also reuse
        # an existing record so repeated posts return the same short link
        # (see Tests/save_url_test.py::test_exist_url).
        data = URLModel.objects(link=input_url).first()
        if data is None:
            # NOTE(review): URLModel.id has no auto-increment configured, so
            # the next id is derived from the collection size — confirm this
            # is acceptable for concurrent writers.
            data = URLModel(id=URLModel.objects.count() + 1, link=input_url)
            data.save()
        # Flask sets SERVER_NAME to None by default; fall back to localhost.
        host = current_app.config['SERVER_NAME'] or 'localhost'
        # 201 Created, as the tests expect.
        return {'output_url': 'http://{}/{}'.format(host, encode(data.id))}, 201
by09115/Flask-URLshortener
Server/model.py
from mongoengine import *
from datetime import datetime


class URLModel(Document):
    """A shortened-URL record stored in MongoDB."""

    # Numeric primary key. NOTE(review): no auto-increment is configured,
    # so callers must supply a unique id explicitly — confirm against the
    # view code that creates these documents.
    id = IntField(primary_key=True)
    # The stored URL; presumably the original (long) URL the short link
    # resolves to — verify against Server/view.py.
    link = StringField(required=True)
    # Creation timestamp; the callable default is evaluated per document.
    generated_date = DateTimeField(default=datetime.utcnow)
by09115/Flask-URLshortener
Server/config.py
class Config:
    """Flask application settings, loaded via ``app.config.from_object``."""

    # Allow non-ASCII characters to pass through jsonify un-escaped.
    JSON_AS_ASCII = False
    # MongoDB Atlas connection string; the credential placeholder must be
    # filled in before deployment.
    MONGODB_URI = 'mongodb://altasAdmin:<EMAIL>/test?retryWrites=true'
by09115/Flask-URLshortener
Server/app.py
from flask import Flask
from flask_restful import Api
from mongoengine import connect

from Server.config import Config


def create_app():
    """Application factory: configure Flask, MongoDB and the API routes."""
    app = Flask(__name__)
    api = Api(app)
    app.config.from_object(Config)

    # Bug fix: current_app has no application context inside the factory
    # (it raises RuntimeError); read the config from the app just created.
    connect(app.config['MONGODB_URI'])

    from Server.view import URLHandling
    # The tests POST to '/' and GET '/<token>', so register both routes.
    api.add_resource(URLHandling, '/', '/<input_url>')

    return app
codedsk/hubzero-tool-spiro-flask-1
www/app/views.py
from __future__ import print_function import numpy as np import sys from app import app from flask import render_template, jsonify from flask_wtf import FlaskForm from wtforms import IntegerField from wtforms.validators import DataRequired, NumberRange def debug(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def spiro(n1,n2,n3): t = np.linspace(0,1,1000); z = np.exp(1j*2*np.pi*n1*t) + np.exp(1j*2*np.pi*n2*t) + np.exp(1j*2*np.pi*n3*t) return (np.real(z),np.imag(z)) class SpiroForm(FlaskForm): n1 = IntegerField('n1', default=13, validators=[DataRequired(), NumberRange(min=-20, max=20)]) n2 = IntegerField('n2', default=-7, validators=[DataRequired(), NumberRange(min=-20, max=20)]) n3 = IntegerField('n3', default=-3, validators=[DataRequired(), NumberRange(min=-20, max=20)]) @app.route('/', methods=['GET','POST']) def index(): debug("received request for /") form = SpiroForm() if form.validate_on_submit(): debug("running spiro with n1={} n2={} n3={}" .format(form.n1.data,form.n2.data,form.n3.data)) (x,y) = spiro(form.n1.data,form.n2.data,form.n3.data) debug("returning spirograph data") return jsonify(x=x.tolist(), y=y.tolist()) if len(form.errors) > 0: debug("processing errors: ".format(form.errors)) # handle GET and everything else debug("rendering index.html") return render_template("index.html", form=form)
codedsk/hubzero-tool-spiro-flask-1
www/app/__init__.py
"""Package initializer: creates the Flask app and wires up views and config."""
from flask import Flask

# Initialize the app
app = Flask(__name__, instance_relative_config=True)

# Load the views. NOTE: this import must come after the app object exists,
# because app.views imports ``app`` back from this package (circular import).
from app import views

# Load the config file
app.config.from_object('config')

# set a secret key for sessions
app.secret_key = '<KEY>'
724432436/Login_SYS
login/forms.py
from django import forms
from captcha.fields import CaptchaField


class UserForm(forms.Form):
    """Login form: username, password and a captcha challenge."""

    username = forms.CharField(label='用户名', max_length=128,
                               widget=forms.TextInput(attrs={'class': 'form-control'}))
    password = forms.CharField(label='密码', max_length=256,
                               widget=forms.PasswordInput(attrs={'class': 'form-control'}))
    captcha = CaptchaField(label='验证码')


class RegisterForm(forms.Form):
    """Registration form with password confirmation, e-mail, gender and captcha."""

    # Choice pairs: the first element is the stored value, the second the
    # display label shown to the user.
    gender = (
        ('male', "男"),
        ('female', "女"),
    )
    username = forms.CharField(label="用户名", max_length=128,
                               widget=forms.TextInput(attrs={'class': 'form-control'}))
    password1 = forms.CharField(label="密码", max_length=256,
                                widget=forms.PasswordInput(attrs={'class': 'form-control'}))
    password2 = forms.CharField(label="确认密码", max_length=256,
                                widget=forms.PasswordInput(attrs={'class': 'form-control'}))
    email = forms.EmailField(label="邮箱地址",
                             widget=forms.EmailInput(attrs={'class': 'form-control'}))
    sex = forms.ChoiceField(label='性别', choices=gender)
    captcha = CaptchaField(label='验证码')
koukyo1994/streamlit-audio
components/augmentation.py
import librosa
import numpy as np
import streamlit as st


class AudioTransform:
    """Base class for waveform augmentations applied with probability ``p``.

    Subclasses implement :meth:`apply`; calling the transform either applies
    it (always, or with probability ``p``) or returns the input unchanged.
    """

    def __init__(self, always_apply=False, p=0.5):
        self.always_apply = always_apply
        self.p = p

    def __call__(self, y: np.ndarray):
        if self.always_apply:
            return self.apply(y)
        else:
            if np.random.rand() < self.p:
                return self.apply(y)
            else:
                return y

    def apply(self, y: np.ndarray):
        raise NotImplementedError


class NoiseInjection(AudioTransform):
    """Add Gaussian noise scaled by a level drawn uniformly from [0, max_noise_level)."""

    def __init__(self, always_apply=False, p=0.5, max_noise_level=0.5, sr=32000):
        super().__init__(always_apply, p)
        self.noise_level = (0.0, max_noise_level)
        self.sr = sr

    def apply(self, y: np.ndarray, **params):
        noise_level = np.random.uniform(*self.noise_level)
        noise = np.random.randn(len(y))
        # Cast back so the augmented signal keeps the input dtype.
        augmented = (y + noise * noise_level).astype(y.dtype)
        return augmented


class PitchShift(AudioTransform):
    """Shift pitch by a random integer number of semitones in [-max_range, max_range)."""

    def __init__(self, always_apply=False, p=0.5, max_range=5, sr=32000):
        super().__init__(always_apply, p)
        self.max_range = max_range
        self.sr = sr

    def apply(self, y: np.ndarray, **params):
        n_steps = np.random.randint(-self.max_range, self.max_range)
        # NOTE(review): positional arguments match librosa < 0.10; newer
        # librosa requires keyword arguments (sr=, n_steps=) -- confirm the
        # pinned librosa version.
        augmented = librosa.effects.pitch_shift(y, self.sr, n_steps)
        return augmented


class TimeStretch(AudioTransform):
    """Time-stretch by a random rate drawn from [0, max_rate)."""

    def __init__(self, always_apply=False, p=0.5, max_rate=1, sr=32000):
        super().__init__(always_apply, p)
        self.max_rate = max_rate
        self.sr = sr

    def apply(self, y: np.ndarray, **params):
        # NOTE(review): a rate of (or near) 0 is not a valid stretch factor
        # for librosa -- consider a positive lower bound.
        rate = np.random.uniform(0, self.max_rate)
        augmented = librosa.effects.time_stretch(y, rate)
        return augmented


def _db2float(db: float, amplitude=True):
    """Convert a decibel value to a linear gain (amplitude or power scale)."""
    if amplitude:
        return 10 ** (db / 20)
    else:
        return 10 ** (db / 10)


def volume_down(y: np.ndarray, db: float):
    """
    Low level API for decreasing the volume

    Parameters
    ----------
    y: numpy.ndarray
        stereo / monaural input audio
    db: float
        how much decibel to decrease (expected non-negative)

    Returns
    -------
    applied: numpy.ndarray
        audio with decreased volume
    """
    applied = y * _db2float(-db)
    return applied


def volume_up(y: np.ndarray, db: float):
    """
    Low level API for increasing the volume

    Parameters
    ----------
    y: numpy.ndarray
        stereo / monaural input audio
    db: float
        how much decibel to increase (expected non-negative)

    Returns
    -------
    applied: numpy.ndarray
        audio with increased volume
    """
    applied = y * _db2float(db)
    return applied


class RandomVolume(AudioTransform):
    """Randomly raise or lower the volume by up to ``limit`` dB."""

    def __init__(self, always_apply=False, p=0.5, limit=10):
        super().__init__(always_apply, p)
        self.limit = limit

    def apply(self, y: np.ndarray, **params):
        db = np.random.uniform(-self.limit, self.limit)
        if db >= 0:
            return volume_up(y, db)
        else:
            # BUG FIX: volume_down expects a *positive* decibel amount
            # (it negates internally); passing the negative db directly
            # amplified the signal instead of attenuating it.
            return volume_down(y, -db)


def augmentations_on_wave(y: np.ndarray, sr: int):
    """Build an augmentation pipeline from sidebar widgets and apply it to ``y``."""
    st.sidebar.markdown("#### Augmentations option")
    options = st.sidebar.multiselect(
        "augmentation to apply",
        options=["noise", "pitch", "stretch", "volume"])
    compose = []
    if "noise" in options:
        st.sidebar.markdown("NoiseInjection")
        max_noise_level = st.sidebar.number_input(
            "max noise level", min_value=0.01, max_value=0.5, value=0.1, step=0.01)
        always_apply = st.sidebar.checkbox(
            "always_apply", key="noise_always_apply")
        p = st.sidebar.slider(
            "p", min_value=0.0, max_value=1.0, value=0.5, step=0.01, key="noise_p")
        compose.append(
            NoiseInjection(
                always_apply=always_apply,
                p=p,
                max_noise_level=max_noise_level,
                sr=sr))
    if "pitch" in options:
        st.sidebar.markdown("PitchShift")
        max_range = st.sidebar.number_input(
            "max range", min_value=1, max_value=10, value=5, step=1)
        always_apply = st.sidebar.checkbox(
            "always_apply", key="pitch_always_apply")
        p = st.sidebar.slider(
            "p", min_value=0.0, max_value=1.0, value=0.5, step=0.01, key="pitch_p")
        compose.append(
            PitchShift(  # type: ignore
                always_apply=always_apply,
                p=p,
                max_range=max_range,
                sr=sr))
    if "stretch" in options:
        st.sidebar.markdown("Stretch")
        max_rate = st.sidebar.number_input(
            "max_rate", min_value=0.01, max_value=2.5, value=1.0, step=0.01)
        always_apply = st.sidebar.checkbox(
            "always_apply", key="stretch_always_apply")
        p = st.sidebar.slider(
            "p", min_value=0.0, max_value=1.0, value=0.5, step=0.01, key="stretch_p")
        compose.append(
            TimeStretch(  # type: ignore
                always_apply=always_apply,
                p=p,
                max_rate=max_rate,
                sr=sr))
    if "volume" in options:
        st.sidebar.markdown("Volume")
        limit = st.sidebar.number_input(
            "db limit", min_value=1, max_value=20, value=3, step=1)
        always_apply = st.sidebar.checkbox(
            "always_apply", key="volume_always_apply")
        p = st.sidebar.slider(
            "p", min_value=0.0, max_value=1.0, value=0.5, step=0.01, key="volume_p")
        compose.append(
            RandomVolume(  # type: ignore
                always_apply=always_apply,
                p=p,
                limit=limit))
    return apply(y, compose)


@st.cache
def apply(y: np.ndarray, compose: list):
    """Run the transforms in ``compose`` over a copy of ``y`` (cached by Streamlit)."""
    y_processed = y.copy()
    for augmentor in compose:
        y_processed = augmentor(y=y_processed)
    return y_processed
koukyo1994/streamlit-audio
utils/__init__.py
# Re-export the I/O helpers so callers can use them via a plain ``import utils``.
from .io import check_folder, check_audio_info, display_media_audio, read_audio, display_media_audio_from_ndarray, read_csv
koukyo1994/streamlit-audio
components/preprocessing.py
import nussl
import numpy as np
import streamlit as st
import pyroomacoustics as pra

from scipy import signal


def butterworth_filter(y: np.ndarray, sr: int, N: int, cutoff=500., btype="lowpass"):
    """Apply a zero-phase Butterworth filter of order ``N``.

    ``cutoff`` is in Hz and is normalized to the Nyquist frequency; filtfilt
    is used so the output has no phase distortion.
    """
    b, a = signal.butter(N, cutoff / (sr / 2.), btype=btype)
    y_filtered = signal.filtfilt(b, a, y)
    return y_filtered


def preprocess_on_wave(y: np.ndarray, sr: int, audio_path: str):
    """Apply the preprocessing step selected in the sidebar to ``y``.

    Returns the processed waveform, or ``None`` when "-" (no processing)
    is selected.
    """
    st.sidebar.markdown("#### Preprocess option")
    option = st.sidebar.selectbox(
        "process",
        options=["-", "normalize", "lowpass", "highpass", "bandpass",
                 "denoise", "nussl"])
    if option == "lowpass":
        param_N = st.sidebar.number_input(
            "N", min_value=1, max_value=10, value=4, step=1)
        param_cutoff = st.sidebar.number_input(
            "cutoff", min_value=20.0, max_value=4000.0, value=500.0, step=10.0)
        filtered = butterworth_filter(
            y, sr=sr, N=param_N, cutoff=param_cutoff, btype="lowpass")
        # filtfilt returns a C-contiguous array; librosa utilities expect
        # Fortran order, hence the conversion.
        return np.asfortranarray(filtered)
    elif option == "highpass":
        param_N = st.sidebar.number_input(
            "N", min_value=1, max_value=10, value=4, step=1)
        param_cutoff = st.sidebar.number_input(
            "cutoff", min_value=500.0, max_value=16000.0, value=1000.0, step=10.0)
        filtered = butterworth_filter(
            y, sr=sr, N=param_N, cutoff=param_cutoff, btype="highpass")
        return np.asfortranarray(filtered)
    elif option == "bandpass":
        param_N = st.sidebar.number_input(
            "N", min_value=1, max_value=10, value=4, step=1)
        upper_limit = st.sidebar.number_input(
            "upper_limit", min_value=0.0, max_value=16000.0, value=16000.0, step=10.0)
        lower_limit = st.sidebar.number_input(
            "lower_limit", min_value=0.0, max_value=16000.0, value=20.0, step=10.0)
        # Band-pass implemented as a low-pass followed by a high-pass.
        lowpassed = butterworth_filter(
            y, sr=sr, N=param_N, cutoff=upper_limit, btype="lowpass")
        bandpassed = butterworth_filter(
            lowpassed, sr=sr, N=param_N, cutoff=lower_limit, btype="highpass")
        return np.asfortranarray(bandpassed)
    elif option == "normalize":
        # Scale so the absolute peak hits 1.0.
        max_vol = np.abs(y).max()
        y_vol = y * 1 / (max_vol)
        return np.asfortranarray(y_vol)
    elif option == "denoise":
        frame_len = st.sidebar.number_input(
            "frame_len", min_value=1, max_value=8192, value=512, step=32)
        lpc_order = st.sidebar.number_input(
            "lpc_order", min_value=1, max_value=100, value=20, step=1)
        iterations = st.sidebar.number_input(
            "iterations", min_value=1, max_value=100, value=2, step=1)
        alpha = st.sidebar.number_input(
            "alpha", min_value=0.1, max_value=10.0, value=0.8, step=0.1)
        thresh = st.sidebar.number_input(
            "thresh", min_value=0.01, value=0.01, step=0.01, max_value=10.0)
        denoised = pra.denoise.apply_iterative_wiener(
            y, frame_len, lpc_order, iterations, alpha, thresh)
        return denoised
    elif option == "nussl":
        history = nussl.AudioSignal(audio_path)
        method = st.sidebar.selectbox(
            "Denoise method",
            options=[
                "Repet", "ICA", "FT2D", "REPETSIM", "TimbreClustering",
                "HPSS", "DUET", "PROJET"
            ])
        if method == "Repet":
            separator = nussl.separation.primitive.Repet(history)
        elif method == "ICA":
            separator = nussl.separation.factorization.ICA(history)
        elif method == "FT2D":
            separator = nussl.separation.primitive.FT2D(history)
        elif method == "REPETSIM":
            # BUG FIX: this branch previously tested for "REMETSIM" (not one
            # of the selectbox options) and referenced a nonexistent class of
            # the same name, so choosing "REPETSIM" left `separator` unbound
            # and crashed below.  nussl's REPET-SIM separator is RepetSim.
            separator = nussl.separation.primitive.RepetSim(history)
        elif method == "TimbreClustering":
            separator = nussl.separation.primitive.TimbreClustering(
                history, num_sources=2, n_components=50)
        elif method == "HPSS":
            separator = nussl.separation.primitive.HPSS(history)
        elif method == "DUET":
            separator = nussl.separation.spatial.Duet(history, num_sources=2)
        elif method == "PROJET":
            separator = nussl.separation.spatial.Projet(history, num_sources=2)
        estimates = separator()
        # estimates[1] is taken as the foreground source; return its first
        # (mono) channel.
        foreground = estimates[1].audio_data[0]
        return foreground
    else:
        # "-" selected: no preprocessing.
        return None
koukyo1994/streamlit-audio
main.py
import pandas as pd
import streamlit as st

import components as C
import utils

if __name__ == "__main__":
    st.title("Audio Checking Tool")
    base_folder = st.text_input("specify directory which contains audio file")
    # Ground-truth label tables: tp = confirmed (true-positive) species
    # annotations, fp = rejected (false-positive) ones.
    # NOTE(review): paths are hard-coded relative to the working directory --
    # confirm the app is always launched from the expected folder.
    tp = pd.read_csv("../input/train_tp.csv")
    fp = pd.read_csv("../input/train_fp.csv")
    st.dataframe(tp)
    # check_folder returns a Path once the user has drilled down to a folder
    # that actually contains audio files, else None.
    path = utils.check_folder(base_folder)
    if path is not None:
        audio_files = sorted([
            f.name
            for f in (list(path.glob("*.wav")) + list(path.glob("*.mp3")) +
                      list(path.glob("*.flac")))
        ])
        audio_file_name = st.selectbox(
            "Choose audio file", options=audio_files)
        # NOTE(review): only the ".flac" suffix is stripped to build the
        # recording id, although .wav/.mp3 files are also listed -- confirm
        # the label CSVs only reference flac recordings.
        audio_id = audio_file_name.replace(".flac", "")
        # Annotations belonging to the selected recording.
        tp_in_audio = tp.query(f"recording_id == '{audio_id}'").reset_index()
        fp_in_audio = fp.query(f"recording_id == '{audio_id}'").reset_index()
        st.text("tp")
        st.dataframe(tp_in_audio)
        st.text("fp")
        st.dataframe(fp_in_audio)
        audio_path = path / audio_file_name
        audio_info = utils.check_audio_info(audio_path)
        C.write_audio_info_to_sidebar(audio_path, audio_info)
        second = C.set_start_second(max_value=audio_info["duration"])
        sr = C.set_sampling_rate(audio_info["sample_rate"])
        options = st.sidebar.selectbox(
            "Audio option",
            options=["normal", "preprocessing", "augmentations"])
        utils.display_media_audio(audio_path, second)
        # Optional event-level annotation CSV uploaded by the user; when
        # present, the *_with_annotation plotting variants are used instead
        # of the tp/fp tables.
        annotation = st.sidebar.file_uploader(
            "Upload annotation file if exist")
        if annotation is not None:
            event_level_annotation = utils.read_csv(annotation)
        else:
            event_level_annotation = None
        y = utils.read_audio(audio_path, audio_info, sr=sr)
        if options == "preprocessing":
            y_processed = C.preprocess_on_wave(
                y, sr=sr, audio_path=str(audio_path))
            if y_processed is not None:
                st.text("Processed audio")
                utils.display_media_audio_from_ndarray(y_processed, sr)
            if event_level_annotation is None:
                C.waveplot(y, sr, y_processed)
                C.specshow(y, sr, y_processed)
            else:
                C.waveplot_with_annotation(y, sr, event_level_annotation,
                                           audio_file_name, y_processed)
                C.specshow_with_annotation(y, sr, event_level_annotation,
                                           audio_file_name, y_processed)
        elif options == "augmentations":
            y_processed = C.augmentations_on_wave(
                y, sr=sr)
            if y_processed is not None:
                st.text("Processed audio")
                utils.display_media_audio_from_ndarray(y_processed, sr)
            if event_level_annotation is None:
                C.waveplot(y, sr, y_processed)
                C.specshow(y, sr, y_processed)
            else:
                C.waveplot_with_annotation(y, sr, event_level_annotation,
                                           audio_file_name, y_processed)
                C.specshow_with_annotation(y, sr, event_level_annotation,
                                           audio_file_name, y_processed)
        else:
            # "normal": plot the raw audio, overlaying tp/fp spans when no
            # uploaded annotation file is present.
            if event_level_annotation is None:
                C.waveplot(y, sr, tp=tp_in_audio, fp=fp_in_audio)
                C.specshow(y, sr, tp=tp_in_audio, fp=fp_in_audio)
            else:
                C.waveplot_with_annotation(
                    y, sr, event_level_annotation, audio_file_name,
                    processed=None)
                C.specshow_with_annotation(
                    y, sr, event_level_annotation, audio_file_name,
                    y_processed=None)
koukyo1994/streamlit-audio
utils/io.py
import audioread
import io
import tempfile
import struct
import wave

import librosa
import numpy as np
import pandas as pd
import streamlit as st

from pathlib import Path
from typing import Optional


@st.cache
def read_csv(uploaded_file):
    """Read an uploaded CSV into a DataFrame (cached by Streamlit)."""
    df = pd.read_csv(uploaded_file)
    return df


@st.cache
def read_audio(path: Path, info: dict, sr: Optional[int] = None):
    """Load audio as mono float samples at ``sr`` (defaults to the file's own rate)."""
    if sr is None:
        sr = info["sample_rate"]
    y, _ = librosa.load(path, sr=sr, mono=True, res_type="kaiser_fast")
    return y


@st.cache
def read_audio_bytes(path: Path):
    """Return the raw bytes of the audio file (cached by Streamlit)."""
    with open(path, "rb") as f:
        audio_bytes = f.read()
    return audio_bytes


@st.cache
def check_audio_info(path: Path):
    """Return sample rate, channel count and duration without decoding the audio."""
    path_ = str(path)
    with audioread.audio_open(path_) as f:
        sr = f.samplerate
        ch = f.channels
        dur = f.duration
    return {"sample_rate": sr, "channels": ch, "duration": dur}


def display_media_audio_from_ndarray(y: np.ndarray, sr: int):
    """Render a float waveform as playable audio in the Streamlit app.

    The signal is peak-normalized into 16-bit PCM, packed into an in-memory
    mono WAV, and handed to ``st.audio``.
    """
    # BUG FIX: scale by the absolute peak.  Using y.max() alone overflowed
    # int16 (wrapping into loud clicks) whenever the negative peak exceeded
    # the positive one, and broke entirely for non-positive maxima.
    peak = np.abs(y).max()
    if peak == 0:
        # Silent input: avoid division by zero, keep it silent.
        peak = 1.0
    max_num = 32767.0 / peak
    y_hex = (y * max_num).astype(np.int16)
    binary_wave = struct.pack("h" * len(y_hex), *(y_hex.tolist()))
    with tempfile.TemporaryFile() as fp:
        w = wave.Wave_write(fp)  # type: ignore
        # (nchannels, sampwidth, framerate, nframes, comptype, compname)
        params = (1, 2, sr, len(binary_wave), "NONE", "not compressed")
        w.setparams(params)  # type: ignore
        w.writeframes(binary_wave)
        w.close()
        fp.seek(0)
        bytesio = io.BytesIO(fp.read())
    st.audio(bytesio)


def display_media_audio(path: Path, start_second: int = 0):
    """Embed an audio player for a wav/mp3/flac file, starting at ``start_second``."""
    format_ = path.name.split(".")[-1]
    if format_ == "mp3":
        format_ = "audio/mp3"
    elif format_ == "wav":
        format_ = "audio/wav"
    elif format_ == "flac":
        format_ = "audio/x-flac"
    else:
        st.warning("Selected type is not readable format")
    if format_ in {"audio/wav", "audio/mp3", "audio/x-flac"}:
        st.audio(
            read_audio_bytes(path), start_time=start_second, format=format_)


def check_folder(folder: str):
    """Resolve ``folder`` to a Path containing audio files, recursing into subfolders.

    Returns the Path as soon as wav/mp3/flac files are found; otherwise lets
    the user pick a subdirectory and recurses.  Returns ``None`` when nothing
    usable is found.
    """
    path = Path(folder)
    if not path.exists():
        st.warning("specified folder does not exist")
        return
    else:
        wavs = list(path.glob("*.wav"))
        mp3s = list(path.glob("*.mp3"))
        flacs = list(path.glob("*.flac"))
        subdirs = [
            subpaths for subpaths in path.glob("*") if subpaths.is_dir()
        ]
        if len(wavs) > 0:
            st.success(f"Found {len(wavs)} wav files")
            return path
        if len(mp3s) > 0:
            st.success(f"Found {len(mp3s)} mp3 files")
            return path
        if len(flacs) > 0:
            st.success(f"Found {len(flacs)} flac files")
            return path
        if len(subdirs) == 0:
            st.warning("No wav or mp3 found under the directory you specified")
            return
        else:
            subdir_names = sorted([subdir.name for subdir in subdirs])
            subfolder = st.selectbox(
                f"Pick one folder below {str(folder)}",
                options=subdir_names,
                key=f"{str(folder)}")
            new_folder = path / subfolder
            return check_folder(str(new_folder))
koukyo1994/streamlit-audio
components/plots.py
import librosa
import librosa.display as display
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import numpy as np
import pandas as pd
import streamlit as st


def waveplot(y: np.ndarray, sr: int, processed=None,
             tp: pd.DataFrame = None, fp: pd.DataFrame = None):
    """Plot the waveform (optionally overlaying a processed version and tp/fp spans)."""
    plot_wave = st.checkbox("Waveplot")
    if plot_wave:
        st.sidebar.markdown("#### Waveplot settings")
        start_second = st.sidebar.number_input(
            "start second", min_value=0, max_value=len(y) // sr,
            value=0, step=1, key="waveplot_start")
        end_second = st.sidebar.number_input(
            "end second", min_value=0, max_value=len(y) // sr,
            value=len(y) // sr, step=1, key="waveplot_end")
        start_index = start_second * sr
        # When the user keeps the maximum, include the trailing partial second.
        if end_second == len(y) // sr:
            end_index = len(y)
        else:
            end_index = end_second * sr
        fig = plt.figure(figsize=(12, 4))
        plt.grid(True)
        display.waveplot(y[start_index:end_index], sr=sr, alpha=0.5)
        if processed is not None:
            display.waveplot(
                processed[start_index:end_index], sr=sr, alpha=0.5,
                color="red")
        if tp is not None and len(tp) > 0:
            for _, row in tp.iterrows():
                plt.axvspan(row["t_min"], row["t_max"], color="g",
                            alpha=0.5, label=str(row["species_id"]))
        if fp is not None and len(fp) > 0:
            for _, row in fp.iterrows():
                plt.axvspan(row["t_min"], row["t_max"], color="r",
                            alpha=0.5, label=str(row["species_id"]))
        plt.legend()
        st.pyplot(fig)


def waveplot_with_annotation(y: np.ndarray, sr: int, annotation: pd.DataFrame,
                             filename: str, processed=None):
    """Waveform plot with colored spans for each annotated event of this file."""
    plot_wave = st.checkbox("Waveplot")
    # Annotations reference the .wav name even when an mp3 was selected.
    if filename.endswith(".mp3"):
        filename = filename.replace(".mp3", ".wav")
    # BUG FIX: the f-string placeholder was lost (the query compared against
    # a literal string), so no events were ever matched; restore the
    # filename interpolation.
    events = annotation.query(f"filename == '{filename}'")
    colors = [
        "#bf6565", "#ac7ceb", "#e3e176", "#f081e1", "#e8cb6b", "#25b4db",
        "#fa787e", "#a9f274", "#1d7335", "#797fb3"
    ]
    if plot_wave:
        st.sidebar.markdown("#### Waveplot settings")
        start_second = st.sidebar.number_input(
            "start second", min_value=0, max_value=len(y) // sr,
            value=0, step=1, key="waveplot_start")
        end_second = st.sidebar.number_input(
            "end second", min_value=0, max_value=len(y) // sr,
            value=len(y) // sr, step=1, key="waveplot_end")
        start_index = start_second * sr
        if end_second == len(y) // sr:
            end_index = len(y)
            end_second = len(y) / sr
        else:
            end_index = end_second * sr
        events_in_period = events.query(
            f"onset >= {start_second} & offset <= {end_second}")
        uniq_labels = events_in_period["ebird_code"].unique().tolist()
        fig = plt.figure(figsize=(12, 4))
        plt.grid(True)
        display.waveplot(y[start_index:end_index], sr=sr, alpha=0.5)
        used_color = []  # type: ignore
        for i, event in events_in_period.iterrows():
            onset = event.onset
            offset = event.offset
            color = colors[uniq_labels.index(event.ebird_code)]
            # Underscore-prefixed labels are hidden from the legend, so each
            # species appears only once.
            if color not in used_color:
                label = event.ebird_code
                used_color.append(color)
            else:
                label = "_" + event.ebird_code
            plt.axvspan(onset, offset, facecolor=color, alpha=0.5,
                        label=label)
        plt.legend()
        if processed is not None:
            display.waveplot(
                processed[start_index:end_index], sr=sr, alpha=0.5,
                color="red")
        st.pyplot(fig)


@st.cache
def melspectrogram(y: np.ndarray, params: dict, log=True):
    """Compute a (optionally dB-scaled) mel spectrogram (cached by Streamlit)."""
    melspec = librosa.feature.melspectrogram(y=y, **params)
    if log:
        melspec = librosa.power_to_db(melspec)
    return melspec


@st.cache
def spectrogram(y: np.ndarray, params: dict, log=True):
    """Compute a (optionally dB-scaled) STFT spectrogram (cached by Streamlit).

    NOTE(review): power_to_db is applied to the raw complex STFT rather than
    its magnitude -- confirm this is intentional.
    """
    spec = librosa.stft(y, **params)
    if log:
        spec = librosa.power_to_db(spec)
    return spec


def specshow_with_annotation(y: np.ndarray, sr: int, annotation: pd.DataFrame,
                             filename: str, y_processed=None):
    """Spectrogram plot with colored spans for each annotated event of this file."""
    plot_spectrogram = st.checkbox("Spectrogram plot")
    if filename.endswith(".mp3"):
        filename = filename.replace(".mp3", ".wav")
    # BUG FIX: restore the lost f-string placeholder (see
    # waveplot_with_annotation) so events for this file are matched.
    events = annotation.query(f"filename == '{filename}'")
    colors = [
        "#bf6565", "#ac7ceb", "#e3e176", "#f081e1", "#e8cb6b", "#25b4db",
        "#fa787e", "#a9f274", "#1d7335", "#797fb3"
    ]
    if plot_spectrogram:
        st.sidebar.markdown("#### Spectrogram plot settings")
        start_second = st.sidebar.number_input(
            "start second", min_value=0, max_value=len(y) // sr,
            value=0, step=1, key="specshow_start")
        end_second = st.sidebar.number_input(
            "end second", min_value=0, max_value=len(y) // sr,
            value=len(y) // sr, step=1, key="specshow_end")
        start_index = start_second * sr
        if end_second == len(y) // sr:
            end_index = len(y)
        else:
            end_index = end_second * sr
        y_plot = y[start_index:end_index]
        if y_processed is not None:
            y_plot_processed = y_processed[start_index:end_index]
        events_in_period = events.query(
            f"onset >= {start_second} & offset <= {end_second}")
        uniq_labels = events_in_period["ebird_code"].unique().tolist()
        st.sidebar.markdown("##### (Mel)spectrogram parameters")
        mel = st.sidebar.checkbox("Mel scale", value=True)
        n_fft = st.sidebar.number_input(
            "n_fft", min_value=64, max_value=8192, value=1024, step=64)
        hop_length = st.sidebar.number_input(
            "hop_length", min_value=1, max_value=2048, value=320, step=10)
        if mel:
            n_mels = st.sidebar.number_input(
                "n_mels", min_value=1, max_value=512, value=64, step=16)
            fmin = st.sidebar.number_input(
                "fmin", min_value=1, max_value=8192, value=20, step=100)
            fmax = st.sidebar.number_input(
                "fmax", min_value=4000, max_value=44100, value=14000, step=100)
        log = st.sidebar.checkbox("apply log", value=True)
        if mel:
            melspec_params = {
                "n_fft": n_fft,
                "hop_length": hop_length,
                "n_mels": n_mels,
                "fmin": fmin,
                "fmax": fmax,
                "sr": sr
            }
        else:
            spec_params = {
                "n_fft": n_fft,
                "hop_length": hop_length
            }
        if st.button("Show melspectrogram"):
            with st.spinner("Calculating melspectrogram"):
                if mel:
                    spec = melspectrogram(y_plot, melspec_params, log)
                else:
                    spec = spectrogram(y_plot, spec_params, log)
                if y_processed is not None:
                    if mel:
                        spec_processed = melspectrogram(
                            y_plot_processed, melspec_params, log)
                    else:
                        spec_processed = spectrogram(
                            y_plot_processed, spec_params, log)
            height, width = spec.shape
            st.write(f"{height} x {width} matrix")
            if y_processed is not None:
                # Two stacked panels: original (with event spans) on top,
                # processed below.
                with st.spinner("Plotting"):
                    fig = plt.figure(figsize=(12, 8))
                    ax1 = fig.add_subplot(2, 1, 1)
                    if mel:
                        display.specshow(
                            spec, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="mel", fmin=fmin,
                            fmax=fmax, ax=ax1)
                    else:
                        display.specshow(
                            spec, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="linear", ax=ax1)
                    used_color = []  # type: ignore
                    for i, event in events_in_period.iterrows():
                        onset = event.onset
                        offset = event.offset
                        color = colors[uniq_labels.index(event.ebird_code)]
                        if color not in used_color:
                            label = event.ebird_code
                            used_color.append(color)
                        else:
                            label = "_" + event.ebird_code
                        ax1.axvspan(
                            onset, offset, facecolor=color, alpha=0.5,
                            label=label)
                    ax1.legend()
                    ax2 = fig.add_subplot(2, 1, 2)
                    if mel:
                        display.specshow(
                            spec_processed, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="mel", fmin=fmin,
                            fmax=fmax, ax=ax2)
                    else:
                        display.specshow(
                            spec_processed, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="linear", ax=ax2)
            else:
                with st.spinner("Plotting"):
                    fig = plt.figure(figsize=(12, 4))
                    if mel:
                        display.specshow(
                            spec, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="mel", fmin=fmin,
                            fmax=fmax)
                        plt.colorbar()
                    else:
                        display.specshow(
                            spec, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="linear")
                        plt.colorbar()
                    used_color = []  # type: ignore
                    for i, event in events_in_period.iterrows():
                        onset = event.onset
                        offset = event.offset
                        color = colors[uniq_labels.index(event.ebird_code)]
                        if color not in used_color:
                            label = event.ebird_code
                            used_color.append(color)
                        else:
                            label = "_" + event.ebird_code
                        plt.axvspan(
                            onset, offset, facecolor=color, alpha=0.5,
                            label=label)
                    plt.legend()
            st.pyplot(fig)


def specshow(y: np.ndarray, sr: int, y_processed=None,
             tp: pd.DataFrame = None, fp: pd.DataFrame = None):
    """Spectrogram plot; overlays a processed version or tp/fp boxes when given."""
    plot_spectrogram = st.checkbox("Spectrogram plot")
    if plot_spectrogram:
        st.sidebar.markdown("#### Spectrogram plot settings")
        start_second = st.sidebar.number_input(
            "start second", min_value=0, max_value=len(y) // sr,
            value=0, step=1, key="specshow_start")
        end_second = st.sidebar.number_input(
            "end second", min_value=0, max_value=len(y) // sr,
            value=len(y) // sr, step=1, key="specshow_end")
        start_index = start_second * sr
        if end_second == len(y) // sr:
            end_index = len(y)
        else:
            end_index = end_second * sr
        y_plot = y[start_index:end_index]
        if y_processed is not None:
            y_plot_processed = y_processed[start_index:end_index]
        st.sidebar.markdown("##### (Mel)spectrogram parameters")
        mel = st.sidebar.checkbox("Mel scale", value=True)
        n_fft = st.sidebar.number_input(
            "n_fft", min_value=64, max_value=8192, value=1024, step=64)
        hop_length = st.sidebar.number_input(
            "hop_length", min_value=1, max_value=2048, value=320, step=10)
        if mel:
            n_mels = st.sidebar.number_input(
                "n_mels", min_value=1, max_value=512, value=64, step=16)
            fmin = st.sidebar.number_input(
                "fmin", min_value=1, max_value=8192, value=20, step=100)
            fmax = st.sidebar.number_input(
                "fmax", min_value=4000, max_value=44100, value=14000, step=100)
        log = st.sidebar.checkbox("apply log", value=True)
        if mel:
            melspec_params = {
                "n_fft": n_fft,
                "hop_length": hop_length,
                "n_mels": n_mels,
                "fmin": fmin,
                "fmax": fmax,
                "sr": sr
            }
        else:
            spec_params = {
                "n_fft": n_fft,
                "hop_length": hop_length
            }
        if st.button("Show melspectrogram"):
            with st.spinner("Calculating melspectrogram"):
                if mel:
                    spec = melspectrogram(y_plot, melspec_params, log)
                else:
                    spec = spectrogram(y_plot, spec_params, log)
                if y_processed is not None:
                    if mel:
                        spec_processed = melspectrogram(
                            y_plot_processed, melspec_params, log)
                    else:
                        spec_processed = spectrogram(
                            y_plot_processed, spec_params, log)
            height, width = spec.shape
            st.write(f"{height} x {width} matrix")
            if y_processed is not None:
                with st.spinner("Plotting"):
                    fig = plt.figure(figsize=(12, 8))
                    ax1 = fig.add_subplot(2, 1, 1)
                    if mel:
                        display.specshow(
                            spec, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="mel", fmin=fmin,
                            fmax=fmax, ax=ax1)
                    else:
                        display.specshow(
                            spec, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="linear", ax=ax1)
                    ax2 = fig.add_subplot(2, 1, 2)
                    if mel:
                        display.specshow(
                            spec_processed, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="mel", fmin=fmin,
                            fmax=fmax, ax=ax2)
                    else:
                        display.specshow(
                            spec_processed, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="linear", ax=ax2)
            else:
                with st.spinner("Plotting"):
                    fig = plt.figure(figsize=(12, 4))
                    ax = plt.axes()
                    if mel:
                        display.specshow(
                            spec, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="mel", fmin=fmin,
                            fmax=fmax)
                        plt.colorbar()
                    else:
                        display.specshow(
                            spec, sr=sr, hop_length=hop_length,
                            x_axis="time", y_axis="linear")
                        plt.colorbar()
                    # Draw time/frequency boxes for true/false-positive
                    # annotations on the current axes.
                    if tp is not None and len(tp) > 0:
                        for _, row in tp.iterrows():
                            rect = patches.Rectangle(
                                (row["t_min"], row["f_min"]),
                                row["t_max"] - row["t_min"],
                                row["f_max"] - row["f_min"],
                                linewidth=1, edgecolor="g", facecolor="g",
                                alpha=0.5, label="tp")
                            ax.add_patch(rect)
                    if fp is not None and len(fp) > 0:
                        for _, row in fp.iterrows():
                            rect = patches.Rectangle(
                                (row["t_min"], row["f_min"]),
                                row["t_max"] - row["t_min"],
                                row["f_max"] - row["f_min"],
                                linewidth=1, edgecolor="r", facecolor="r",
                                alpha=0.5, label="fp")
                            ax.add_patch(rect)
            st.pyplot(fig)
koukyo1994/streamlit-audio
components/base.py
import streamlit as st

from pathlib import Path


def write_audio_info_to_sidebar(path: Path, info: dict):
    """Show the selected file's name and its metadata key/value pairs in the sidebar."""
    filename = path.name
    # BUG FIX: the f-string lost its placeholder, so the sidebar showed a
    # literal placeholder instead of the selected file's name.
    st.sidebar.subheader(f"Audio file: {filename}")
    st.sidebar.markdown("#### Basic info")
    for key, value in info.items():
        st.sidebar.text(f"{key}: {value}")


def set_start_second(max_value: float):
    """Sidebar slider selecting the playback start offset in whole seconds."""
    second = st.sidebar.slider(
        "start second", min_value=0, max_value=int(max_value), value=0,
        step=1)
    return second


def set_sampling_rate(current_value: int):
    """Sidebar selectbox for the resampling rate, defaulting to the file's own rate.

    Raises ValueError if ``current_value`` is not one of the common rates
    listed below.
    """
    options = [8000, 16000, 22050, 24000, 32000, 44100, 48000]
    index = options.index(current_value)
    sr = st.sidebar.selectbox(
        "Choose sampling rate", options=options, index=index)
    return sr