| branch_name<br>(stringclasses, 149 values) | text<br>(stringlengths, 23 to 89.3M) | directory_id<br>(stringlengths, 40 to 40) | languages<br>(listlengths, 1 to 19) | num_files<br>(int64, 1 to 11.8k) | repo_language<br>(stringclasses, 38 values) | repo_name<br>(stringlengths, 6 to 114) | revision_id<br>(stringlengths, 40 to 40) | snapshot_id<br>(stringlengths, 40 to 40) |
|---|---|---|---|---|---|---|---|---|
| refs/heads/main | <file_sep># NewRepository<br>To learn linking, pushing and pulling | ed5fb95e99279c51542d9f7857bf011c73c8acfe | ["Markdown"] | 1 | Markdown | MrIdul2804/NewRepository | f4b452cffa6de7aeec214d3893fe7fa3f7ef7c91 | 7085005011deb01943915e82e478efb6e0a32443 |
| refs/heads/master | (text column of this row follows below) | | | | | | | |
<repo_name>tobias-gill/Figshare_desktop<file_sep>/custom_widgets/local_article_list.py
"""
"""
import collections
import time
from elasticsearch import Elasticsearch
from PyQt5.QtWidgets import (QWidget, QVBoxLayout, QProgressBar, QAbstractItemView, QTreeWidget, QTreeWidgetItem,
QLineEdit, QHBoxLayout, QComboBox, QPushButton, QDialog, QGridLayout, QSizePolicy,
QCheckBox)
from PyQt5.QtCore import (QThread, pyqtSignal, pyqtSlot, QObject)
from Figshare_desktop.formatting.formatting import (search_bar, search_combo, press_button)
from Figshare_desktop.custom_widgets.article_list import ArticleList
from Figshare_desktop.figshare_articles.determine_type import gen_article
from figshare_interface import Projects
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class LocalArticleList(ArticleList):
def __init__(self, app, OAuth_token, parent):
super(QWidget, self).__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.article_ids = set()
self.initUI()
#####
# Window Widgets
#####
def initTree(self):
"""
Called to initialise the QTreeWidget
:return:
"""
tree = QTreeWidget()
# Format tree to allow for multiple items to be selected
tree.setSelectionMode(QAbstractItemView.ExtendedSelection)
# Allow for sorting of columns
tree.setSortingEnabled(True)
# Create the initial set of column headers
headers = ['id', 'title', 'type', 'tags']
header_item = QTreeWidgetItem(headers)
tree.setHeaderItem(header_item)
self.tree = tree
self.tree_headers = headers
#####
# Widget Actions
#####
@pyqtSlot(bool)
def update_search_field(self):
"""
Updates the items in the search field combobox.
Returns:
None
"""
# Clear combo and add empty first item
self.search_field_combo.clear()
self.search_field_combo.addItem('')
# Get list of fields in local article search index
fields = self.parent.local_article_index.get_fields(schema='local_articles')
self.search_field_combo.addItems(fields)
@pyqtSlot(str)
def add_to_tree(self, local_article_id: str, headers: list=None):
"""
Generates a QTreeWidgetItem for the given local article and adds it to the tree.
:param local_article_id: local article id number
:param headers: list of metadata keys to display as columns (defaults to the current tree headers)
:return:
"""
if headers is None:
headers = self.tree_headers
local_article = self.parent.local_articles[local_article_id]
local_article.gen_qtree_item(headers, local_article.input_dicts())
self.tree.addTopLevelItem(local_article.qtreeitem)
for column in range(self.tree.columnCount()):
self.tree.resizeColumnToContents(column)
def search_on_return(self):
"""
Called when the return key is pressed within the search bar
:return:
"""
field = self.search_field_combo.currentText()
query = self.search_edit.text()
if query == '':
self.search_on_clear()
else:
local_article_index = self.parent.local_article_index
results = local_article_index.perform_search(schema='local_articles', field=field, query=query)
self.result_ids = set()
for docnum, val_dict in results.items():
if 'id' in val_dict:
self.result_ids.add(val_dict['id'])
self.fill_tree(self.tree_headers, self.result_ids)
self.parent.data_articles_window.check_edit()
def search_on_clear(self):
"""
Called when the clear button is pressed within the search bar
:return:
"""
self.fill_tree(self.tree_headers, self.article_ids)
self.parent.data_articles_window.check_edit()
def on_headers_set_pressed(self):
"""
Called when the set headers button is pressed
:return:
"""
# Create a dialog window
dlg = QDialog()
# Create a vertical layout to hold header selections and confirmation buttons
vbox = QVBoxLayout()
# Create a grid layout to hold the QCheckboxes
grid = QGridLayout()
grid.setHorizontalSpacing(15)
grid.setVerticalSpacing(15)
# Add the grid to the layout
vbox.addLayout(grid)
# Create a confirmation button
btn = QPushButton('OK')
btn.pressed.connect(self.on_headers_ok_pressed)
# Add Button to layout
vbox.addWidget(btn)
# Set the dialog window layout
dlg.setLayout(vbox)
# Create an ordered set of field names
fields = self.parent.local_article_index.get_fields('local_articles')
# Define how many columns of check boxes to create
columns = 3
# Empty the tree headers list
#self.tree_headers = []
# Start at row zero
row = 0
# While we still have a field in the ordered set
while fields:
for i in range(columns):
# Here we have to use the exec function to programmatically name each checkbox variable, otherwise the
# connect call would only ever refer to the last button created.
# Further complication from having to remember that the stateChanged signal passes a bool int to the
# lambda function.
if len(fields) == 0:
break
lbl = fields.pop(0)
exec("chk_box_{}_{} = QCheckBox(lbl)".format(row, i)) # Create a checkbox
if lbl in self.tree_headers:
eval("chk_box_{}_{}".format(row, i)).toggle()
eval("chk_box_{}_{}".format(row, i)).stateChanged.connect(lambda state, r=row,
c=i: self.check_box_clicked(r, c))
grid.addWidget(eval("chk_box_{}_{}".format(row, i)), row, i) # add the checkbox to the grid
row += 1 # increase the row counter
self.dlg = dlg
self.headers_box_layout = grid
self.dlg.show()
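# Note: an alternative that avoids the exec/eval calls above (shown only as a sketch,
# not used in this application) is to keep the checkboxes in an ordinary list so each
# one stays referenced, with the row/column still frozen via the lambda defaults:
#     chk_box = QCheckBox(lbl)
#     chk_box.stateChanged.connect(lambda state, r=row, c=i: self.check_box_clicked(r, c))
#     self.header_check_boxes.append(chk_box)  # hypothetical list attribute
#     grid.addWidget(chk_box, row, i)
# The default arguments r=row and c=i bind the current loop values, so every box
# reports its own position rather than the last pair created in the loop.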
def get_selection(self):
"""
Can be called to return a set of the article id numbers of all selected articles
:return:
"""
items = self.tree.selectedItems()
article_ids = set()
for item in items:
article_ids.add(item.text(0))
return article_ids
def get_all(self):
"""
Can be called to return the article id numbers of all articles in the tree
:return:
"""
self.tree.selectAll()
items = self.tree.selectedItems()
article_ids = set()
for item in items:
article_ids.add(item.text(0))
return article_ids
def add_to_articles(self, article_id):
"""
Convenience function to add an article id to the article_ids set.
Args:
article_id: local article id number.
Returns:
None
"""
self.article_ids.add(article_id)
<file_sep>/data_window/search_index.py
"""
"""
import os
from whoosh.fields import *
from whoosh.index import create_in
from whoosh.qparser import QueryParser, MultifieldParser
class ArticleIndex(object):
"""
"""
def __init__(self, index_dir: str='figshare_desktop_index'):
super().__init__()
self.index_dir = index_dir
# If there is no index directory create it
if not os.path.exists(self.index_dir):
os.mkdir(self.index_dir)
self.schemas = {}
self.document_types = set()
#####
# Index Functions
#####
def list_schema(self):
"""
Returns a list of the schema names currently in the index
:return: list
"""
names = []
for name in self.schemas.keys():
names.append(name)
return names
def delete_index(self):
"""
Called to remove the index from the disk.
:return:
"""
if os.path.isdir(self.index_dir):
os.rmdir(self.index_dir)
#####
# Schema Functions
#####
def create_schema(self, schema_name: str):
"""
Creates a schema and adds it to the index
:param schema_name:
:return:
"""
index = create_in(self.index_dir, Schema())
self.schemas[schema_name] = index
def add_field(self, schema, field_name, field):
"""
:return:
"""
writer = self.schemas[schema].writer()
writer.add_field(field_name, field)
writer.commit()
def add_TEXT(self, schema, field_name: str, stored: bool=False):
"""
Adds a text field to the given schema
:param schema: index schema
:param field_name: name of the field to be added
:param stored: should field be stored
:return:
"""
self.add_field(schema, field_name, TEXT(stored=stored))
def add_KEYWORD(self, schema, field_name: str, stored: bool=False, commas: bool=True):
"""
Adds a keyword field to the given schema
:param schema: index schema
:param field_name: name of the field to be added
:param stored: should field be stored
:param commas: is this field a list of comma separated variables
:return:
"""
self.add_field(schema, field_name, KEYWORD(stored=stored, commas=commas))
def add_ID(self, schema, field_name: str, stored: bool=False, unique: bool=False):
"""
Adds an ID field to the given schema
:param schema: index schema
:param field_name: name of the field to be added
:param stored: should field be stored
:param unique: should the field be treated as a unique identifier
:return:
"""
self.add_field(schema, field_name, ID(stored=stored, unique=unique))
def add_NUMERIC(self, schema, field_name: str, stored: bool=False):
"""
Adds a numeric (integer or float) field to the given schema
:param schema: index schema
:param field_name: name of the field to be added
:param stored: should field be stored
:return:
"""
self.add_field(schema, field_name, NUMERIC(float, stored=stored))
def add_DATETIME(self, schema, field_name: str, stored: bool=False):
"""
Adds a datetime field to the given schema
:param schema: index schema
:param field_name: name of the field to be added
:param stored: should field be stored
:return:
"""
self.add_field(schema, field_name, DATETIME(stored=stored))
def add_BOOLEAN(self, schema, field_name: str, stored: bool=False):
"""
Adds a boolean field to the given schema
:param schema: index schema
:param field_name: name of the field to be added
:param stored: should field be stored
:return:
"""
self.add_field(schema, field_name, BOOLEAN(stored=stored))
def add_NGRAM(self, schema, field_name: str, stored: bool=False):
"""
Adds an N-Gram field to the given schema
:param schema: index schema
:param field_name: name of the field to be added
:param stored: should field be stored
:return:
"""
self.add_field(schema, field_name, NGRAM(stored=stored))
def get_fields(self, schema):
"""
Returns a list of schema fields
:return:
"""
return self.schemas[schema].schema.names()
def remove_field(self, schema, field_name: str):
"""
Removes a given field from the schema
:param field_name: name of the field to be removed
:return:
"""
writer = self.schemas[schema].writer()
writer.remove_field(field_name)
writer.commit()
#####
# Document Functions
#####
def addDocument(self, schema: str, data_dict: dict):
"""
Adds a document to the index with fields from a dictionary
Args:
schema: Name of the Whoosh index schema to add document to.
data_dict: Dictionary of document metadata from which schema relevant key, value pairs will be extracted.
Returns:
None
"""
# Create an empty dictionary to hold schema relevant key, value pairs.
document_dict = {}
for key, value in data_dict.items():
# Check to see if key is in the schema.
if key in self.get_fields(schema):
if value is not None and value != '':
# For lists create a string with comma separated tags.
if type(value) is list and value != []:
tags = ''
for tag in value:
tags += '{},'.format(tag)
value = tags[:-1] # Drop the last comma.
document_dict[key] = u"{}".format(value) # Convert value to unicode and add to the doc dict
# Add the new document to the index schema
writer = self.schemas[schema].writer()
writer.add_document(**document_dict)
writer.commit()
def updateDocument(self, schema, data_dict: dict):
"""
Updates an existing document in the given schema
:param schema:
:param data_dict: must contain the unique document identifier as a field
:return:
"""
document_dict = {}
for key, value in data_dict.items():
if key in self.get_fields(schema):
if value is not None and value != '':
if type(value) is list and value != []:
tags = ''
for tag in value:
tags += '{},'.format(tag)
value = tags[:-1]
document_dict[key] = u"{}".format(value)
writer = self.schemas[schema].writer()
writer.update_document(**document_dict)
writer.commit()
def removeDocument(self, schema, docnum: int):
"""
Removes a document from the index by its document number
:param docnum: docnum
:return:
"""
writer = self.schemas[schema].writer()
writer.delete_document(docnum)
writer.commit()
#####
# Search Functions
#####
def perform_search(self, schema, field: str, query: str, page: int=1, pagelen: int=20):
"""
Performs a query of the index from the given field and query string
:param schema:
:param field: String. Index field
:param query: String.
:param page: int. page of results to start returning from
:param pagelen: int. number of results to display per page
:return: dict. mapping of document numbers to stored field dictionaries
"""
if field == '':
# Get All Schema fields
fields = self.get_fields(schema=schema)
results_dict = {}
with self.schemas[schema].searcher() as searcher:
last_page = False
while not last_page:
parser = MultifieldParser(fields, self.schemas[schema].schema)
search_query = parser.parse(query)
results = searcher.search_page(search_query, page, pagelen)
if results.total > 0:
for doc in range(results.pagelen):
results_dict[results.docnum(doc)] = results.results.fields(doc)
last_page = results.is_last_page()
page += 1
return results_dict
else:
results_dict = {}
with self.schemas[schema].searcher() as searcher:
last_page = False
while not last_page:
parser = QueryParser(field, self.schemas[schema].schema)
search_query = parser.parse(query)
results = searcher.search_page(search_query, page, pagelen)
if results.total > 0:
for doc in range(results.pagelen):
results_dict[results.docnum(doc)] = results.results.fields(doc)
last_page = results.is_last_page()
page += 1
return results_dict
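
# Minimal usage sketch of ArticleIndex (illustrative only, not part of the application).
# It assumes the whoosh package is installed and that the example index directory is
# writable; the schema name, fields, and document values below are made up for demonstration.
if __name__ == '__main__':
    index = ArticleIndex(index_dir='example_index')
    index.create_schema('local_articles')
    index.add_ID('local_articles', 'id', stored=True, unique=True)
    index.add_TEXT('local_articles', 'title', stored=True)
    index.add_KEYWORD('local_articles', 'tags', stored=True)
    index.addDocument('local_articles',
                      {'id': '1', 'title': 'STM topography of Si(111)', 'tags': ['stm', 'silicon']})
    # Field-specific search; an empty field string would search across all schema fields.
    print(index.perform_search(schema='local_articles', field='title', query='topography'))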
<file_sep>/data_window/upload_control_widget.py
"""
"""
import os
from requests import HTTPError
from PyQt5.QtWidgets import (QWidget, QPushButton, QLineEdit, QMessageBox, QFileDialog, QAbstractItemView,
QTextEdit, QGridLayout, QHBoxLayout, QVBoxLayout, QSizePolicy, QTreeWidgetItem,
QInputDialog)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, QThread, pyqtSlot, pyqtSignal, QObject)
from Figshare_desktop.formatting.formatting import (press_button)
from figshare_interface.figshare_structures.projects import Projects
from figshare_interface.figshare_structures.collections import Collections
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class UploadControl(QWidget):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.__threads = []
self.initUI()
#####
# Widgets
#####
def initUI(self):
vbox = QVBoxLayout()
vbox.addWidget(self.start_upload_btn())
vbox.addWidget(self.stop_upload_btn())
self.setLayout(vbox)
def start_upload_btn(self):
"""
Creates a QPushButton that will start the upload process
:return:
"""
btn = QPushButton()
press_button(self.app, btn)
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/figshare_upload.png')))
btn.setToolTip('Begin Upload')
btn.setToolTipDuration(1)
btn.setEnabled(False)
btn.pressed.connect(self.start_upload)
self.start_btn = btn
return self.start_btn
def stop_upload_btn(self):
"""
Creates a QPushButton that will try to stop the upload process
:return:
"""
btn = QPushButton()
press_button(self.app, btn)
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/exit.png')))
btn.setToolTip('Stop Upload')
btn.setToolTipDuration(1)
btn.setEnabled(False)
#btn.pressed.connect(self.stop_upload)
self.stop_btn = btn
return self.stop_btn
#####
# Actions
#####
def enable_start(self):
"""
Called to enable the start upload button
:return:
"""
self.start_btn.setEnabled(True)
def start_upload(self):
"""
Starts the upload of articles to the defined figshare project
:return:
"""
# Locally define the upload queue
figshare_add_window = self.parent.figshare_add_window
upload_queue = figshare_add_window.upload_queue
# Locally define the upload log
upload_log = figshare_add_window.upload_log
# Setup the Upload Worker Thread
worker = UploadWorker(self.token, self.parent)
upload_thread = QThread()
self.__threads.append((upload_thread, worker))
worker.moveToThread(upload_thread)
# Remove local articles from the queue as they are uploaded
worker.sig_step.connect(lambda local_article_id, figshare_article_id,
article_title: upload_queue.fill_tree())
# Log the upload
worker.sig_step.connect(upload_log.add_success_log)
# Log errors
worker.sig_error.connect(upload_log.add_error_log)
worker.sig_error.connect(lambda local_id, title, errs: upload_queue.fill_tree())
upload_thread.started.connect(worker.work)
upload_thread.start()
class UploadWorker(QObject):
sig_step = pyqtSignal(str, int, str)
sig_done = pyqtSignal(bool)
sig_error = pyqtSignal(str, str, tuple)
sig_abort = pyqtSignal(bool)
def __init__(self, OAuth_token, parent):
super().__init__()
self.token = OAuth_token
self.parent = parent
@pyqtSlot()
def work(self):
"""
:return:
"""
# Locally define some windows and widgets
figshare_add_window = self.parent.figshare_add_window
upload_queue = figshare_add_window.upload_queue
self.project_id = figshare_add_window.upload_project
self.collection_id = figshare_add_window.upload_collection
# Get the upload project id
if self.project_id is not None:
projects = Projects(self.token)
else:
return self.sig_done.emit(True)
if self.collection_id is not None and self.collection_id != '':
collections = Collections(self.token)
while upload_queue.local_ids:
local_article_id = upload_queue.local_ids.pop()
self.project_upload(projects, local_article_id)
def project_upload(self, projects_instance, local_article_id: str):
"""
Creates a new figshare article and uploads the file associated with it
:param projects_instance: Instance of the Figshare_API_Interface Projects Class
:param local_article_id: local id number of the article to be uploaded
:return:
"""
# Get the local article
local_article = self.parent.local_articles[local_article_id]
# Article Title
article_title = local_article.figshare_metadata['title']
# Generate the Figshare upload dictionary
upload_dict = local_article.get_upload_dict()
# Get the local file location
local_location = local_article.figshare_desktop_metadata['location']
# Upload file to the figshare project
try:
# Create a new figshare article in the given project
figshare_article_id = projects_instance.create_article(self.project_id, upload_dict)
# Upload files to the new article
projects_instance.upload_file(figshare_article_id, local_location)
# Signal that the article has been created and uploaded
self.sig_step.emit(local_article_id, figshare_article_id, article_title)
except FileExistsError as err:
err_args = err.args
self.sig_error.emit(local_article_id, article_title, err_args)
except HTTPError as err:
err_args = err.args
self.sig_error.emit(local_article_id, article_title, err_args)
except ValueError as err:
err_args = err.args
self.sig_error.emit(local_article_id, article_title, err_args)
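
# Minimal, illustrative sketch of the worker/QThread wiring used by UploadControl.start_upload
# above (assumes PyQt5 is installed). DemoWorker and its signals are invented purely for
# demonstration and are not part of Figshare Desktop.
if __name__ == '__main__':
    import sys
    from PyQt5.QtCore import QCoreApplication

    class DemoWorker(QObject):
        sig_step = pyqtSignal(str)
        sig_done = pyqtSignal()

        @pyqtSlot()
        def work(self):
            # Pretend to upload two articles, reporting progress through signals.
            for name in ('article_a', 'article_b'):
                self.sig_step.emit(name)
            self.sig_done.emit()

    app = QCoreApplication(sys.argv)
    thread, worker = QThread(), DemoWorker()
    worker.moveToThread(thread)             # run the worker off the main thread
    worker.sig_step.connect(print)          # progress is handled back in the main thread
    worker.sig_done.connect(thread.quit)
    worker.sig_done.connect(app.quit)
    thread.started.connect(worker.work)
    thread.start()
    app.exec_()
    thread.wait()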
<file_sep>/custom_widgets/extended_combo.py
import sys
from PyQt5.QtWidgets import (QComboBox, QApplication, QCompleter)
from PyQt5.QtCore import (Qt, QSortFilterProxyModel)
class ExtendedCombo(QComboBox):
def __init__(self):
super().__init__()
self.setFocusPolicy(Qt.StrongFocus)
self.setEditable(True)
self.completer = QCompleter(self)
# always show all completions
self.completer.setCompletionMode(QCompleter.UnfilteredPopupCompletion)
self.pFilterModel = QSortFilterProxyModel(self)
self.pFilterModel.setFilterCaseSensitivity(Qt.CaseInsensitive)
self.completer.setPopup(self.view())
self.setCompleter(self.completer)
self.lineEdit().textEdited[str].connect(self.pFilterModel.setFilterFixedString)
self.completer.activated.connect(self.setTextIfCompleterIsClicked)
def setModel(self, model):
super(ExtendedCombo, self).setModel(model)
self.pFilterModel.setSourceModel(model)
self.completer.setModel(self.pFilterModel)
def setModelColumn(self,column):
self.completer.setCompletionColumn( column )
self.pFilterModel.setFilterKeyColumn( column )
super(ExtendedCombo, self).setModelColumn( column )
def view(self):
return self.completer.popup()
def index( self ):
return self.currentIndex()
def setTextIfCompleterIsClicked(self, text):
if text:
index = self.findText(text)
self.setCurrentIndex(index)
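
# Illustrative usage sketch (assumes PyQt5 is installed and a display is available);
# the model contents are made up for demonstration.
if __name__ == '__main__':
    from PyQt5.QtCore import QStringListModel

    app = QApplication(sys.argv)
    combo = ExtendedCombo()
    model = QStringListModel(['alpha', 'beta', 'gamma'], combo)  # parented to the combo
    combo.setModel(model)  # also feeds the filter proxy and the completer
    combo.show()
    sys.exit(app.exec_())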
<file_sep>/custom_widgets/tag_button.py
"""
"""
from PyQt5.QtWidgets import (QWidget, QPushButton)
from PyQt5.QtGui import (QFont, QFontMetrics)
from PyQt5.QtCore import (Qt)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class QTagButton(QPushButton):
def __init__(self, label: str, tag_set: set, tooltip_lbl: str=None):
"""
Formats the tag button
:param label: String of tag.
:param tag_set: set of tag strings
"""
super().__init__()
self.label = label
self.tag_set = tag_set
self.setText(str(label))
font = QFont('SansSerif', 9)
font.setBold(False)
self.setFont(font)
font_metric = QFontMetrics(font)
width = font_metric.width(str(label)) + 20
if tooltip_lbl is not None:
self.setToolTip(str(tooltip_lbl))
self.setToolTipDuration(1000)
self.setMinimumWidth(width)
self.setMaximumWidth(width)
def mousePressEvent(self, event):
"""
Overrides the existing mousePressEvent. If a right click occurs the tag is deleted
:param event:
:return:
"""
if event.button() == Qt.RightButton:
self.tag_set.remove(self.label)
self.deleteLater()
elif event.button() == Qt.LeftButton:
return QWidget.mousePressEvent(self, event)
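
# Illustrative usage sketch (assumes PyQt5 is installed and a display is available);
# the tag values are made up for demonstration.
if __name__ == '__main__':
    import sys
    from PyQt5.QtWidgets import QApplication

    app = QApplication(sys.argv)
    tags = {'stm'}
    btn = QTagButton('stm', tags, tooltip_lbl='right click to remove')
    btn.show()
    sys.exit(app.exec_())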
<file_sep>/local_articles/local_stm_articles/local_topography_article.py
"""
"""
from os.path import splitext
from figshare_interface.figshare_structures.projects import Projects
from figshare_interface.file_parsers import flatfile_3 as FlatFile
from figshare_interface.file_parsers.zyvex_parser import ZyvexFile
from ...figshare_articles.stm_articles.topography_article import TopoArticle
from ..local_article import LocalArticle
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class LocalTopoArticle(TopoArticle, LocalArticle):
def __init__(self, OAuth_token, filename, file_ext):
# Initialize STM topography metadata dictionary
self.stm_topo_metadata = {'type': None,
'vgap': None,
'current': None,
'xres': None,
'yres': None,
'xinc': None,
'yinc': None,
'xreal': None,
'yreal': None,
'unit': None,
'unitxy': None,
'date': None,
'direction': None,
'sample': None,
'users': None,
'substrate': None,
'adsorbate': None,
'prep': None,
'notebook': None,
'notes': None
}
self.file_ext = file_ext
LocalArticle.__init__(self, OAuth_token, filename)
self.read_file(filename)
self.figshare_metadata['type'] = 'topo'
def read_file(self, filename):
"""
Determines the type of STM file and uses the correct parse function to fill the stm_topo_metadata fields.
:param filename: str. Local path to file.
:return:
"""
file_types = {
# OMICRON FLAT FILES
'.Z_flat': FlatFile,
# ZYVEX Files
'.zad': ZyvexFile
}
if self.file_ext in file_types:
if self.file_ext == '.Z_flat':
file_data = file_types[self.file_ext].load(filename)
file_info = file_data[0].info
directions_str = ''
for direction in file_data:
directions_str += direction.info['direction'] + ', '
self.stm_topo_metadata['direction'] = directions_str
else:
file_data = file_types[self.file_ext].load(filename)
file_info = file_data.info
for key in file_info:
if key in self.stm_topo_metadata:
# Added string to comply with new figshare custom fields formatting
self.stm_topo_metadata[key] = str(file_info[key])
def index_schema(self):
"""
Creates a dictionary to create a Whoosh index schema from
:return:
"""
schema_dict = {'type': ('text', True),
'vgap': ('numeric', True),
'current': ('numeric', True),
'xres': ('numeric', True),
'yres': ('numeric', True),
'xinc': ('numeric', True),
'yinc': ('numeric', True),
'xreal': ('numeric', True),
'yreal': ('numeric', True),
'unit': ('text', True),
'unitxy': ('text', True),
'date': ('text', True),
'direction': ('keyword', True),
'sample': ('text', True),
'users': ('keyword', True),
'substrate': ('text', True),
'adsorbate': ('text', True),
'prep': ('text', True),
'notebook': ('keyword', True),
'notes': ('text', True)
}
return schema_dict
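
# Illustrative sketch (not part of Figshare Desktop) of how a schema dictionary of the
# form returned by index_schema() above could be registered with an ArticleIndex from
# data_window/search_index.py. The helper name and the exact wiring are assumptions made
# only for demonstration.
def _example_register_schema(index, schema_name, schema_dict):
    # Map each (type, stored) pair onto the matching ArticleIndex add_* method.
    adders = {'text': index.add_TEXT,
              'numeric': index.add_NUMERIC,
              'keyword': index.add_KEYWORD}
    for field_name, (field_type, stored) in schema_dict.items():
        adders[field_type](schema_name, field_name, stored=stored)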
<file_sep>/custom_widgets/button_field.py
"""
"""
from PyQt5.QtWidgets import (QWidget, QLineEdit, QHBoxLayout, QScrollArea, QSizePolicy)
from PyQt5.QtGui import (QFont, QColor, QPainter)
from PyQt5.QtCore import (Qt, QRect)
from Figshare_desktop.custom_widgets.tag_button import QTagButton
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class QButtonField(QWidget):
"""
A custom widget that presents an empty QLineEdit-style text field; when return is pressed the current
text is saved as a button/tag.
"""
def __init__(self, parent=None):
super().__init__()
if parent is not None:
self.setParent(parent)
self.initUI()
def initUI(self):
# Create layout to hold the tags
self.tag_box = QHBoxLayout()
self.tag_box.setAlignment(Qt.AlignLeft)
# Create a Widget to hold the tag box
self.tag_widget = QWidget()
self.tag_widget.setLayout(self.tag_box)
# Format Geometry of Widget
self.width = 0.75 * (self.parent().geometry().width())
self.setMaximumWidth(self.width)
# Create Scroll area to put the tag box widget
self.tag_scroll = QScrollArea()
self.tag_scroll.setWidget(self.tag_widget)
self.tag_scroll.setWidgetResizable(True)
self.tag_scroll.setMinimumWidth(self.width * (2 / 3))
self.tag_scroll.setMaximumWidth(self.width * (2 / 3))
self.tag_scroll.setToolTip('Right click to remove')
self.tag_scroll.setToolTipDuration(1000)
# Create layout to hold tag layout and line edit
self.hbox = QHBoxLayout()
self.hbox.addWidget(self.tag_scroll)
self.hbox.addWidget(self.create_linedit())
# Create a set to hold all the existing tags
self.tags = set()
self.setLayout(self.hbox)
def paintEvent(self, e):
"""
Draws the widget
:param e:
:return:
"""
paint_event = QPainter()
paint_event.begin(self)
self.drawWidget(paint_event)
paint_event.end()
def drawWidget(self, paint_event):
"""
Constructs the individual components together
:param paint_event: QPainter
:return:
"""
self.create_frame(paint_event)
def create_frame(self, paint_event):
"""
Creates and formats the encompassing frame
:return:
"""
offset = 0
parent = self.parent()
geom = parent.geometry()
frame_w = (geom.width() - offset) * 0.9
frame_h = geom.height() - offset
self.frame_geom = QRect(offset, offset, frame_w, frame_h)
paint_event.setPen(QColor(255, 255, 255))
paint_event.setBrush(QColor(255, 255, 255))
paint_event.drawRect(offset, offset, frame_w, frame_h)
def add_tag(self, label: str, tooltip_lbl: str=None):
"""
Adds a tag button to the frame
:param label: String.
:return:
"""
btn = QTagButton(label, self.tags, tooltip_lbl)
self.tags.add(label)
self.tag_box.addWidget(btn)
def create_linedit(self):
"""
Creates a formatted line edit to create new tags with.
:return:
"""
edit = QLineEdit()
edit.setPlaceholderText('Enter new tag here')
edit.setToolTip('Press return to add tag')
font = QFont('SansSerif', 11)
edit.setFont(font)
edit.returnPressed.connect(lambda: self.on_return_pressed(edit))
return edit
def on_return_pressed(self, edit):
"""
Called when a new tag is to be created
:param edit: QLineEdit from where to take text
:return:
"""
text = edit.text()
if text != '' and text not in self.tags:
self.add_tag(text)
edit.clear()
elif text in self.tags:
edit.clear()
def get_tags(self):
"""
Returns the set of tags as a list object
:return: list. Of tag strings
"""
return list(self.tags)
<file_sep>/figshare_articles/collection_article.py
"""
Base Class of Figshare Articles for use in Figshare Desktop
"""
from PyQt5.QtWidgets import (QTreeWidgetItem)
from figshare_interface.figshare_structures.collections import Collections
from figshare_interface.http_requests.figshare_requests import issue_request
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class Article(object):
"""
Figshare Article Base Class
"""
def __init__(self, OAuth_token: str, collection_id: int, article_id: str):
"""
Creates the base Figshare metadata dictionary for an article and fills all available information.
Args:
OAuth_token: Authentication token generated from Figshare login.
collection_id: ID number of the Figshare collection the article is within.
article_id: ID number of the Figshare article.
Returns:
None
Raises:
None
"""
# Create class variables
self.token = OAuth_token
self.collection_id = collection_id
self.article_id = article_id
# Initialise dictionary for basic Figshare metadata.
self.figshare_metadata = {'title': None,
'id': None,
'description': None,
'tags': None,
'references': None,
'categories': None,
'authors': None,
'defined_type': None,
'funding': None,
'license': None,
'size': None,
'version': None,
'created_date': None,
'modified_date': None,
'published_date': None,
'up_to_date': None,
'status': None,
'group_id': None}
# Initialise dictionary of metadata needed only for Figshare Desktop
self.figshare_desktop_metadata = {
'location': None,
'thumb': None,
'public_modified_date': None
}
# Initialize an empty object that will hold generated QTreeWidgetItem representations of the article
self.qtreeitem = None
# Request the article information from Figshare and fill the initialised metadata dictionaries
self.fill_info()
# Set the location field to Figshare to denote that it is not a local file
self.figshare_desktop_metadata['location'] = 'Figshare'
def gen_figshare_metadata(self, input_dict: dict):
"""
Fill values in basic figshare_metadata dictionary from an input dictionary.
Args:
input_dict: Common keys in input_dict and figshare_metadata dict have values set to those of input_dict
Returns:
None
Raises:
None
"""
# Iterate through keys in the input dictionary and check to see if they are in figshare_metadata
for key in input_dict:
if key in self.figshare_metadata:
# For non-None values in input_dict set the values of figshare_metadata to those of input_dict
if input_dict[key] != 'None' and input_dict[key] is not None:
self.figshare_metadata[key] = input_dict[key]
# For published articles check to see if the public article is up-to-date
if self.figshare_metadata['status'] == 'public':
# Get the date of the last modification to the public article
result = issue_request('GET', 'articles/{a_id}'.format(a_id=self.article_id), token=self.token)
date = result['modified_date']
self.figshare_desktop_metadata['public_modified_date'] = date
# Perform up-to-date check
self.check_uptodate()
def check_uptodate(self):
"""
Compares the private and public versions of an article to determine if it is up-to-date
Args:
Returns:
None
Raises:
None
"""
# For published articles
if self.figshare_metadata['status'] == 'public':
# Compare the private and public modified dates and set the up-to-date field appropriately
if self.figshare_metadata['modified_date'] != self.figshare_desktop_metadata['public_modified_date']:
self.figshare_metadata['up_to_date'] = False
else:
self.figshare_metadata['up_to_date'] = True
# For unpublished articles, denote as such
else:
self.figshare_metadata['up_to_date'] = 'Unpublished'
def input_dicts(self):
"""
Returns a list of all metadata dictionaries associated with this article. Should be overridden by child classes
Args:
Returns:
list of dictionaries containing all metadata dictionaries
Raises:
None
"""
return [self.figshare_metadata, self.figshare_desktop_metadata]
def gen_qtree_item(self, input_list: list, input_dicts: list=None):
"""
Create a QTreeWidgetItem from a list of keys corresponding to values in dictionaries of metadata fields.
Args:
input_list: List of strings corresponding to keys in the input dictionaries. The order of the list
dictates the order of the columns generated in the QTreeWidgetItem.
input_dicts: List of dictionaries from which to generate the input list values. The order of the input
dictionaries is important if multiple dictionaries contain the same key. Only the first will be used.
Returns:
None
Raises:
None
"""
# If no input dictionaries are given take the default metadata dictionaries
if input_dicts is None:
input_dicts = self.input_dicts()
# Create an empty list to generate the QTreeWidgetItem from
tree_list = []
# Loop through strings in the input_list
for key in input_list:
key_found = False # Initialise that key has not yet been found
# Loop through input dictionaries
for d in input_dicts:
# If the key is in the current dictionary add its value to the list and break the dictionary loop
if key in d:
tree_list.append(str(d[key]))
key_found = True
break
# If the key was not found then append a blank value to the list
if not key_found:
tree_list.append('')
# Create a blank QTreeWidgetItem
self.qtreeitem = QTreeWidgetItem()
# Loop through the keys in the input list and add string versions of them to the columns in QTreeWidgetItem
column = 0
for key in tree_list:
if isinstance(key, bool):
self.qtreeitem.setData(column, 0, str(key))
else:
self.qtreeitem.setData(column, 0, key)
column += 1
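# Example with illustrative values: gen_qtree_item(['id', 'title']) with
# figshare_metadata = {'id': 12345, 'title': 'Example article'} yields a QTreeWidgetItem
# whose column 0 holds '12345' and column 1 holds 'Example article'; keys missing from
# every input dictionary become empty columns.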
@staticmethod
def recreate_custom_fields(custom_fields):
d = {}
for row in custom_fields:
d[row['name']] = row['value']
return d
def fill_info(self):
"""
Retrieves article information from Figshare and fills the local metadata dictionaries from it.
Args:
Returns:
None
Raises:
None
"""
# Create an instance of the Collections class
collections = Collections(self.token)
# Retrieve a dictionary of article information from the collection
basic_info = collections.get_article(self.article_id)
# Use the retrieved dictionary to fill the local metadata dictionaries
self.gen_figshare_metadata(basic_info)
# Perform a check on the format of the filled information
self.check_basic()
def update_info(self, input_dict: dict):
"""
Updates the local metadata dictionaries from a given input dictionary.
Args:
input_dict: Key, Value pairs to overwrite existing values in the article metadata dictionaries.
Returns:
None
Raises:
None
"""
# Use the input dictionary to fill the local metadata dictionaries
self.gen_figshare_metadata(input_dict)
# Perform a check on the format of the filled information
self.check_basic()
def check_basic(self):
"""
Checks the formatting of metadata fields in the figshare_metadata dictionary
Args:
Returns:
None
Raises:
None
"""
# TITLE
# =====
# Title metadata should be a string with length between 3 and 500 characters
title = self.figshare_metadata['title'] # get the current value for title
if title is not None:
# Convert the title to a string if it is not one already
if type(title) is not str:
title = str(title)
# If the title was a list then it will have square brackets around it. Remove these
if title[0] == '[':
title = title[1:-1]
# The article title must be between 3 and 500 characters
if len(title) < 3:
title += '000'
if len(title) > 500:
title = title[:500]
# Set the metadata title to the edited version
self.figshare_metadata['title'] = title
# DESCRIPTION
# ===========
# Description should be a string of unlimited length.
# Here we will remove any vestige of the description being within a list
descr = self.figshare_metadata['description'] # get the current value
if descr is not None:
# Convert to a string if necessary, and remove brackets from a list if present
if type(descr) is not str:
descr = str(descr)
if descr[0] == '[' and descr[-1] == ']':
descr = descr[1:-1]
# Set the metadata value to the edited version
self.figshare_metadata['description'] = descr
# TAGS
# ====
# Tags should be a list of strings.
tags = self.figshare_metadata['tags'] # get the current value
if tags is not None:
# If the tags are not in a list attempt to structure them into one
if type(tags) is not list:
tags = str(tags)
if tags[0] == '[':
tags = tags[1:-1]
tags = [tags]
# For each tag in the list of tags check that it is a string and does not have brackets around it
else:
for tag in range(len(tags)):
if type(tags[tag]) is not str:
tags[tag] = str(tags[tag])
elif tags[tag][0] == '[':
tags[tag] = tags[tag][1:-1]
# Set the metadata value to the edited version
self.figshare_metadata['tags'] = tags
# REFERENCES
# ==========
# References should be a list of strings of valid URLS
refs = self.figshare_metadata['references'] # get the current value
if refs is not None:
# If references are not in a list convert to one
if type(refs) is not list:
refs = str(refs)
if refs[0] == '[':
refs = refs[1:-1]
checked_refs = [refs]
else:
# For each reference in the list remove encompassing brackets and check that it is a valid url
for ref in range(len(refs)):
if type(refs[ref]) is not str:
refs[ref] = str(refs[ref])
elif refs[ref][0] == '[':
refs[ref] = refs[ref][1:-1]
if refs[ref][0:7] != 'http://':
refs[ref] = None
# Remove any references that were not valid
checked_refs = []
for ref in refs:
if ref is not None:
checked_refs.append(ref)
# Set the metadata value to the edited version
self.figshare_metadata['references'] = checked_refs
# CATEGORIES
# ==========
# Categories should be a list of integers that relate to valid category ID numbers on Figshare.
# Here we will attempt to construct this list from either given integers, or strings that correspond to the
# category names on Figshare.
cats = self.figshare_metadata['categories'] # get the current value
if cats is not None:
# Get a dictionary of categories from Figshare with id and name pairs
allowed_cats = issue_request(method='GET', endpoint='categories', token=self.token)
cat_dict = {}
for cat in allowed_cats:
cat_dict[cat['id']] = cat['title']
# If the current value of categories is a list check each of the items
if type(cats) is list:
checked_cats = [] # create an empty list to hold checked categories
for cat in cats:
cat_type = type(cat)
# If the category value is a dictionary object check that it is a valid id, name pair
if cat_type is dict:
if 'id' in cat:
cat_id = cat['id']
if cat_id in cat_dict:
checked_cats.append(cat_id)
# If the category value is a string attempt to convert it to a valid category ID number
elif cat_type is str:
# If the string can be converted directly to an integer do so and check for a valid ID
try:
cat_id = int(cat)
if cat_id in cat_dict:
checked_cats.append(cat_id)
# If the string could not be converted check if the string is the name of a category
except:
if cat in cat_dict.values():
for key, value in cat_dict.items():
if value == cat:
checked_cats.append(key)
break
# If the category value is an integer check to see if it is a valid ID
elif cat_type is int:
if cat in cat_dict:
checked_cats.append(cat)
# If the categories value is not a list set it to none
else:
checked_cats = None
# Set the metadata value to the edited version
self.figshare_metadata['categories'] = checked_cats
# AUTHORS
# =======
# Authors metadata should comprise of a list of dictionaries which have either or the two following structures
# {'id': int}
# {'name': string}
auths = self.figshare_metadata['authors'] # get the current value
if auths is not None:
auths_type = type(auths) # get the type of the current value
# If value is a list and not empty check its values
if auths_type is list and auths != []:
checked_auths = [] # create an empty list to hold the author dictionaries
# Iterate through the list of authors
for auth in auths:
item_type = type(auth) # get the type of the value
# If the value is a dictionary object
if item_type is dict:
if 'id' in auth: # If the value is an id
try:
id_int = int(auth['id'])
temp_d = {'id': id_int}
checked_auths.append(temp_d)
except:
pass
elif 'name' in auth: # If the value is a name
if type(auth['name']) is str:
temp_d = {'name': auth['name']}
checked_auths.append(temp_d)
elif item_type is str:
try:
user_id = int(auth)
checked_auths.append({'id': user_id})
except:
checked_auths.append({'name': auth})
elif item_type is int:
checked_auths.append({'id': auth})
elif auths_type is str:
checked_auths = [{'name': auths}]
elif auths_type is int:
checked_auths = [{'id': auths}]
else:
checked_auths = None
self.figshare_metadata['authors'] = checked_auths
# DEFINED TYPE
# ============
# Defined type should be a string equal to one of ten pre-defined values. Here we will normalise inputs that
# are either the string itself or an integer corresponding to the position in the list
def_type = self.figshare_metadata['defined_type'] # get the current value
# define a dictionary of the allowed values and corresponding integer keys
types = {1: 'figure', 2: 'media', 3: 'dataset', 4: 'fileset', 5: 'poster', 6: 'paper', 7: 'presentation',
8: 'thesis', 9: 'code', 10: 'metadata'}
if def_type is not None:
def_type_type = type(def_type) # get the type of the metadata value
# If a string check that it is one of the ten pre-defined values
if def_type_type is str:
if def_type not in types.values():
def_type = None
# If in integer check to see if it is in the pre-defined keys
elif def_type_type is int:
if def_type not in types:
def_type = None
else:
def_type = types[def_type]
else:
def_type = None
# Set the metadata value to the edited version
self.figshare_metadata['defined_type'] = def_type
# FUNDING
# =======
# Funding should be a string of length between 0 and 2000 characters
fund = self.figshare_metadata['funding'] # get the current value
if fund is not None:
fund_type = type(fund)
if fund_type is not str:
fund = str(fund)
if len(fund) > 2000:
fund = fund[:2000]
# Set string to edited version
self.figshare_metadata['funding'] = fund
# License
# ========
# License should be a string of an integer corresponding to a predefined value
lic = self.figshare_metadata['license'] # Get the current value of the license
# Retrieve a list of available licenses from Figshare
# Yields a list of dictionaries, with string-integer: name pairs
lics_list = issue_request(method='GET', endpoint='account/licenses', token=self.token)
# Convert the list of dictionaries into a single dictionary
allowed_lics = {}
for d in lics_list:
allowed_lics[d['value']] = d['name']
if lic is not None:
lic_type = type(lic) # get the type of the value
# If a dictionary object is given try to extract value string-integer
if lic_type is dict:
if 'value' in lic:
lic_id = lic['value']
if lic_id in allowed_lics:
checked_lic = lic_id
else:
checked_lic = None
# If a string is given then check if it is an allowed value
elif lic_type is str:
if lic in allowed_lics:
checked_lic = lic
else:
checked_lic = None
# If an integer is passed try to see if a string version of it is an allowed value
elif lic_type is int:
lic = str(lic)
if lic in allowed_lics:
checked_lic = lic
else:
checked_lic = None
else:
checked_lic = None
# Set the metadata value to the edited version
self.figshare_metadata['license'] = checked_lic
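# Worked examples of the normalisation above, with illustrative input values only:
#   title '[My article]'      ->  'My article'   (list brackets stripped)
#   title 'ab'                ->  'ab000'        (padded up to the 3 character minimum)
#   tags '[stm]'              ->  ['stm']        (single string wrapped into a list)
#   references ['not-a-url']  ->  []             (entries without an http:// prefix are dropped)
#   defined_type 3            ->  'dataset'      (integer mapped to its pre-defined name)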
def get_upload_dict(self):
"""
Takes the different metadata dictionaries and ensures that their contents are ok for upload to figshare.
Args:
Returns:
None
Raises:
None
"""
self.check_basic()
upload_dict = {}
for key, value in self.figshare_metadata.items():
if value is not None:
upload_dict[key] = value
return upload_dict
def get_type(self):
"""
Used to determine the type of the article instance. Should be overridden by child classes.
Args:
Returns:
string. Specifying the article type
Raises:
None
"""
return 'article'
<file_sep>/data_window/figshare_upload_log.py
"""
"""
import os
from PyQt5.QtWidgets import (QWidget, QLabel, QPushButton, QLineEdit, QMessageBox, QFileDialog, QMdiSubWindow,
QPlainTextEdit, QGridLayout, QHBoxLayout, QVBoxLayout, QSizePolicy, QFrame)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, QThread, pyqtSlot)
from Figshare_desktop.formatting.formatting import (press_button, log_edit)
from Figshare_desktop.data_window.search_index import (ArticleIndex)
from Figshare_desktop.article_edit_window.local_metadata_window import LocalMetadataWindow
from Figshare_desktop.custom_widgets.local_article_list import LocalArticleList
from Figshare_desktop.data_window.figshare_add_article_list import TreeAddWorker
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class UploadLog(QWidget):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.initUI()
def initUI(self):
vbox = QVBoxLayout()
vbox.addWidget(self.initLog())
vbox.addWidget(self.clear_btn())
self.setLayout(vbox)
#####
# Widgets
#####
def initLog(self):
edit = QPlainTextEdit()
edit.setEnabled(False)
log_edit(self.app, edit)
self.log = edit
return self.log
def clear_btn(self):
"""
QPushButton to clear the log
:return: QPushButton
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/delete.png')))
press_button(self.app, btn)
btn.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
btn.pressed.connect(self.clear_log)
return btn
#####
# Actions
#####
def clear_log(self):
"""
Called to clear the log text
:return:
"""
self.log.clear()
@pyqtSlot(str, int, str)
def add_success_log(self, local_id: str, figshare_id: int, article_title: str):
"""
Called to add a success log to the edit
:param local_id: local article id
:param figshare_id: new figshare article id
:param article_title: article title
:return:
"""
msg = "[UPLOAD] local article {local} uploaded to Figshare article {fig} with title {title}\n"
msg = msg.format(local=local_id, fig=figshare_id, title=article_title)
log = self.log.toPlainText() + msg
self.log.setPlainText(log)
@pyqtSlot(str, str, tuple)
def add_error_log(self, local_id, article_title, error_args):
"""
Called to add an error log to the edit
:param local_id: local article id
:param article_title: article title
:param error_args: error arguments
:return:
"""
msg = "[ERROR] local article {local} : {title}\n"
msg = msg.format(local=local_id, title=article_title)
for arg in error_args:
msg += '\t{}\n'.format(arg)
log = self.log.toPlainText() + msg
self.log.setPlainText(log)
<file_sep>/projects_windows/new_project_window.py
"""
"""
import os
import math
from requests import HTTPError
from PyQt5.QtWidgets import (QMdiSubWindow, QLabel, QPushButton, QTextEdit, QGridLayout, QMainWindow,
QWidget, QLineEdit, QHBoxLayout, QVBoxLayout, QSizePolicy, QScrollBar, QMessageBox)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, QObject)
from Figshare_desktop.custom_widgets.button_field import QButtonField
from ..formatting.formatting import (scaling_ratio, press_button, grid_label, label_font, edit_font, grid_edit)
from figshare_interface import (Projects, Groups)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class NewProjectWindow(QMdiSubWindow):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.open_windows = self.parent.open_windows # Gets the set of open windows
self.initUI()
def initUI(self):
self.format_window()
self.hbox = QHBoxLayout()
self.hbox.addLayout(self.create_command_buttons())
self.hbox.addLayout(self.create_project_info_layout())
window_widget = QWidget()
window_widget.setLayout(self.hbox)
self.setWidget(window_widget)
#####
# Window Formatting and Actions
#####
def format_window(self):
"""
Formats the Projects window
"""
# Get the scaling ratios for the window size and fonts
w_scale, f_scale = scaling_ratio(self.app)
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the QRect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) / 3)
self.setGeometry(x0, y0, w, h)
# Remove frame from projects window
self.setWindowFlags(Qt.FramelessWindowHint)
#####
# Window Widgets
#####
def create_lineedit(self, label):
"""
Creates a label and lineedit
:param label: String. Containing label name.
:return: QLabel and QLineEdit
"""
# Create Label
lbl = QLabel(label)
grid_label(self.app, lbl)
# Create LineEdit
edit = QLineEdit()
edit.setClearButtonEnabled(True)
grid_edit(self.app, edit)
return lbl, edit
def create_edit(self, label):
"""
Creates a label and Textedit
:param label: String. Containing label name.
:return: QLabel and QTextEdit.
"""
# Create Label
lbl = QLabel(label)
lbl.setFont(label_font(self.app))
lbl.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Preferred)
# Create LineEdit
edit = QTextEdit()
grid_edit(self.app, edit)
return lbl, edit
def create_command_buttons(self):
"""
Creates a layout containing two buttons. One to create the new project, the second to cancel.
:return: QVBoxLayout containing the save and cancel buttons.
"""
# Create save button
sv_btn = QPushButton()
press_button(self.app, sv_btn) # Format button
sv_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/figshare_upload.png')))
sv_btn.setToolTip('Save new project.')
sv_btn.setToolTipDuration(1)
sv_btn.pressed.connect(self.on_save_pressed)
# Create cancel button
cl_btn = QPushButton()
press_button(self.app, cl_btn) # Format button
cl_btn.setIcon((QIcon(os.path.abspath(__file__ + '/../..' + '/img/exit.png'))))
cl_btn.setToolTip('Exit without saving.')
cl_btn.setToolTipDuration(1)
cl_btn.pressed.connect(self.on_cancel_pressed)
# Create Layout
vbox = QVBoxLayout()
vbox.addWidget(sv_btn)
vbox.addWidget(cl_btn)
return vbox
def create_project_info_layout(self):
"""
Creates a layout with label and edit fields for creating a new project.
:return: QVBoxLayout with fields to create a new project
"""
# Create Layout
grid = QGridLayout()
# Title
title_lbl, self.title_field = self.create_lineedit('Title')
self.title_field.setPlaceholderText('Enter Project Title Here.')
# Description
description_lbl, self.description_field = self.create_edit('Description')
self.description_field.setPlaceholderText('Enter meaningful project description here.')
# Funding
funding_lbl, self.funding_field = self.create_lineedit('Funding')
self.funding_field = QButtonField(self)
# Group
group_lbl, self.group_field = self.create_lineedit('Group ID')
self.group_field.setText(str(self.get_group())) # Auto fill with the users group id
# Add Widgets to layout
grid.addWidget(title_lbl, 0, 0, Qt.AlignLeft)
grid.addWidget(self.title_field, 0, 1)
grid.addWidget(description_lbl, 1, 0, Qt.AlignLeft)
grid.addWidget(self.description_field, 1, 1)
grid.addWidget(funding_lbl, 2, 0, Qt.AlignLeft)
grid.addWidget(self.funding_field, 2, 1)
grid.addWidget(group_lbl, 3, 0, Qt.AlignLeft)
grid.addWidget(self.group_field, 3, 1)
grid.setColumnStretch(1, 3)
return grid
#####
# Button Actions
#####
def on_save_pressed(self):
"""
Called when the save button is pressed. Will upload the new project to Figshare.
:return:
"""
title = self.title_field.text()
description = self.description_field.toPlainText()
funding = self.funding_field.get_tags()
fund_text = ''
for fund in funding:
fund_text += ':_:{}'.format(fund)
try:
group_id = self.group_field.text()
group_id = int(group_id)
available_groups = [i['id'] for i in Groups(self.token).get_list()]
if group_id not in available_groups:
raise ValueError('Not a valid group id.')
else:
project_info = self.create_project(title, description, fund_text, group_id)
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Information)
msgBox.setText("New Project Created\n{}".format(project_info['title']))
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox, exit_parent=True))
msgBox.show()
except ValueError as err:
err_args = err.args
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Critical)
msgBox.setText(err_args[0])
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox))
msgBox.show()
except TypeError as err:
err_args = err.args
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Critical)
msgBox.setText(err_args[0])
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox))
msgBox.show()
except HTTPError as err:
err_args = err.args
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Critical)
msgBox.setText(err_args[0])
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox))
msgBox.show()
def on_cancel_pressed(self):
"""
Called when the cancel button is pressed. Will return to the projects window without creating new project.
:return:
"""
self.open_windows.remove('new_project_window')
self.close()
self.parent.section_window.on_projects_btn_pressed()
def on_msgbtn_pressed(self, box, exit_parent=None):
"""
Called when an error message button is pressed
:return:
"""
box.close()
if exit_parent:
self.on_cancel_pressed()
#####
# Figshare API Interface Actions
#####
def get_group(self):
"""
Gets the group ID for the current user
:return: Int.
"""
groups = Groups(self.token)
group_list = groups.get_list()
group_id = group_list[0]['id']
return group_id
def create_project(self, title, description, funding, group_id):
"""
Creates a new private figshare project
:param title:
:param description:
:param funding:
:param group_id:
:return: Dictionary with information on the new project
"""
projects = Projects(self.token)
project_info = projects.create(title, description, funding, group_id)
return project_info
<file_sep>/custom_widgets/author_field.py
"""
"""
# PyQt Imports
from Figshare_desktop.custom_widgets.tag_button import QTagButton
# Figshare Desktop Imports
from Figshare_desktop.custom_widgets.button_field import QButtonField
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class AuthorField(QButtonField):
"""
Subclass of the QButtonField widget that is customised to visualise and return Figshare author metadata.
"""
def add_tag(self, author_dict: dict):
"""
Adds an author tag button to the frame.
Args:
author_dict: Figshare style author dictionary object.
Returns:
"""
if type(author_dict) is dict:
if 'full_name' in author_dict and 'id' in author_dict:
lbl = author_dict['full_name']
tooltip = author_dict['id']
elif 'full_name' in author_dict:
lbl = author_dict['full_name']
tooltip = ''
elif 'id' in author_dict:
lbl = str(author_dict['id'])
tooltip = ''
elif type(author_dict) is str:
lbl = author_dict
tooltip = ''
btn = QTagButton(lbl, self.tags, tooltip)
self.tags.add(lbl)
self.tag_box.addWidget(btn)
def get_tags(self):
"""
Gets the tags in a Figshare author metadata format.
Returns:
auth_list (list): list of dictionary objects for the authors.
"""
auth_list = []
tags = list(self.tags)
for tag in tags:
auth_dict = {}
try:
tag = int(tag)
auth_dict['id'] = tag
except:
auth_dict['name'] = tag
auth_list.append(auth_dict)
return auth_list
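# Worked example of get_tags() above, with illustrative values: a tags set of
# {'12345', 'Jane Doe'} is returned as a list containing {'id': 12345} and
# {'name': 'Jane Doe'}; tag strings that convert to an integer become author id
# dictionaries, everything else becomes a name dictionary.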
<file_sep>/README.md
# Figshare_desktop
Desktop application to manage figshare repositories.
<file_sep>/custom_widgets/collection_article_list.py
"""
"""
import collections
import time
from PyQt5.QtWidgets import (QWidget, QVBoxLayout, QProgressBar, QAbstractItemView, QTreeWidget, QTreeWidgetItem,
QLineEdit, QHBoxLayout, QComboBox, QPushButton, QDialog, QGridLayout, QSizePolicy,
QCheckBox)
from PyQt5.QtCore import (QThread, pyqtSignal, pyqtSlot, QObject)
from Figshare_desktop.formatting.formatting import (search_bar, search_combo, press_button)
from Figshare_desktop.figshare_articles.determine_type import gen_article
from figshare_interface import Collections
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class ArticleList(QWidget):
def __init__(self, app, OAuth_token, collection_id, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.collection_id = collection_id
self.parent = parent
self.__threads = []
self.initFig()
self.initUI()
def initFig(self, show_progress=True):
"""
Initial data load from Figshare
:return:
"""
# Get the list of articles from the Figshare collection
collections = Collections(self.token)
articles = collections.get_articles(self.collection_id)
n_articles = len(articles)
worker = ArticleLoadWorker(self.app, self.token, self.parent, self.collection_id, articles)
thread = QThread()
thread.setObjectName('thread_article_load')
self.__threads.append((thread, worker))
worker.moveToThread(thread)
worker.sig_step.connect(self.add_to_tree)
worker.sig_done.connect(self.update_search_field)
worker.sig_done.connect(self.enable_fields)
thread.started.connect(worker.work)
thread.start()
self.articles = articles
self.article_ids = set()
for article in articles:
self.article_ids.add(article['id'])
def initUI(self):
# Initialise the article QTree
self.initTree()
# Create horizontal layout for the search bar and fields
search_layout = QHBoxLayout()
# Add field search to search layout
search_layout.addWidget(self.search_field())
# Add search bar to search layout
search_layout.addWidget(self.search_bar())
# Add headers selection button to search layout
search_layout.addWidget(self.headers_selection_button())
# Create a Vertical layout
vbox = QVBoxLayout()
# Add the search layout to the vertical layout
vbox.addLayout(search_layout)
# Add the Article tree to the layout
vbox.addWidget(self.tree)
# Set Widget layout
self.setLayout(vbox)
#####
# Window Widgets
#####
def initTree(self):
"""
        Called to initialize the QTree widget
:return:
"""
# Create instance of QTreeWidget
tree = QTreeWidget()
# Format tree to allow for multiple items to be selected
tree.setSelectionMode(QAbstractItemView.ExtendedSelection)
# Allow for sorting by clicking on headers
tree.setSortingEnabled(True)
# Create the initial set of column headers
headers = ['id', 'title', 'created_date', 'up_to_date', 'type', 'tags']
header_item = QTreeWidgetItem(headers)
tree.setHeaderItem(header_item)
self.tree = tree
self.tree_headers = headers
def search_bar(self):
"""
Creates a QLineEdit object for the user to enter a search query
:return: QLineEdit
"""
# Create text box
edit = QLineEdit()
# Set font style
search_bar(self.app, edit)
# Set place holder text
edit.setPlaceholderText('Search')
# Add a clear button to the line edit
edit.setClearButtonEnabled(True)
# Add mouse over text
        edit.setToolTip('Search for specific articles within the collection')
edit.setToolTipDuration(2000)
# Connect search function to the return key
edit.returnPressed.connect(self.search_on_return)
# Connect the clear button to our own function
edit.children()[2].triggered.connect(self.search_on_clear)
edit.setEnabled(False)
self.search_edit = edit
return self.search_edit
def search_field(self):
"""
Creates a QComboBox with the different search fields to choose from
:return: QComboBox
"""
combo = QComboBox()
        combo.setMaximumWidth(int(self.geometry().width() / 4))
search_combo(self.app, combo)
combo.setToolTip('Set search field parameter. Leave blank for general search.')
combo.setToolTipDuration(2000)
self.search_field_combo = combo
self.update_search_field()
self.search_field_combo.setEnabled(False)
return self.search_field_combo
def headers_selection_button(self):
"""
        Button pressed to open the headers selection window
:return: QPushButton
"""
btn = QPushButton('Select Headers')
press_button(self.app, btn)
btn.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
btn.clicked[bool].connect(self.on_headers_set_pressed)
btn.setEnabled(False)
self.headers_btn = btn
return self.headers_btn
#####
# Widgets Actions
#####
@pyqtSlot(bool)
def update_search_field(self):
"""
Updates the items in the search field combobox.
Returns:
None
"""
# Clear combo and add empty first item
self.search_field_combo.clear()
self.search_field_combo.addItem('')
# Get list of fields in Figshare article search index
fields = self.parent.figshare_article_index.get_fields(schema='figshare_articles')
self.search_field_combo.addItems(fields)
@pyqtSlot(bool)
def enable_fields(self):
"""
Enables all fields
:return:
"""
self.search_edit.setEnabled(True)
self.search_field_combo.setEnabled(True)
self.headers_btn.setEnabled(True)
def disable_fields(self):
"""
Disables all fields.
Returns:
None
"""
self.search_edit.setEnabled(False)
self.search_field_combo.setEnabled(False)
self.headers_btn.setEnabled(False)
def fill_tree(self, headers: list, article_ids: set):
"""
Called to fill the QTree with articles
Args:
headers: Metadata key names in order to which appear in the QTreeWidget.
            article_ids: Set of Figshare article ID numbers to fill the tree with.
Returns:
None
"""
self.tree.clear()
for article_id in article_ids:
self.add_to_tree(article_id, headers=headers)
@pyqtSlot(int)
def add_to_tree(self, article_id: int, headers: list=None):
"""
Adds a single article to the QTree
:param article_id: int. or str. figshare article id
:return:
"""
if headers is None:
headers = self.tree_headers
article_id = str(article_id)
local_article = self.parent.figshare_articles[article_id]
local_article.gen_qtree_item(headers, local_article.input_dicts())
self.tree.addTopLevelItem(local_article.qtreeitem)
# Adjust the size of the columns to the contents
for column in range(self.tree.columnCount()):
self.tree.resizeColumnToContents(column)
def update_headers(self, headers):
"""
Called to update the column headers in the QTree
:param headers: list of strings. in Order for the different column headers
:return:
"""
header_item = QTreeWidgetItem(headers)
self.tree.setHeaderItem(header_item)
self.tree.clear()
self.fill_tree(headers, self.article_ids)
# Adjust the size of the columns to the contents
for column in range(self.tree.columnCount()):
self.tree.resizeColumnToContents(column)
def search_on_return(self):
"""
Called when return is pressed in the search bar.
:return:
"""
field = self.search_field_combo.currentText()
query = self.search_edit.text()
if query == '':
self.search_on_clear()
else:
article_index = self.parent.figshare_article_index
results = article_index.perform_search(schema='figshare_articles', field=field, query=query)
self.result_ids = set()
for docnum, val_dict in results.items():
if 'id' in val_dict:
self.result_ids.add(val_dict['id'])
self.fill_tree(self.tree_headers, self.result_ids)
def search_on_clear(self):
"""
Called when the search bar is cleared
:return:
"""
self.fill_tree(self.tree_headers, self.article_ids)
def on_headers_set_pressed(self):
"""
Called when the set headers button is pressed
:return:
"""
# Create a dialog window
dlg = QDialog()
# Create a vertical layout to hold header selections and confirmation buttons
vbox = QVBoxLayout()
# Create a grid layout to hold the QCheckboxes
grid = QGridLayout()
grid.setHorizontalSpacing(15)
grid.setVerticalSpacing(15)
# Add the grid to the layout
vbox.addLayout(grid)
# Create a confirmation button
btn = QPushButton('OK')
btn.pressed.connect(self.on_headers_ok_pressed)
# Add Button to layout
vbox.addWidget(btn)
# Set the dialog window layout
dlg.setLayout(vbox)
# Create an ordered set of field names
fields = OrderedSet()
for f in self.get_fields():
fields.add(f)
# Define how many columns of check boxes to create
columns = 3
# Empty the tree headers list
#self.tree_headers = []
# Start at row zero
row = 0
# While we still have a field in the ordered set
while fields:
for i in range(columns):
                # Here we have to use the exec function to programmatically name each box variable, otherwise the
                # connect function only ever calls the last button.
                # A further complication comes from having to remember that the stateChanged signal passes a bool
                # int to the lambda function.
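                # For example (illustrative only), connecting with a plain closure such as
                #     chk.stateChanged.connect(lambda state: self.check_box_clicked(row, i))
                # would capture `row` and `i` by reference, so every checkbox would report the final loop values;
                # binding them as default arguments (r=row, c=i below) freezes the current values per connection.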
if len(fields) == 0:
break
lbl = fields.popitem(False)[0]
exec("chk_box_{}_{} = QCheckBox(lbl)".format(row, i)) # Create a checkbox
if lbl in self.tree_headers:
eval("chk_box_{}_{}".format(row, i)).toggle()
eval("chk_box_{}_{}".format(row, i)).stateChanged.connect(lambda state, r=row,
c=i: self.check_box_clicked(r, c))
grid.addWidget(eval("chk_box_{}_{}".format(row, i)), row, i) # add the checkbox to the grid
row += 1 # increase the row counter
self.dlg = dlg
self.headers_box_layout = grid
self.dlg.show()
def check_box_clicked(self, row, column):
"""
Called when a check box in the header selection dialog is clicked
:return:
"""
if self.headers_box_layout.itemAtPosition(row, column) is not None:
field = self.headers_box_layout.itemAtPosition(row, column).widget().text()
if field in self.tree_headers:
self.tree_headers.remove(field)
elif field not in self.tree_headers:
self.tree_headers.append(field)
def on_headers_ok_pressed(self):
"""
Called when the headers dialog window ok button is pressed
:return:
"""
self.dlg.close()
# Ensure that id number is always the first column.
if self.tree_headers[0] != 'id':
self.tree_headers.insert(0, 'id')
self.update_headers(self.tree_headers)
def get_selection(self):
"""
Can be called to return a list of the article id numbers of all selected articles
:return:
"""
items = self.tree.selectedItems()
article_ids = set()
for item in items:
article_ids.add(int(item.text(0)))
return article_ids
def get_all(self):
"""
Can be called to return the article id numbers of all articles in the tree
:return:
"""
self.tree.selectAll()
items = self.tree.selectedItems()
article_ids = set()
for item in items:
article_ids.add(int(item.text(0)))
return article_ids
#####
# Figshare API Functions
#####
def get_fields(self):
"""
Called to return a list of custom metadata fields
:return: list of strings
"""
if len(self.articles) > 0:
collections = Collections(self.token)
article_id = self.articles[0]['id']
result = collections.get_article(article_id)
keys = set()
for key in result.keys():
if key != 'custom_fields':
keys.add(key)
for d in result['custom_fields']:
keys.add(d['name'])
return sorted(list(keys))
else:
return []
class ArticleLoadWorker(QObject):
sig_step = pyqtSignal(int)
sig_done = pyqtSignal(bool)
def __init__(self, app, OAuth_token: str, parent, collection_id: int, articles: list):
super().__init__()
self.__abort = False
self.app = app
self.token = OAuth_token
self.parent = parent
self.collection_id = collection_id
self.articles = articles
self.n_articles = len(articles)
@pyqtSlot()
def work(self):
"""
:return:
"""
if self.n_articles > 0:
for article in self.articles:
self.create_local_article(article)
self.sig_step.emit(article['id'])
self.sig_done.emit(True)
def abort(self):
self.__abort = True
def create_local_article(self, article):
"""
Given a Figshare article id number this function will create a local version if one does not already exist
:param article: Dict. Figshare article returned from Projects.list_articles()
:return:
"""
# Get the article id number and title
article_id = str(article['id']) # Convert int to str
article_title = article['title']
        # If the article is not already stored locally, create a local version
if not self.does_article_exist_locally(article_id):
article = gen_article(article_title, self.token, None, article_id)
self.parent.figshare_articles[article_id] = article
# Locally reference the Figshare Article Index
article_index = self.parent.figshare_article_index
# Get the type of the article
article_type = article.get_type()
# Check to see if the article type has been added to the articles index.
        # If not we will need to create new fields in the schema.
if article_type not in article_index.document_types:
# Add the new file type to the index schema
article_index.document_types.add(article_type)
# Define the schema we wish to add fields to
schema = 'figshare_articles'
# From the article type created get the index dictionary and add fields to the index
for field_name, field_type in article.index_schema().items():
if field_name not in article_index.get_fields(schema):
if field_type[0] == 'id':
article_index.add_ID(schema=schema, field_name=field_name, stored=field_type[1],
unique=True)
elif field_type[0] == 'text':
article_index.add_TEXT(schema, field_name, field_type[1])
elif field_type[0] == 'keyword':
article_index.add_KEYWORD(schema, field_name, field_type[1])
elif field_type[0] == 'numeric':
article_index.add_NUMERIC(schema, field_name, field_type[1])
elif field_type[0] == 'datetime':
article_index.add_DATETIME(schema, field_name, field_type[1])
elif field_type[0] == 'boolean':
article_index.add_BOOLEAN(schema, field_name, field_type[1])
elif field_type[0] == 'ngram':
article_index.add_NGRAM(schema, field_name, field_type[1])
# Get single dictionary of all fields associated to the article
document_dict = {}
for d in article.input_dicts():
document_dict = {**document_dict, **d}
# Add document to index
article_index.addDocument(schema='figshare_articles', data_dict=document_dict)
def does_article_exist_locally(self, article_id):
"""
Checks to see if there is a local version of the article.
:param article_id: int. Figshare article id number
:return: Bool. Dependent on if local version of article exists or not
"""
# Convert article id to a string. Should have already been done, but just in case we call it somewhere else
a_id = str(article_id)
if a_id in self.parent.figshare_articles:
return True
else:
return False
class OrderedSet(collections.OrderedDict, collections.abc.MutableSet):
def update(self, *args, **kwargs):
if kwargs:
raise TypeError("update() takes no keyword arguments")
for s in args:
for e in s:
self.add(e)
def add(self, elem):
self[elem] = None
def discard(self, elem):
self.pop(elem, None)
def __le__(self, other):
return all(e in other for e in self)
def __lt__(self, other):
return self <= other and self != other
def __ge__(self, other):
return all(e in self for e in other)
def __gt__(self, other):
return self >= other and self != other
def __repr__(self):
return 'OrderedSet([%s])' % (', '.join(map(repr, self.keys())))
def __str__(self):
return '{%s}' % (', '.join(map(repr, self.keys())))
difference = property(lambda self: self.__sub__)
difference_update = property(lambda self: self.__isub__)
intersection = property(lambda self: self.__and__)
intersection_update = property(lambda self: self.__iand__)
issubset = property(lambda self: self.__le__)
issuperset = property(lambda self: self.__ge__)
symmetric_difference = property(lambda self: self.__xor__)
symmetric_difference_update = property(lambda self: self.__ixor__)
union = property(lambda self: self.__or__)
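if __name__ == '__main__':
    # Minimal usage sketch of OrderedSet (illustrative only): it behaves like a set while
    # preserving insertion order, which is what on_headers_set_pressed relies on when it
    # pops fields in the order they were added.
    fields = OrderedSet()
    for name in ('id', 'title', 'tags', 'title'):
        fields.add(name)
    print(fields)                      # {'id', 'title', 'tags'} -- duplicate 'title' ignored, order kept
    print(fields.popitem(False)[0])    # 'id' -- the oldest entry is popped first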
<file_sep>/data_window/data_window.py
import os
import math
from PyQt5.QtWidgets import (QMdiSubWindow, QWidget, QLabel, QPushButton, QAbstractItemView, QMessageBox, QMainWindow,
QFileDialog, QTreeWidgetItem, QHBoxLayout, QVBoxLayout, QSizePolicy, QTreeWidget,
QFileSystemModel, QTreeView)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, QObject, QThread, pyqtSignal, pyqtSlot)
from Figshare_desktop.formatting.formatting import (press_button, checkable_button)
from Figshare_desktop.figshare_articles.determine_type import gen_local_article
from Figshare_desktop.data_window.data_articles_window import DataArticlesWindow
from Figshare_desktop.article_edit_window.local_metadata_window import LocalMetadataWindow
from figshare_interface import (Groups, Projects)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class DataWindow(QMdiSubWindow):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.__threads = []
self.initUI()
def initUI(self):
# Format the window
self.format_window()
# Create a horizontal layout to hold the widgets
hbox = QHBoxLayout()
# Add the widgets
hbox.addWidget(self.set_directory_btn())
hbox.addWidget(self.create_file_browser())
hbox.addWidget(self.add_to_selection_btn())
# Create a central widget for the local data window
window_widget = QWidget()
# Add the vertical box layout
window_widget.setLayout(hbox)
# Set the projects window widget
self.setWidget(window_widget)
def format_window(self):
"""
Form the local data window
:return:
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) / 3)
        self.setGeometry(x0, y0, w, int(h))
# Remove frame from projects window
self.setWindowFlags(Qt.FramelessWindowHint)
#####
# Window Widgets
#####
def set_directory_btn(self):
"""
Creates a QPushButton that can be used to set the current root directory
:return: QPushButton
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/Folder-48.png')))
press_button(self.app, btn) # Format button
btn.setToolTip("Select local directory")
btn.setToolTipDuration(1)
btn.pressed.connect(self.on_set_directory_pressed)
return btn
def create_file_browser(self):
"""
Creates a QTreeView with a QFileSystemModel that is used as a file browser
:return: QTreeview
"""
self.browser = QTreeView()
# Set the model of the QTreeView
self.model = QFileSystemModel()
home_dir = os.path.expanduser("~") # Define the initial root directory
self.model.setRootPath(home_dir)
self.browser.setModel(self.model)
# Resize the first column
        self.browser.setColumnWidth(0, int(self.geometry().width() / 3))
# Control how selection of items works
#self.browser.setSelectionBehavior(QAbstractItemView.SelectItems) # Allow for only single item selection
        self.browser.setSelectionMode(QAbstractItemView.ExtendedSelection)  # Allow for multiple rows to be selected
return self.browser
def add_to_selection_btn(self):
"""
Creates a QPushButton that can be used to open the metadata window for the selected items in the file browser
:return: QPushButton
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/Insert Row Below-48.png')))
press_button(self.app, btn) # Format button
btn.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Expanding)
btn.setToolTip("Add selected items to articles list")
btn.setToolTipDuration(1)
btn.pressed.connect(self.on_open_selection_clicked)
return btn
#####
# Widget Actions
#####
def on_set_directory_pressed(self):
"""
Called when the set root directory button is pressed
:return:
"""
dir_name = self.user_set_dir()
self.browser.setRootIndex(self.model.index(dir_name))
def user_set_dir(self):
"""
Creates a QFileDialog that prompts the user to choose a root directory
:return: Sting. directory path
"""
return str(QFileDialog.getExistingDirectory(self, "Select Directory"))
def on_open_selection_clicked(self):
"""
Called when the open selection button is clicked. Either open or closes the metadata window
:return:
"""
# Retrieve a set of file paths
file_paths = self.get_selection_set()
# Locally reference the Article List Widget
article_tree = self.parent.data_articles_window.article_tree
        # Prevent the user from editing, searching, etc. while new articles are created.
article_tree.disable_fields()
# Create an article creation worker
worker = ArticleCreationWorker(self.token, self.parent, file_paths)
# Create the thread.
load_articles_thread = QThread()
load_articles_thread.setObjectName('local_articles_thread')
self.__threads.append((load_articles_thread, worker))
# Add the worker to the thread
worker.moveToThread(load_articles_thread)
# Connect signals from worker
worker.sig_step.connect(article_tree.add_to_tree)
worker.sig_step.connect(lambda article_id: article_tree.add_to_articles(article_id))
worker.sig_done.connect(article_tree.enable_fields)
worker.sig_done.connect(article_tree.update_search_field)
worker.sig_done.connect(self.parent.data_articles_window.check_edit)
load_articles_thread.started.connect(worker.work)
# Begin worker thread
load_articles_thread.start()
def get_selection_set(self):
"""
Creates a set of selected item file paths.
:return:
"""
# Get a list of selected items from the QTreeview
items = self.browser.selectedIndexes()
# Create an empty set to add file paths to
file_paths = set()
for item in items:
# For items that are not directories
if not self.model.isDir(item):
file_paths.add(self.model.filePath(item)) # Add the item file path
else:
# Combine the current set with a set of files contained within the directory. Does not recursively
# open contained directories
contained_files = self.get_child_files(self.model.filePath(item))
if contained_files is not None:
file_paths |= contained_files
return file_paths
@staticmethod
def get_child_files(path):
"""
        Given a path to a directory, returns a set of the file paths contained within. Does not recursively open
        internal directories.
:param path: string. path to directory
:return: set. Containing file paths
"""
        dir_path = os.path.normpath(path)
        if os.path.isdir(dir_path):
            dir_contents = os.listdir(dir_path)
            file_set = set()
            for item in dir_contents:
                # Only add files; skip any directories contained within (no recursion)
                if not os.path.isdir(os.path.join(dir_path, item)):
                    file_set.add(os.path.join(dir_path, item))
            return file_set
else:
return None
class ArticleCreationWorker(QObject):
sig_step = pyqtSignal(str)
sig_done = pyqtSignal(bool)
def __init__(self, OAuth_token, parent, file_paths: set):
super().__init__()
self.token = OAuth_token
self.parent = parent
self.file_paths = file_paths
@pyqtSlot()
def work(self):
"""
:return:
"""
while self.file_paths:
path = self.file_paths.pop()
local_id = self.create_local_article(path)
self.sig_step.emit(local_id)
self.sig_done.emit(True)
def create_local_article(self, file_path):
"""
Creates a local article of the given file
:param file_path: string.
:return:
"""
# Check if an article does not already exist with the same title
article_exists, local_id = self.does_local_article_exist(file_path)
if not article_exists:
# set the local file id number
local_id = 'local_' + str(self.parent.next_local_id)
# Create local article
self.parent.local_articles[local_id] = gen_local_article(self.token, file_path)
# Set id number
self.parent.local_articles[local_id].figshare_metadata['id'] = local_id
# Increment next local id counter
self.parent.next_local_id += 1
# locally define the local article index for convenience
local_article_index = self.parent.local_article_index
# Get the article type
article = self.parent.local_articles[local_id]
article_type = article.figshare_metadata['type']
# Check to see if the article type has been added to the articles index
# If not we will need to add new fields to the index for the new file type
if article_type not in local_article_index.document_types:
# Add the new file type to the set of types included in the index
local_article_index.document_types.add(article_type)
# Define the schema we wish to add fields to
schema = 'local_articles'
# From the article type created get the index dictionary and add fields to the index appropriately
for field_name, field_type in article.index_schema().items():
if field_type[0] == 'id':
local_article_index.add_ID(schema=schema, field_name=field_name, stored=field_type[1],
unique=True)
elif field_type[0] == 'text':
local_article_index.add_TEXT(schema, field_name, field_type[1])
elif field_type[0] == 'keyword':
local_article_index.add_KEYWORD(schema, field_name, field_type[1])
elif field_type[0] == 'numeric':
local_article_index.add_NUMERIC(schema, field_name, field_type[1])
elif field_type[0] == 'datetime':
local_article_index.add_DATETIME(schema, field_name, field_type[1])
elif field_type[0] == 'boolean':
local_article_index.add_BOOLEAN(schema, field_name, field_type[1])
elif field_type[0] == 'ngram':
local_article_index.add_NGRAM(schema, field_name, field_type[1])
# Get a single dictionary of all fields associated to the article
document_dict = {}
for d in article.input_dicts():
document_dict = {**document_dict, **d}
# Add document to Index
local_article_index.addDocument(schema='local_articles', data_dict=document_dict)
return local_id
# Else if an existing article:
else:
return local_id
def does_local_article_exist(self, file_path: str):
"""
Checks to see if an article already exists with the same title in the local article set.
Args:
file_path: local path to the file from which to create an article.
Returns:
            article_exists (bool): True or False depending on whether the article already exists.
article_id (str): If article with same title is found returns the article ID, otherwise None is returned.
"""
# Get the file name from the full path
file_name = os.path.split(file_path)[-1]
# locally define the local article index for convenience
local_article_index = self.parent.local_article_index
# If there is the local article schema present
if 'local_articles' in local_article_index.list_schema():
# Initially set article exists as False
exists = False
# Search for articles with the same title as the current file name
results = local_article_index.perform_search(schema='local_articles', field='title', query=file_name)
# Check in the results given if there is a document with the same title as the file name
for doc_num, val_dict in results.items():
# If one is found return true
if 'title' in val_dict:
if val_dict['title'] == file_name:
exists = True
local_id = val_dict['id']
break
            if exists:
                return True, local_id
        # If we get here either the schema does not exist yet or no result (within the top ten hits) had the
        # same title, so return False
        return False, None
<file_sep>/article_edit_window/article_edit_window.py
"""
"""
# Standard Imports
import os
from requests import HTTPError
# PyQt Imports
from PyQt5.QtWidgets import (QWidget, QLabel, QPushButton, QLineEdit, QMessageBox, QScrollArea, QMdiSubWindow,
QTextEdit, QGridLayout, QHBoxLayout, QVBoxLayout, QTabWidget, QComboBox)
from PyQt5.QtGui import (QIcon)
from PyQt5.QtCore import (Qt)
# Figshare Desktop Imports
from Figshare_desktop.custom_widgets.button_field import QButtonField
from Figshare_desktop.custom_widgets.author_field import AuthorField
from Figshare_desktop.custom_widgets.categories_field import CategoriesField
from Figshare_desktop.formatting.formatting import (grid_label, grid_edit, press_button)
# Figshare API Interface Imports
from figshare_interface import (Projects)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class ArticleEditWindow(QMdiSubWindow):
def __init__(self, app, OAuth_token, parent, project_id, article_ids):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.project_id = project_id
self.article_ids = article_ids
self.initFig()
self.initUI()
def initFig(self):
"""
Initialises the figshare data
:return:
"""
# Get the number of articles
n_articles = len(self.article_ids)
# For more than one article
if n_articles > 1:
# Get the type of article for the first in the list
article = self.parent.figshare_articles[str(self.article_ids[0])]
initial_type = article.get_type()
# Initially set all files as the same type
self.same_type = True
# Loop through all files and check to see if they are all the same
for article_id in self.article_ids:
article = self.parent.figshare_articles[str(article_id)]
type = article.get_type()
if type != initial_type:
self.same_type = False
break
# Set the dictionary of metadata keys
figshare_metadata = {}
article = self.parent.figshare_articles[str(self.article_ids[0])]
for d in article.input_dicts()[0:1]:
figshare_metadata = {**figshare_metadata, **d}
self.figshare_metadata = dict.fromkeys(figshare_metadata)
# Set the dictionary of file specific metadata keys
self.file_metadata = None
if self.same_type:
if len(article.input_dicts()) > 2:
file_dict = {}
for d in article.input_dicts()[2:]:
file_dict = {**file_dict, **d}
self.file_metadata = dict.fromkeys(file_dict)
# For a single article
else:
# Set the dictionary of metadata keys and values
figshare_metadata = {}
article = self.parent.figshare_articles[str(self.article_ids[0])]
for d in article.input_dicts()[0:1]:
figshare_metadata = {**figshare_metadata, **d}
self.figshare_metadata = figshare_metadata
# Set the dictionary of file specific metadata keys and values
self.file_metadata = None
if len(article.input_dicts()) > 2:
file_dict = {}
for d in article.input_dicts()[2:]:
file_dict = {**file_dict, **d}
self.file_metadata = file_dict
# Metadata Dictionaries
self.defined_type_dict = {'': 0, 'figure': 1, 'media': 2, 'dataset': 3, 'fileset': 4, 'poster': 5, 'paper': 6,
'presentation': 7, 'thesis': 8, 'code': 9, 'metadata': 10}
self.license_dict = {0: '', 1: 'CC BY', 2: 'CC-0', 3: 'MIT', 4: 'GPL', 5: 'GPL-2.0', 6: 'GPL-3.0',
7: 'Apache-2.0'}
def initUI(self):
# Format the geometry of the window
self.format_window()
# Create a horizontal layout
self.hbox = QHBoxLayout()
# Add the save and exit buttons
self.hbox.addLayout(self.control_button_layout())
# Add the tab widget
self.tabs = self.metadata_tab_window()
self.hbox.addWidget(self.tabs)
# Create a central widget for the article edit window
window_widget = QWidget()
# Add the horizontal box layout
window_widget.setLayout(self.hbox)
# Set the projects window widget
self.setWidget(window_widget)
#####
# Window Formatting
#####
def format_window(self):
"""
Sets the window geometry
:return:
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) * 0.375)
        self.setGeometry(x0, y0, w, int(h))
# Remove frame from the window
self.setWindowFlags(Qt.FramelessWindowHint)
#####
# Window Widgets
#####
def control_button_layout(self):
"""
Creates a layout with the save and exit buttons
:return: QVBoxLayout
"""
# Create the layout
vbox = QVBoxLayout()
# Add the exit button
exit_btn = self.exit_button()
vbox.addWidget(exit_btn)
# Add the save button
save_btn = self.save_button()
vbox.addWidget(save_btn)
return vbox
def exit_button(self):
"""
Creates an exit button to close the article edit window without saving changes
:return: QPushButton
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/exit.png')))
press_button(self.app, btn)
btn.pressed.connect(self.on_exit_pressed)
return btn
def save_button(self):
"""
Creates a save button to push changes to Figshare
:return: QPushButton
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/figshare_upload.png')))
press_button(self.app, btn)
btn.pressed.connect(self.on_save_pressed)
return btn
def metadata_tab_window(self):
"""
Creates a tab layout to hold the different metadata tabs
:return:
"""
# Create Tab Widget
tab_wid = QTabWidget()
# Add Figshare Metadata Tab
self.figshare_tab = self.init_figshare_metadata_tab()
tab_wid.addTab(self.figshare_tab, 'Figshare Metadata')
if self.file_metadata is not None:
self.filespecific_tab = self.init_filespecific_metadata_tab()
tab_wid.addTab(self.filespecific_tab, 'File Specific Metadata')
return tab_wid
def init_figshare_metadata_tab(self):
"""
Creates a QWidget for the default Figshare metadata
:return:
"""
# Create widget object to fill with metadata
tab = QScrollArea()
scroll_wid = QWidget()
# Create metadata labels and fields
title_lbl, title_edit = self.create_lineedit('Title', self.figshare_metadata['title'])
if len(self.article_ids) > 1:
title_edit.setEnabled(False)
title_edit.clear()
title_edit.setPlaceholderText('Files will retain their individual titles')
descr_lbl, descr_edit = self.create_textedit('Description', self.figshare_metadata['description'])
ref_lbl, ref_field = self.create_buttonfield('References', self.figshare_metadata['references'])
tags_lbl, tags_field = self.create_buttonfield('Tags', self.figshare_metadata['tags'])
cat_lbl, cat_field = self.create_categories_field('Categories', self.figshare_metadata['categories'])
auth_lbl, auth_field = self.create_author_field('Authors', self.figshare_metadata['authors'])
def_lbl, def_combo = self.create_combo('Defined Type', self.defined_type_dict,
self.figshare_metadata['defined_type'])
fund_tags = self.figshare_metadata['funding']
if self.figshare_metadata['funding'] is not None:
fund_tags = self.figshare_metadata['funding'].split(':_:')
if '' in fund_tags:
fund_tags.remove('')
if ' ' in fund_tags:
fund_tags.remove(' ')
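        # Note: the funding field is stored on the article as a single ':_:'-delimited string,
        # e.g. 'EPSRC EP/X012345:_:ERC 678910' splits to ['EPSRC EP/X012345', 'ERC 678910']
        # (grant numbers here are placeholders for illustration only); on save the tags are
        # re-joined with the same delimiter in update_article_figshare_metadata.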
fund_lbl, fund_field = self.create_buttonfield('Funding', fund_tags)
lic_lbl, lic_combo = self.create_combo('License', self.license_dict, self.figshare_metadata['license'])
# Create layout
grid = QGridLayout()
# Add widgets to layout
grid.addWidget(title_lbl, 0, 0)
grid.addWidget(title_edit, 0, 1)
grid.addWidget(descr_lbl, 1, 0)
grid.addWidget(descr_edit, 1, 1)
grid.addWidget(ref_lbl, 2, 0)
grid.addWidget(ref_field, 2, 1)
grid.addWidget(tags_lbl, 3, 0)
grid.addWidget(tags_field, 3, 1)
grid.addWidget(cat_lbl, 4, 0)
grid.addWidget(cat_field, 4, 1)
grid.addWidget(auth_lbl, 5, 0)
grid.addWidget(auth_field, 5, 1)
grid.addWidget(def_lbl, 6, 0)
grid.addWidget(def_combo, 6, 1)
grid.addWidget(fund_lbl, 7, 0)
grid.addWidget(fund_field, 7, 1)
grid.addWidget(lic_lbl, 8, 0)
grid.addWidget(lic_combo, 8, 1)
scroll_wid.setLayout(grid)
tab.setWidget(scroll_wid)
return tab
def init_filespecific_metadata_tab(self):
"""
Creates a QTabWidget to add to the article edit window
:return:
"""
# Get the first article from the article is list
article = self.parent.figshare_articles[str(self.article_ids[0])]
# Check to see if the article is a known file format
if self.file_metadata is not None:
# Create widget object to fill with metadata
tab = QScrollArea()
scroll_wid = QWidget()
grid = QGridLayout()
row_number = 0
for key, value in self.file_metadata.items():
value = str(value)
lbl, edit = self.create_lineedit(key, value)
grid.addWidget(lbl, row_number, 0)
grid.addWidget(edit, row_number, 1)
row_number += 1
scroll_wid.setLayout(grid)
tab.setWidget(scroll_wid)
return tab
def create_label(self, label):
"""
Creates and formats a QLabel
:param label: String.
:return: QLabel
"""
lbl = QLabel(label)
grid_label(self.app, lbl)
        lbl.setMaximumWidth(int(self.geometry().width() * 0.2))
return lbl
def create_lineedit(self, label, fill):
"""
        Creates and formats a QLabel and QLineEdit pair
:param label: String.
:param fill: String
:return: QLabel, QLineEdit
"""
lbl = self.create_label(label)
edit = QLineEdit(fill)
grid_edit(self.app, edit)
return lbl, edit
def create_textedit(self, label, fill):
"""
Creates and formats a QLabel and QTextEdit pair
:param label: String.
:param fill: String.
:return: QLabel, QTextEdit
"""
lbl = self.create_label(label)
edit = QTextEdit()
edit.setPlainText(fill)
grid_edit(self.app, edit)
return lbl, edit
def create_buttonfield(self, label, fill_list):
"""
        Creates and formats a QLabel and QButtonField pair
:param label: String
:param fill_list: List of Strings
:return: QLabel, QButtonField
"""
lbl = self.create_label(label)
button_field = QButtonField(parent=self)
if fill_list is not None:
for tag in fill_list:
if type(tag) == dict:
button_field.add_tag(tag['id'])
else:
button_field.add_tag(tag)
return lbl, button_field
def create_author_field(self, label, fill_list):
"""
Creates and formats a QLabel and Author formatted QButtonField pair.
Args:
label: Name of the field.
fill_list: List of authors.
Returns:
lbl (QLabel): Label object.
auth_field (AuthorField): QButton field, authors sub class.
"""
lbl = self.create_label(label)
auth_field = AuthorField(parent=self)
if fill_list is not None:
for auth_dict in fill_list:
auth_field.add_tag(auth_dict)
return lbl, auth_field
def create_categories_field(self, label: str, fill_list: list):
"""
Creates a label, QButton field formatted for Figshare categories.
Args:
label: Field label.
fill_list: list of category integers.
Returns:
lbl (QLabel): Label object for UI.
cat_field (CategoriesField): QButtonField formatted for figshare categories.
"""
lbl = self.create_label(label)
cat_field = CategoriesField(self.parent.id_categories, self.parent.name_categories, parent=self)
if fill_list is not None:
for cat in fill_list:
cat_field.add_tag(cat)
return lbl, cat_field
def create_combo(self, label, metadata_dict, fill):
"""
        Creates and formats a QLabel and QComboBox pair
        :param label: String
        :param metadata_dict: dict. mapping between combo box item labels and their Figshare values
        :param fill: int or str. initial value to select in the combo box
        :return: QLabel, QComboBox
"""
lbl = self.create_label(label)
combo = QComboBox()
for key, value in metadata_dict.items():
if type(key) is str:
combo.addItem(key)
else:
combo.addItem(value)
if type(fill) is int:
combo.setCurrentIndex(fill)
elif type(fill) is str:
try:
fill = int(fill)
combo.setCurrentIndex(metadata_dict[fill])
            except (ValueError, KeyError, TypeError):
combo.setCurrentIndex(0)
return lbl, combo
#####
# Widget Actions
#####
def on_exit_pressed(self):
"""
Called when the exit button is pressed. Closes the article edit window without saving any changes
:return:
"""
# Close article edit window
self.parent.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
if 'project_info_window' in self.parent.open_windows:
# Open project articles window
self.parent.project_info_window.on_articles_pressed()
elif 'collection_info_window' in self.parent.open_windows:
self.parent.collection_info_window.on_articles_pressed()
def on_save_pressed(self):
"""
Called when the save button is pressed. Pushes changes to figshare and creates a confirmation dialog
:return:
"""
self.update_all_articles(self.article_ids)
#####
# Figshare Actions
#####
def update_all_articles(self, article_list):
"""
Updates multiple articles
:param article_list: list of int.
:return:
"""
all_errors = []
for article_id in article_list:
errors = self.update_single_article(article_id)
if errors != []:
for err in errors:
all_errors.append(err)
if all_errors == []:
msg = "All articles updated"
resp = QMessageBox.information(self, "Update Confirmation", msg, QMessageBox.Ok)
else:
msg_box = QMessageBox()
msg_box.setIcon(QMessageBox.Warning)
msg_box.setText("Error in update.")
detailed_msg = ""
for err in all_errors:
for arg in err.args:
detailed_msg += arg + '\n'
msg_box.setDetailedText(detailed_msg)
msg_box.setStandardButtons(QMessageBox.Ok)
msg_box.show()
def update_single_article(self, article_id):
"""
Updates a single figshare article
:param article_id:
:return:
"""
err_figshare = self.update_article_figshare_metadata(article_id)
if self.file_metadata is not None:
err_filespecific = self.update_article_file_metadata(article_id)
errors = []
if err_figshare is not None:
errors.append(err_figshare)
if self.file_metadata is not None and err_filespecific is not None:
errors.append(err_filespecific)
return errors
def update_article_figshare_metadata(self, article_id):
"""
Updates the figshare metadata of a single article
:param article_id:
:return:
"""
# Get the current/old figshare metadata
article = self.parent.figshare_articles[str(article_id)]
old_figshare_metadata = article.figshare_metadata
# Get the new/edited figshare metadata
new_figshare_metadata = {}
figshare_grid = self.figshare_tab.widget().layout()
# Title
title = figshare_grid.itemAtPosition(0, 1).widget().text()
new_figshare_metadata['title'] = title
# Description
description = figshare_grid.itemAtPosition(1, 1).widget().toPlainText()
new_figshare_metadata['description'] = description
# References
references = figshare_grid.itemAtPosition(2, 1).widget().get_tags()
new_figshare_metadata['references'] = references
# Tags
tags = figshare_grid.itemAtPosition(3, 1).widget().get_tags()
new_figshare_metadata['tags'] = tags
# Categories
cat_list = figshare_grid.itemAtPosition(4, 1).widget().get_tags()
new_figshare_metadata['categories'] = cat_list
# Authors
auth_list = figshare_grid.itemAtPosition(5, 1).widget().get_tags()
new_figshare_metadata['authors'] = auth_list
# Defined Type
defined_type = figshare_grid.itemAtPosition(6, 1).widget().currentText()
new_figshare_metadata['defined_type'] = defined_type
# Funding
fund_tags = figshare_grid.itemAtPosition(7, 1).widget().get_tags()
funding = ''
for tag in fund_tags:
funding += tag + ':_:'
new_figshare_metadata['funding'] = funding
# License
license = figshare_grid.itemAtPosition(8, 1).widget().currentIndex()
new_figshare_metadata['license'] = license
# Create an empty dictionary to add updates/edits
update_dict = {}
# Check for changes
for key, value in new_figshare_metadata.items():
if value != 'None' and value is not None and value != '':
if value != old_figshare_metadata[key]:
update_dict[key] = value
try:
project = Projects(self.token)
proj_info = project.update_article(self.token, article_id, update_dict)
# Update local version of article
article.update_info(update_dict)
# Change up_to_date
article.figshare_metadata['up_to_date'] = False
return None
except HTTPError as err:
return err
except TypeError as err:
return err
except ValueError as err:
return err
def update_article_file_metadata(self, article_id):
"""
Updates an articles custom fields metadata
:param article_id: int. Figshare article id number
:return:
"""
# Get the current/old file specific metadata
article = self.parent.figshare_articles[str(article_id)]
old_file_dicts = article.input_dicts()[2:]
old_file_metadata = {}
for d in old_file_dicts:
for key, value in d.items():
old_file_metadata[key] = value
# Get the new/edited figshare metadata
new_file_metadata = {}
file_grid = self.filespecific_tab.widget().layout()
# Get the number of rows in the grid layout
n_rows = file_grid.rowCount()
# Get the new file metadata
for row in range(n_rows):
lbl = file_grid.itemAtPosition(row, 0).widget().text()
edit = file_grid.itemAtPosition(row, 1).widget().text()
new_file_metadata[lbl] = edit
# Check for changes
update_dict = {}
for key, value in new_file_metadata.items():
if value != 'None' and value is not None:
if value != old_file_metadata[key]:
update_dict[key] = value
# Update local version of article
article.update_info(update_dict)
# Reformat update dictionary
update_dict = {'custom_fields': update_dict}
try:
project = Projects(self.token)
proj_info = project.update_article(self.token, article_id, update_dict)
# Update local version of article
article.update_info(update_dict)
# Change up_to_date
article.figshare_metadata['up_to_date'] = False
return None
except HTTPError as err:
return err
except TypeError as err:
return err
except ValueError as err:
return err
<file_sep>/data_window/data_articles_window.py
"""
"""
import os
from PyQt5.QtWidgets import (QWidget, QLabel, QPushButton, QLineEdit, QMessageBox, QFileDialog, QMdiSubWindow,
QTextEdit, QGridLayout, QHBoxLayout, QVBoxLayout, QSizePolicy, QFrame)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, QThread, pyqtSlot)
from Figshare_desktop.formatting.formatting import (press_button)
from Figshare_desktop.data_window.search_index import (ArticleIndex)
from Figshare_desktop.article_edit_window.local_metadata_window import LocalMetadataWindow
from Figshare_desktop.custom_widgets.local_article_list import LocalArticleList
from Figshare_desktop.data_window.figshare_add_article_list import TreeAddWorker
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class DataArticlesWindow(QMdiSubWindow):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.__threads = []
self.initIndex()
self.initUI()
def initIndex(self):
"""
Initiates the Whoosh search index
:return:
"""
if self.parent.local_article_index is None:
# Create the Article Index
self.parent.local_article_index = ArticleIndex()
# Create the default figshare metadata schema dictionary
self.parent.local_article_index.create_schema('local_articles')
self.parent.local_article_index.add_ID(schema='local_articles', field_name='id', stored=True, unique=True)
self.parent.local_article_index.add_TEXT('local_articles', 'title', True)
self.parent.local_article_index.add_TEXT('local_articles', 'description')
self.parent.local_article_index.add_KEYWORD('local_articles', 'tags', True)
self.parent.local_article_index.add_ID('local_articles', 'references')
self.parent.local_article_index.add_KEYWORD('local_articles', 'categories')
self.parent.local_article_index.add_KEYWORD('local_articles', 'authors')
self.parent.local_article_index.add_ID('local_articles', 'defined_type')
self.parent.local_article_index.add_TEXT('local_articles', 'funding')
self.parent.local_article_index.add_ID('local_articles', 'license')
self.parent.local_article_index.document_types.add('article')
def initUI(self):
self.format_window()
# Create layout for the control buttons
control_btns_layout = QVBoxLayout()
# Add control buttons
control_btns_layout.addWidget(self.delete_btn())
control_btns_layout.addWidget(self.project_btn())
# Create the article tree
self.article_tree = LocalArticleList(self.app, self.token, self.parent)
# Create edit button layout
edit_layout = QVBoxLayout()
# Add the edit button
edit_layout.addWidget(self.edit_btn())
# Create encompassing horizontal layout
hbox = QHBoxLayout()
# Add the control buttons layout
hbox.addLayout(control_btns_layout)
# Add the article tree
hbox.addWidget(self.article_tree)
# Add the edit button layout
hbox.addLayout(edit_layout)
# Set the widget and layout of the sub window
window_widget = QWidget()
window_widget.setLayout(hbox)
self.setWidget(window_widget)
self.check_edit()
def format_window(self):
"""
Form the local data window
:return:
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) / 3)
        self.setGeometry(x0, y0, w, int(h))
# Remove frame from projects window
self.setWindowFlags(Qt.FramelessWindowHint)
####
# Window Widgets
####
def delete_btn(self):
"""
Creates a QPushButton that can be used to remove local articles from the list and memory
:return:
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/delete.png')))
btn.setToolTip('Delete selected articles')
btn.setToolTipDuration(1)
press_button(self.app, btn)
btn.pressed.connect(self.on_delete_pressed)
return btn
def project_btn(self):
"""
Creates a QPushButton than can be used to add selected articles to an existing figshare project
:return:
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/Insert Row Below-48.png')))
btn.setToolTip('Add selection to upload queue')
btn.setToolTipDuration(1)
press_button(self.app, btn)
btn.pressed.connect(self.on_project_pressed)
self.proj_btn = btn
return self.proj_btn
def edit_btn(self):
"""
Creates a QPushButton that opens the metadata edit window for the selected articles.
:return:
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/Pencil-52.png')))
btn.setToolTip('Edit metadata of selected articles')
btn.setToolTipDuration(1)
press_button(self.app, btn)
btn.pressed.connect(self.on_edit_pressed)
btn.setEnabled(False)
self.edit_btn = btn
return self.edit_btn
#####
# Widget Actions
#####
@pyqtSlot(bool)
def check_edit(self):
"""
:return:
"""
if self.article_tree.tree.topLevelItemCount() == 0:
self.disable_edit()
else:
self.enable_edit()
def enable_edit(self):
"""
Enables the edit QPushButton
:return:
"""
self.edit_btn.setEnabled(True)
def disable_edit(self):
"""
Disables the edit QPushButton
:return:
"""
self.edit_btn.setEnabled(False)
def on_delete_pressed(self):
"""
Called when the delete article button is pressed
:return:
"""
selection_ids = self.article_tree.get_selection()
for article_id in selection_ids:
# Remove article from the set of tree articles
self.article_tree.article_ids.remove(article_id)
# Remove article from the dictionary of local articles
del(self.parent.local_articles[article_id])
# Get the doc num for the article in the index
results = self.parent.local_article_index.perform_search(schema='local_articles', field='id',
query=article_id)
# Check that returned results explicitly match the article id. If so them remove the document from the index
for doc_num, val_dict in results.items():
if val_dict['id'] == article_id:
self.parent.local_article_index.removeDocument(schema='local_articles', docnum=doc_num)
# Re-fill the tree
        self.article_tree.fill_tree(self.article_tree.tree_headers, self.article_tree.article_ids)
self.check_edit()
def on_edit_pressed(self):
"""
Called when the edit article button is pressed. Opens the local article edit window
:return:
"""
# Get the list of selected articles
selected_articles = list(self.article_tree.get_selection())
if selected_articles != []:
# Close the article list window
self.parent.open_windows.remove('data_articles_window')
self.parent.data_articles_window.close()
# Create and open the article edit window
self.parent.open_windows.add('local_article_edit_window')
self.parent.local_article_edit_window = LocalMetadataWindow(self.app, self.token, self.parent,
selected_articles)
self.parent.mdi.addSubWindow(self.parent.local_article_edit_window)
self.parent.local_article_edit_window.show()
def on_project_pressed(self):
"""
Called when the add to upload queue button is pressed
:return:
"""
article_id_set = self.article_tree.get_selection()
upload_queue = self.parent.figshare_add_window.upload_queue
worker = TreeAddWorker(article_id_set)
worker.sig_step.connect(upload_queue.add_to_tree)
queue_add_thread = QThread()
self.__threads.append((queue_add_thread, worker))
worker.moveToThread(queue_add_thread)
queue_add_thread.started.connect(worker.work)
queue_add_thread.start()
<file_sep>/collections_windows/collection_info_window.py
"""
Collection Info Window
This window gives an overview of the metadata associated with a given Figshare Collection.
ToDo:
* Abstract project info window, then sub class to here.
"""
# Standard Imports
from requests import HTTPError
# PyQt Imports
from PyQt5.QtWidgets import (QHBoxLayout, QVBoxLayout, QWidget, QGridLayout, QScrollArea, QMessageBox)
# Figshare Desktop Imports
from Figshare_desktop.abstract_windows.object_info_window import ObjectInfoWindow
from Figshare_desktop.collections_windows.collection_articles_window import CollectionsArticlesWindow
from Figshare_desktop.custom_widgets.button_field import QButtonField
from Figshare_desktop.custom_widgets.categories_field import CategoriesField
from Figshare_desktop.custom_widgets.author_field import AuthorField
# Figshare API imports
from figshare_interface.figshare_structures.collections import Collections
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class CollectionInfoWindow(ObjectInfoWindow):
"""
Creates a window to view the information of a given Figshare collection.
"""
def initFig(self):
"""
Get information on the given collection from Figshare.
Args:
Returns:
object_info (dict): Dictionary containing key, value pairs of metadata on the collection.
"""
collections = Collections(self.token)
object_info = collections.get_info(self.object_id)
return object_info
def initUI(self):
"""
        Initializes the GUI
:return:
"""
self.format_window()
# Create a Horizontal and Vertical Box layout
self.hbox = QHBoxLayout()
self.vbox = QVBoxLayout()
# Add the title to the vertical layout
self.vbox.addLayout(self.title_hbox(self.object_info['title']))
# Create a vertical layout for the save and articles buttons
self.buttons_layout = QVBoxLayout()
self.buttons_layout.addWidget(self.save_changes_button())
self.buttons_layout.addWidget(self.articles_button())
# Add the Buttons Layout to the horizontal layout
self.hbox.addLayout(self.buttons_layout)
# Add the description layout to the horizontal layout
self.hbox.addLayout(self.textedit_vbox('Description', self.object_info['description']))
# Add a separator to the horizontal layout
self.hbox.addWidget(self.v_separator())
# Add the project info grid to the horizontal layout
self.hbox.addWidget(self.info_grid())
# Add the horizontal layout to the vertical layout
self.vbox.addLayout(self.hbox)
# Create a central widget for the projects window
window_widget = QWidget()
# Add the vertical box layout
window_widget.setLayout(self.vbox)
# Set the projects window widget
self.setWidget(window_widget)
# Window Widgets
# ==============
def info_grid(self):
"""
Creates a grid layout with detailed information on the collection.
Returns:
grid (QGridLayout): Layout containing all the information fields and labels.
"""
scroll_area = QScrollArea()
grid = QGridLayout()
        scroll_area.setMaximumWidth(int(self.geometry().width() * 0.5))
        scroll_area.setMinimumWidth(int(self.geometry().width() * 0.5))
# Create Labels
# -------------
# Collection ID Label
id_lbl = self.create_label('Collection ID')
# Published Label
pub_lbl = self.create_label('Published')
# Version Label
ver_lbl = self.create_label('Version')
# Group Label
group_lbl = self.create_label('Group')
# Authors Label
auth_lbl = self.create_label('Authors')
# Categories Label
cat_lbl = self.create_label('Categories')
# Tags Label
tag_lbl = self.create_label('Tags')
# References Label
ref_lbl = self.create_label('References')
# Article Count Label
count_lbl = self.create_label('Article Count')
# Citation Label
cit_lbl = self.create_label('Citation')
# Create Edit Fields
# ------------------
# Collection ID Field
id_field = self.create_label(str(self.object_id))
# Published Field
published_date = self.object_info['published_date']
if published_date is None:
published_date = 'Private'
pub_field = self.create_label(published_date)
# Version Field
ver_field = self.create_label("v{}".format(self.object_info['version']))
# Group Field
group_field = self.create_label(str(self.object_info['group_id']))
# Authors Field
auth_field = AuthorField(parent=scroll_area)
for auth_dict in self.object_info['authors']:
auth_field.add_tag(auth_dict)
self.auth_field = auth_field
# Categories Field
cat_field = CategoriesField(self.parent.id_categories, self.parent.name_categories, parent=scroll_area)
for cat in self.object_info['categories']:
cat_field.add_tag(cat)
self.cat_field = cat_field
# Tags Field
tag_field = QButtonField(scroll_area)
for tag in self.object_info['tags']:
tag_field.add_tag(tag)
self.tag_field = tag_field
# References Field
ref_field = QButtonField(scroll_area)
for ref in self.object_info['references']:
ref_field.add_tag(ref)
self.ref_field = ref_field
# Article Count Field
article_field = self.create_label(str(self.object_info['articles_count']))
# Citation Field
citation_field = self.create_label(self.object_info['citation'])
# Create Grid
# -----------
grid.addWidget(id_lbl, 0, 0)
grid.addWidget(id_field, 0, 1)
grid.addWidget(pub_lbl, 0, 2)
grid.addWidget(pub_field, 0, 3)
grid.addWidget(group_lbl, 1, 0)
grid.addWidget(group_field, 1, 1)
grid.addWidget(ver_lbl, 1, 2)
grid.addWidget(ver_field, 1, 3)
grid.addWidget(auth_lbl, 2, 0)
grid.addWidget(auth_field, 2, 1, 1, 3)
grid.addWidget(cat_lbl, 3, 0)
grid.addWidget(cat_field, 3, 1, 1, 3)
grid.addWidget(tag_lbl, 4, 0)
grid.addWidget(tag_field, 4, 1, 1, 3)
grid.addWidget(ref_lbl, 5, 0)
grid.addWidget(ref_field, 5, 1, 1, 3)
grid_widget = QWidget()
grid_widget.setLayout(grid)
scroll_area.setWidget(grid_widget)
return scroll_area
# Widget Actions
# ==============
def on_save_pressed(self):
"""
Called when the save button is pressed.
:return:
"""
update_dict = {}
# Check Title
current_title = self.object_info['title']
new_title = self.title_wid.text()
if new_title != current_title:
update_dict['title'] = new_title
# Check Description
current_description = self.object_info['description']
new_description = self.desc_wid.toPlainText()
if new_description != current_description:
update_dict['description'] = new_description
# Check Authors
current_authors = self.object_info['authors']
new_authors = self.auth_field.get_tags()
if new_authors != []:
update_dict['authors'] = new_authors
# Check Categories
current_categories = self.object_info['categories']
categories = self.cat_field.get_tags()
new_categories = []
for cat in categories:
try:
cat = int(cat)
new_categories.append(cat)
            except ValueError:
                for cat_id, cat_name in self.parent.id_categories.items():
if cat_name == cat:
new_categories.append(cat_id)
break
if new_categories != []:
update_dict['categories'] = new_categories
# Check Tags
current_tags = self.object_info['tags']
new_tags = self.tag_field.get_tags()
if new_tags != current_tags:
update_dict['tags'] = new_tags
# Check References
current_references = self.object_info['references']
new_references = self.ref_field.get_tags()
if new_references != current_references:
update_dict['references'] = new_references
# Update Collection
if update_dict != {}:
resp_code, resp_data = self.update_object(update_dict)
if resp_code != 205:
self.error_message_box(resp_data)
else:
self.success_message_box()
def success_message_box(self):
"""
Creates an message dialog box to confirm a successful update.
Returns:
"""
msg_box = QMessageBox()
msg_box.setIcon(QMessageBox.Information)
msg_box.setText('Updated collection: \n{}'.format(self.object_info['title']))
msg_box.setStandardButtons(QMessageBox.Ok)
msg_box.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msg_box))
msg_box.show()
def error_message_box(self, errors):
"""
Creates an error message dialog box for errors raised during an update.
Args:
errors: list of errors.
Returns:
"""
msg_box = QMessageBox()
msg_box.setIcon(QMessageBox.Warning)
msg_box.setText("Errors in updating Collection:\n{}".format(self.object_info['title']))
detailed_msg = ""
for err in errors:
detailed_msg += "{code}: {msg}\n\n".format(code=err[0], msg=err[1])
msg_box.setDetailedText(detailed_msg)
msg_box.setStandardButtons(QMessageBox.Ok)
msg_box.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msg_box))
msg_box.show()
def reopen_object_window(self):
"""
Called to close and reopen the collection info window.
Returns:
"""
for i in range(2):
self.parent.section_window.on_collections_btn_pressed()
self.parent.collections_window.on_object_pressed(self.object_id)
def on_articles_pressed(self):
"""
Called when the articles button is pressed. This will open or close the articles window.
Returns:
"""
if 'collection_articles_window' in self.open_windows:
self.open_windows.remove('collection_articles_window')
self.parent.collection_articles_window.close()
elif 'article_edit_window' in self.open_windows:
self.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
else:
self.open_windows.add('collection_articles_window')
self.parent.collection_articles_window = CollectionsArticlesWindow(self.app, self.token, self.parent,
self.object_id)
self.parent.mdi.addSubWindow(self.parent.collection_articles_window)
self.parent.collection_articles_window.show()
# Figshare API Functions
# ======================
def update_object(self, update_dict: dict):
"""
Uploads changes to the Figshare object.
Args:
            update_dict: Dictionary with key, value pairs of collection info to update.
Returns:
"""
collections = Collections(self.token)
resp_code, resp_data = collections.update(self.object_id, update_dict)
return resp_code, resp_data
<file_sep>/abstract_windows/object_info_window.py
"""
"""
# Standard Imports
import os
from requests import HTTPError
# PyQt Imports
from PyQt5.QtWidgets import (QMdiSubWindow, QLabel, QPushButton, QTextEdit, QGridLayout, QMainWindow, QApplication,
QLineEdit, QVBoxLayout, QSizePolicy, QMessageBox, QHBoxLayout, QWidget, QFrame)
from PyQt5.QtGui import (QIcon)
from PyQt5.QtCore import (Qt)
# Figshare Desktop Imports
from Figshare_desktop.formatting.formatting import (grid_title, press_button, grid_label, label_font, grid_edit,
checkable_button)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class ObjectInfoWindow(QMdiSubWindow):
"""
Abstract class for viewing information on a figshare object.
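    Child classes are expected to override the methods left as pass stubs below:
    initFig(), on_articles_pressed(), on_save_pressed(), reopen_object_window(),
    update_object() and invite_collaborators().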
"""
def __init__(self, app: QApplication, OAuth_token: str, parent: QMainWindow, object_id: int):
"""
Args:
app: Application instance of the program.
OAuth_token: Authentication token obtained at login.
parent: Reference to the framing window where useful variables are kept.
"""
super().__init__()
# Create class variables of init args
self.app = app
self.token = OAuth_token
self.parent = parent
self.object_id = object_id
        # Create shortened reference to the open windows set
self.open_windows = self.parent.open_windows
self.object_info = self.initFig()
# Initialise the UI
self.initUI()
def initFig(self):
"""
        Initializes Figshare information for the given object
:return:
"""
pass
def initUI(self):
"""
        Initializes the GUI
:return:
"""
self.format_window()
# Create a Horizontal and Vertical Box layout
self.hbox = QHBoxLayout()
self.vbox = QVBoxLayout()
# Create a central widget for the projects window
window_widget = QWidget()
# Add the vertical box layout
window_widget.setLayout(self.vbox)
# Set the projects window widget
self.setWidget(window_widget)
# Window Formatting
# =================
def format_window(self):
"""
Format the current window to the available space in primary screen.
Returns:
None
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) / 3)
self.setGeometry(x0, y0, w, h)
# Remove frame from projects window
self.setWindowFlags(Qt.FramelessWindowHint)
# Window Widgets
# ==============
def create_label(self, label: str):
"""
Creates a QLabel with a default formatting
Args:
label: String to be displayed in the label
Return:
lbl (QLabel): label widget.
"""
lbl = QLabel(label)
grid_label(self.app, lbl)
return lbl
def create_lineedit(self):
"""
Creates a QLineEdit with a default formatting
Args:
Returns:
edit (QLineEdit): Line edit widget.
"""
edit = QLineEdit()
grid_edit(self.app, edit)
return edit
def create_textedit(self):
"""
Creates a QTextEdit with a default formatting
Args:
Returns:
edit (QTextEdit): Text edit widget.
"""
edit = QTextEdit()
grid_edit(self.app, edit)
return edit
def title_hbox(self, title: str):
"""
Creates a Horizontal box layout containing the title lineedit and an edit button
:return: QHBoxLayout
"""
# Create Edit/Label
title_edit = QLineEdit(title)
grid_title(self.app, title_edit)
title_edit.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
title_edit.setEnabled(False)
self.title_wid = title_edit
# Create Edit Button
edit_btn = QPushButton()
edit_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Pencil-52.png')))
edit_btn.setMaximumWidth(self.geometry().width() / 50)
edit_btn.setMaximumHeight(self.geometry().width() / 50)
checkable_button(self.app, edit_btn)
# Add an action to the edit button
edit_btn.clicked[bool].connect(lambda: self.on_edit_pressed(title_edit))
# Create Layout
hbox = QHBoxLayout()
hbox.addWidget(title_edit)
hbox.addWidget(edit_btn)
return hbox
def textedit_vbox(self, label: str, text: str):
"""
Creates a Vertical box layout containing the description label and edit button and a textedit field
:return: QVBoxLayout
"""
# Create the Description Label
lbl = self.create_label(label)
lbl.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
# Create Edit Button
edit_btn = QPushButton()
edit_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Pencil-52.png')))
edit_btn.setMaximumWidth(self.geometry().width() / 50)
edit_btn.setMaximumHeight(self.geometry().width() / 50)
checkable_button(self.app, edit_btn)
# Create TextEdit
text_edit = QTextEdit()
if text is not None and text != '':
text_edit.setText(text)
grid_edit(self.app, text_edit)
text_edit.setEnabled(False)
self.desc_wid = text_edit
# Add an action to the edit button
edit_btn.clicked[bool].connect(lambda: self.on_edit_pressed(text_edit))
# Create a horizontal layout for the label and edit button
hbox = QHBoxLayout()
hbox.addWidget(lbl)
hbox.addWidget(edit_btn)
# Create a Vertical layout to hold the label layout and the edit field
vbox = QVBoxLayout()
vbox.addLayout(hbox)
vbox.addWidget(text_edit)
return vbox
def articles_button(self):
"""
Creates a click button to open and close the project articles window
:return: QPushButton
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Magazine-50.png')))
checkable_button(self.app, btn)
btn.setMaximumWidth(self.geometry().width() / 20)
btn.setMinimumWidth(self.geometry().width() / 20)
btn.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Expanding)
btn.setToolTip('Open Articles Window')
btn.setToolTipDuration(1000)
btn.clicked[bool].connect(self.on_articles_pressed)
return btn
def save_changes_button(self):
"""
Creates a save changes button to push edits to Figshare
:return: QMessageWindow
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.abspath(__file__ + '/../../img/figshare_upload.png')))
btn.setMaximumWidth(self.geometry().width() / 20)
btn.setMinimumWidth(self.geometry().width() / 20)
btn.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Expanding)
btn.setToolTip('Save Changes to Figshare')
btn.setToolTipDuration(1000)
btn.pressed.connect(self.on_save_pressed)
return btn
@staticmethod
def v_separator():
"""
        Creates a vertical separator.
:return: QFrame
"""
sep = QFrame()
sep.setFrameShape(QFrame.VLine)
sep.setFrameShadow(QFrame.Sunken)
return sep
@staticmethod
def h_separator():
"""
Creates a horizontal separator widget.
Returns:
sep (QFrame):
"""
sep = QFrame()
sep.setFrameShape(QFrame.HLine)
sep.setFrameShadow(QFrame.Sunken)
return sep
# Widget Actions
# ==============
def on_articles_pressed(self):
"""
Called when the articles button is pressed. This will open or close the articles window.
:return:
"""
pass
@staticmethod
def on_edit_pressed(edit_field):
"""
Called when a edit button is pressed. This will activate or deactivate the passed edit field
:param edit_field: QLineEdit or QTextEdit
:return:
"""
if edit_field.isEnabled():
edit_field.setEnabled(False)
else:
edit_field.setEnabled(True)
def on_save_pressed(self):
"""
Called when the save button is pressed.
:return:
"""
pass
def reopen_object_window(self):
"""
Closes and reopens the current object window.
Returns:
"""
pass
def on_msgbtn_pressed(self, box: QMessageBox):
"""
Called when an error message button is pressed.
Args:
            box: Message box (success or error) created during the save process.
Returns:
None
"""
box.close()
self.reopen_object_window()
# Figshare API Functions
# ======================
def update_object(self, object_id: int, update_dict: dict):
"""
Uploads changes to the Figshare object.
Args:
object_id: Figshare object ID number.
update_dict: Dictionary with key, value pairs for info to update.
Returns:
"""
pass
def invite_collaborators(self, object_id: int, collaborators: list):
"""
Invites collaborators to a figshare project
        :param object_id: int. Figshare object id number
:param collaborators: List of Dict. Containing either user ids or email addresses
:return:
"""
pass
<file_sep>/data_window/figshare_projects_button.py
"""
"""
import os
from PyQt5.QtWidgets import (QWidget, QPushButton, QLineEdit, QMessageBox, QFileDialog, QAbstractItemView,
QTextEdit, QGridLayout, QHBoxLayout, QVBoxLayout, QSizePolicy, QTreeWidgetItem,
QInputDialog)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, pyqtSlot, pyqtSignal, QObject)
from Figshare_desktop.formatting.formatting import (press_button)
from figshare_interface.figshare_structures.projects import Projects
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class ProjectButton(QWidget):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.initUI()
def initUI(self):
vbox = QVBoxLayout()
# Add project selection button
vbox.addWidget(self.initButton())
self.setLayout(vbox)
def initButton(self):
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/Folder-48.png')))
btn.setToolTip('Select Figshare Project for upload')
btn.setToolTipDuration(2500)
press_button(self.app, btn)
btn.setStyleSheet("background-color: red")
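        # Red indicates that no upload project has been chosen yet; the button is set
        # to green in select_project() once a project has been selected.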
btn.pressed.connect(self.select_project)
self.proj_button = btn
return self.proj_button
def select_project(self):
"""
Called when the select project button is pressed
:return:
"""
projects = Projects(self.token)
project_list = projects.get_list()
titles = []
for project in project_list:
titles.append(project['title'])
project_title, chosen = QInputDialog.getItem(self, 'Choose Figshare Project for upload',
'Choose Figshare Project for upload', titles, 0, editable=False)
if chosen:
for proj in project_list:
if proj['title'] == project_title:
project_id = proj['id']
self.parent.figshare_add_window.upload_project = project_id
self.proj_button.setStyleSheet("background-color: green")
# Enable to start upload button
self.parent.figshare_add_window.control_widget.enable_start()
<file_sep>/figshare_articles/stm_articles/topography_article.py
"""
"""
from figshare_interface.figshare_structures.projects import Projects
from ..article import Article
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class TopoArticle(Article):
def __init__(self, OAuth_token, project_id, article_id):
# Initialize STM topography metadata dictionary.
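        # Keys cover the STM scan parameters (e.g. gap voltage, tunnelling current,
        # resolution and real-space scan size) together with lab-book style fields
        # such as sample, users, substrate and notes.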
self.stm_topo_metadata = {'type': None,
'vgap': None,
'current': None,
'xres': None,
'yres': None,
'xinc': None,
'yinc': None,
'xreal': None,
'yreal': None,
'unit': None,
'unitxy': None,
'date': None,
'direction': None,
'sample': None,
'users': None,
'substrate': None,
'adsorbate': None,
'prep': None,
'notebook': None,
'notes': None
}
super().__init__(OAuth_token, project_id, article_id)
def gen_stm_topo_metadata(self, input_dict):
"""
Fill values in the stm_topo_metadata dict from an input dictionary.
:param input_dict: dict. Only extracts values from keys in both stm_topo_metadata and input_dict dictionaries.
:return:
"""
for key in input_dict:
if key in self.stm_topo_metadata:
if input_dict[key] != 'None' and input_dict[key] is not None:
self.stm_topo_metadata[key] = input_dict[key]
def fill_info(self):
"""
Fill in the metadata dictionaries.
:return:
"""
project = Projects(self.token)
basic_info = project.get_article(self.project_id, self.article_id)
stm_topo_info = self.recreate_custom_fields(basic_info['custom_fields'])
self.gen_figshare_metadata(basic_info)
self.gen_stm_topo_metadata(stm_topo_info)
self.check_basic()
def update_info(self, input_dict):
self.gen_figshare_metadata(input_dict)
self.gen_stm_topo_metadata(input_dict)
self.check_basic()
def input_dicts(self):
return [self.figshare_metadata, self.figshare_desktop_metadata, self.stm_topo_metadata]
def check_file_specific(self):
pass
def get_upload_dict(self):
"""
        Takes the different metadata dictionaries and ensures that their contents are fit for upload to Figshare.
:return:
"""
self.check_basic()
ignore_list = {'id', 'size', 'version', 'created_date', 'modified_date', 'published_date', 'up_to_date',
'status', 'group_id'}
upload_dict = {}
for key, value in self.figshare_metadata.items():
if key not in ignore_list:
if value is not None:
upload_dict[key] = value
upload_dict['custom_fields'] = {}
for key, value in self.stm_topo_metadata.items():
if value is not None:
upload_dict['custom_fields'][key] = value
return upload_dict
def get_type(self):
return 'stm_topo'
def index_schema(self):
"""
        Creates a dictionary from which a Whoosh index schema can be built.
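        Each value is assumed to be a (field type, stored) pair, e.g. 'vgap': ('numeric', True)
        would map to a stored Whoosh NUMERIC field; the schema builder itself lives
        outside this class.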
:return:
"""
schema_dict = {'type': ('id', True),
'vgap': ('numeric', True),
'current': ('numeric', True),
'xres': ('numeric', True),
'yres': ('numeric', True),
'xinc': ('numeric', True),
'yinc': ('numeric', True),
'xreal': ('numeric', True),
'yreal': ('numeric', True),
'unit': ('id', True),
'unitxy': ('id', True),
'date': ('text', True),
'direction': ('keyword', True),
'sample': ('text', True),
'users': ('keyword', True),
'substrate': ('text', True),
'adsorbate': ('text', True),
'prep': ('text', True),
'notebook': ('keyword', True),
'notes': ('text', True)
}
return schema_dict
<file_sep>/data_window/figshare_collections_button.py
"""
"""
import os
from PyQt5.QtWidgets import (QWidget, QPushButton, QLineEdit, QMessageBox, QFileDialog, QAbstractItemView,
QTextEdit, QGridLayout, QHBoxLayout, QVBoxLayout, QSizePolicy, QTreeWidgetItem,
QInputDialog)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, pyqtSlot, pyqtSignal, QObject)
from Figshare_desktop.formatting.formatting import (press_button)
from figshare_interface.figshare_structures.collections import Collections
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class CollectionButton(QWidget):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.initUI()
def initUI(self):
vbox = QVBoxLayout()
# Add project selection button
vbox.addWidget(self.initButton())
self.setLayout(vbox)
def initButton(self):
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/Folder-48.png')))
btn.setToolTip('Select Figshare Collection for upload')
btn.setToolTipDuration(2500)
press_button(self.app, btn)
btn.pressed.connect(self.select_collection)
self.coll_button = btn
return self.coll_button
def select_collection(self):
"""
Called when the select project button is pressed
:return:
"""
collection = Collections(self.token)
collection_list = collection.get_list()
titles = ['']
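        # The leading blank entry lets the dialog be confirmed without choosing a
        # collection, in which case upload_collection is left unchanged.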
for collection in collection_list:
titles.append(collection['title'])
collection_title, chosen = QInputDialog.getItem(self, 'Choose Figshare Collection for upload',
'Choose Figshare Collection for upload', titles, 0,
editable=False)
if chosen:
for coll in collection_list:
if coll['title'] == collection_title:
collection_id = coll['id']
self.parent.figshare_add_window.upload_collection = collection_id
self.coll_button.setText(collection_title)
<file_sep>/article_edit_window/article_edit_window_old.py
"""
"""
import os
from PyQt5.QtWidgets import (QWidget, QSizePolicy, QPushButton, QLabel, QHBoxLayout, QVBoxLayout, QTabWidget,
QGridLayout, QTextEdit, QLineEdit, QScrollArea, QButtonGroup, QComboBox)
from PyQt5.QtGui import (QIcon, QFont)
from PyQt5.QtCore import (Qt, QPoint)
from figshare_interface.figshare_structures.projects import Projects
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class ArticleEditWindow(QWidget):
def __init__(self, app, OAuth_token, main_window, parent_window_loc, article_ids, project_id=None,
collection_id=None):
super().__init__()
self.app = app
self.token = OAuth_token
self.main_window = main_window
self.piw_loc = parent_window_loc
self.articles_ids = article_ids
if project_id is not None:
self.project_id = project_id
if collection_id is not None:
self.collection_id = collection_id
self.initUI()
def initUI(self):
self.formatWindow()
self.label_font = self.article_label_font()
self.edit_font = self.article_edit_font()
self.button_groups = {}
hbox = QHBoxLayout()
self.tab_layout = QTabWidget()
hbox.addLayout(self.confirmation_layout())
self.basic_info_widget = self.basic_info_layout()
self.decide_basic_layout(self.articles_ids)
self.tab_layout.addTab(self.basic_info_widget, 'Figshare Metadata')
self.file_specific_layout = self.decide_file_layout(self.articles_ids)
if self.file_specific_layout is not None:
self.tab_layout.addTab(self.file_specific_layout, 'File Metadata')
hbox.addWidget(self.tab_layout)
self.setLayout(hbox)
def formatWindow(self):
piw_x0 = self.piw_loc.x()
piw_y0 = self.piw_loc.y()
piw_width = self.piw_loc.width()
piw_height = self.piw_loc.height()
screen = self.app.primaryScreen().availableGeometry()
x0 = piw_x0
y0 = piw_y0 + piw_height + 10
w_width = screen.width() - x0
w_height = screen.height() / 3
self.setGeometry(x0, y0, w_width, w_height)
self.setWindowFlags(Qt.FramelessWindowHint)
def mousePressEvent(self, event):
self.oldPos = event.globalPos()
def mouseMoveEvent(self, event):
delta = QPoint(event.globalPos() - self.oldPos)
self.move(self.x() + delta.x(), self.y() + delta.y())
self.oldPos = event.globalPos()
def confirmation_layout(self):
sizepolicy = QSizePolicy()
        sizepolicy.setHorizontalPolicy(QSizePolicy.Preferred)
        sizepolicy.setVerticalPolicy(QSizePolicy.Expanding)
btn_exit = QPushButton()
btn_exit.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/exit.png')))
btn_exit.setSizePolicy(sizepolicy)
btn_exit.pressed.connect(self.on_exit_pressed)
btn_save = QPushButton()
btn_save.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/figshare_upload.png')))
btn_save.setSizePolicy(sizepolicy)
btn_save.pressed.connect(self.on_save_pressed)
vbox = QVBoxLayout()
vbox.addWidget(btn_exit)
vbox.addWidget(btn_save)
return vbox
def basic_info_layout(self):
"""
Create a Layout containing editable fields for the basic figshare article metadata.
:return:
"""
# Layout
basic_info_widget = QWidget()
vbox = QVBoxLayout(basic_info_widget)
# Title
self.add_lineedit(vbox, 'title', '')
# Description
self.add_textedit(vbox, 'description', '')
# Tags
self.add_buttonlist(vbox, 'tags', [''])
# Categories
self.add_buttonlist(vbox, 'categories', [''])
# References
self.add_buttonlist(vbox, 'references', [''])
# Authors
self.add_buttonlist(vbox, 'authors', [''])
# Defined Type
self.add_dropdownlist(vbox, 'defined_type', [''])
# Funding Label
self.add_textedit(vbox, 'funding', '')
# License
self.add_dropdownlist(vbox, 'license', [''])
scroll_area = QScrollArea()
scroll_area.setWidget(basic_info_widget)
scroll_area.setWidgetResizable(True)
return scroll_area
def decide_basic_layout(self, article_ids):
article = self.main_window.articles[article_ids[0]]
basic_info_dict = article.figshare_metadata
basic_info_layout = self.basic_info_widget.widget().layout()
for widget_pos in range(0, basic_info_layout.count() - 1, 2):
lbl = basic_info_layout.itemAt(widget_pos).widget().text()
edit_widget = basic_info_layout.itemAt(widget_pos + 1).widget()
if edit_widget is None:
edit_widget = basic_info_layout.itemAt(widget_pos + 1).layout()
edit_widget_type = type(edit_widget)
if edit_widget_type is QLineEdit:
if len(article_ids) > 1:
if lbl == 'title':
edit_widget.setText('Multiple Files')
edit_widget.setReadOnly(True)
else:
edit_widget.setText(basic_info_dict[lbl])
elif edit_widget_type is QTextEdit:
edit_widget.setText(basic_info_dict[lbl])
elif edit_widget_type is QHBoxLayout: # This is a button list
info = basic_info_dict[lbl]
button_group = self.button_groups[lbl]
info_strings = []
info_type = type(info)
if info_type is list and info != []:
list_type = type(info[0])
if list_type is dict:
for item in info:
for value in item.values():
info_strings.append(str(value))
elif list_type is str:
info_strings = info
elif list_type is int:
for item in info:
info_strings.append(str(item))
else:
pass
elif info_type is str:
info_strings = [info]
for item in info_strings:
self.on_add_button_to_list(button_group, edit_widget, None, item)
elif edit_widget_type is QComboBox:
edit_widget.clear()
if lbl == 'defined_type':
info_string = basic_info_dict[lbl]
type_dict = {1: 'figure', 2: 'media', 3: 'dataset', 4: 'fileset', 5: 'poster', 6: 'paper',
7: 'presentation', 8: 'thesis', 9: 'code', 10: 'metadata'}
for info_pos in range(len(type_dict)):
item = type_dict[info_pos + 1]
edit_widget.addItem(item)
if type(info_string) is int:
edit_widget.setCurrentIndex(info_string)
elif type(info_string) is str:
for key, value in type_dict.items():
if value == info_string:
edit_widget.setCurrentIndex(key)
break
elif info_string is None:
edit_widget.setCurrentIndex(3)
elif lbl == 'license':
info_int = basic_info_dict[lbl]
type_list = [None, 'CC BY', 'CC-0', 'MIT', 'GPL', 'GPL-2.0', 'GPL-3.0', 'Apache']
for item in type_list:
edit_widget.addItem(item)
if info_int is None:
edit_widget.setCurrentIndex(0)
else:
edit_widget.setCurrentIndex(info_int)
def file_specific_info_layout(self, article_dicts):
window_size = self.geometry()
# Fonts
# - Label Font
s = window_size.height() / 20
min_s = 10
if s < min_s:
lbl_font_size = min_s
else:
lbl_font_size = s
lbl_font = QFont('SansSerif', lbl_font_size)
lbl_font.setBold(True)
# - Edit Font
s = window_size.height() / 25
min_s = 7
if s < min_s:
edit_font_size = min_s
else:
edit_font_size = s
edit_font = QFont('SansSerif', edit_font_size)
# - Expand Horizontally
horSizePolicy = QSizePolicy()
horSizePolicy.setVerticalPolicy(QSizePolicy.Preferred)
horSizePolicy.setHorizontalPolicy(QSizePolicy.Expanding)
file_info_widget = QWidget()
vbox = QVBoxLayout(file_info_widget)
for dictionary in article_dicts:
for key, value in dictionary.items():
lbl = QLabel(str(key))
lbl.setFont(lbl_font)
lbl.setSizePolicy(horSizePolicy)
vbox.addWidget(lbl)
edit = QLineEdit()
edit.setFont(edit_font)
edit.setText(str(value))
edit.setSizePolicy(horSizePolicy)
vbox.addWidget(edit)
scroll_area = QScrollArea()
scroll_area.setWidget(file_info_widget)
scroll_area.setWidgetResizable(True)
return scroll_area
def known_file_type(self, article_id):
article = self.main_window.articles[article_id]
article_type = article.get_type()
if article_type != 'article':
return article.input_dicts()[2:]
else:
return None
def decide_file_layout(self, articles_ids):
# Local reference to the articles dictionary.
articles = self.main_window.articles
# If more than one article is to be edited check to see if all files are of the same type.
if len(articles_ids) > 1:
# Get the type of the first article.
first_type = articles[articles_ids[0]].get_type()
# Check that all other articles are the same.
for article in articles_ids:
article_type = articles[article].get_type()
# If article is not the same type as the first return None. Else continue.
if article_type != first_type:
return None
# At this point we know all files are the same type, but will have different values for their metadata.
# Here we create a new blank dictionary from the keys of the first article.
article_dict = self.known_file_type(articles_ids[0])
if article_dict is not None:
blank_dict = dict.fromkeys(article_dict[0], '')
return self.file_specific_info_layout([blank_dict])
else:
return None
# If a single article id has been given.
else:
# Find out if the file type is known.
article_dicts = self.known_file_type(articles_ids[0])
# If the file type is know generate a file specific metadata layout.
if article_dicts is not None:
return self.file_specific_info_layout(article_dicts)
# Otherwise return nothing.
else:
return None
def on_exit_pressed(self):
self.close()
self.main_window.centralWidget().projects_window.projects_info_window.article_edit_open = False
self.main_window.centralWidget().projects_window.projects_info_window.on_show_articles_pressed()
def on_save_pressed(self):
tab_layout = self.tab_layout
open_tab_index = tab_layout.currentIndex()
if open_tab_index == 0:
basic_info_dict = {}
basic_info_layout = self.basic_info_widget.widget().layout()
for widget_pos in range(0, basic_info_layout.count() - 1, 2):
lbl = basic_info_layout.itemAt(widget_pos).widget().text()
widget = basic_info_layout.itemAt(widget_pos + 1).widget()
if widget is None:
widget = basic_info_layout.itemAt(widget_pos + 1).layout()
widget_type = type(widget)
if widget_type is QLineEdit:
if len(self.articles_ids) > 1:
if lbl == 'title':
value = None
else:
value = widget.text()
else:
value = widget.text()
elif widget_type is QTextEdit:
value = widget.toPlainText()
elif widget_type is QHBoxLayout:
value = []
for btn_pos in range(0, widget.count() - 2, 1):
btn = widget.itemAt(btn_pos).widget()
value.append(btn.text())
elif widget_type is QComboBox:
value = widget.currentIndex()
                if value != []:
basic_info_dict[lbl] = value
else:
basic_info_dict[lbl] = None
update_dict = {**basic_info_dict}
elif open_tab_index == 1:
if self.file_specific_layout is not None:
file_specific_dict = {}
file_specific_layout = self.file_specific_layout.widget().layout()
for widget_pos in range(0, file_specific_layout.count() - 1, 2):
lbl = file_specific_layout.itemAt(widget_pos).widget().text()
widget = file_specific_layout.itemAt(widget_pos + 1).widget()
widget_type = type(widget)
if widget_type is QLineEdit:
value = widget.text()
elif widget_type is QTextEdit:
value = widget.toPlainText()
                    if value != [] and value != '':
file_specific_dict[lbl] = value
else:
file_specific_dict[lbl] = None
update_dict = {**file_specific_dict}
if len(self.articles_ids) > 1:
articles = self.main_window.articles
for article_id in self.articles_ids:
a_id = int(article_id)
article = articles[a_id]
article.update_info(update_dict)
upload_dict = article.get_upload_dict()
Projects.update_article(self.token, a_id, upload_dict)
private_modified_date = Projects(self.token).get_article(self.project_id, a_id)['modified_date']
article.figshare_metadata['modified_date'] = private_modified_date
article.check_uptodate()
else:
a_id = self.articles_ids[0]
article = self.main_window.articles[a_id]
article.update_info(update_dict)
upload_dict = article.get_upload_dict()
Projects.update_article(self.token, a_id, upload_dict)
private_modified_date = Projects(self.token).get_article(self.project_id, a_id)['modified_date']
article.figshare_metadata['modified_date'] = private_modified_date
article.check_uptodate()
def article_label_font(self):
"""
Returns the font to use for label fields.
:return: QFont.
"""
window_size = self.geometry()
s = window_size.height() / 20
max_s = 14
min_s = 10
if s < min_s:
s = min_s
elif max_s < s:
s = max_s
lbl_font = QFont('SansSerif', s)
lbl_font.setBold(True)
return lbl_font
def article_edit_font(self):
"""
Returns the font to use for edit fields.
:return: QFont.
"""
window_size = self.geometry()
s = window_size.height() / 25
max_s = 12
min_s = 7
if s < min_s:
s = min_s
elif max_s < s:
s = max_s
edit_font = QFont('SansSerif', s)
return edit_font
def add_lineedit(self, layout, label, value, row=None, column=None, rowspan=None, columnspan=None):
"""
Use this to add a QLabel, QLineEdit pair from the given values to the provided layout. If the layout is a
QGridLayout then the row and column values are required.
:param layout: QLayout to add widgets to.
:param label: String to name the line edit field.
:param value: String to fill the line edit field.
Optional
:param row: Grid row to add widgets from.
:param column: Grid column to add widgets to.
:param rowspan: Grid rows to span each widget.
:param columnspan: Grid columns to span each widget.
:return:
"""
# Create the QLabel
lbl = QLabel(label)
lbl.setFont(self.label_font)
# Create the QLineEdit
edit = QLineEdit(value)
edit.setFont(self.edit_font)
if type(layout) is QGridLayout:
if rowspan is not None and columnspan is not None:
layout.addWidget(lbl, row, column, rowspan, columnspan)
layout.addWidget(edit, row + rowspan + 1, column, rowspan, columnspan)
else:
layout.addWidget(lbl, row, column)
layout.addWidget(edit, row + 1, column)
else:
layout.addWidget(lbl)
layout.addWidget(edit)
def add_textedit(self, layout, label, value, row=None, column=None, rowspan=None, columnspan=None):
"""
Use this to add a QLabel, QTextEdit pair from the given values to the provided layout. If the layout is a
QGridLayout then the row and column values are required.
:param layout: QLayout to add widgets to.
:param label: String. Name for label.
:param value: String. Text to fill QTextEdit with.
Optional
:param row: int. QGridLayout row from which to add widgets.
:param column: int. QGridLayout column to add widgets to.
:param rowspan: int. Number of rows to span widgets.
:param columnspan: int. Number of columns to span widgets.
:return:
"""
# Create the QLabel
lbl = QLabel(label)
lbl.setFont(self.label_font)
# Create the QLineEdit
edit = QTextEdit(value)
edit.setFont(self.edit_font)
edit.setTabChangesFocus(True)
if type(layout) is QGridLayout:
if rowspan is not None and columnspan is not None:
layout.addWidget(lbl, row, column, rowspan, columnspan)
layout.addWidget(edit, row + rowspan + 1, column, rowspan, columnspan)
else:
layout.addWidget(lbl, row, column)
layout.addWidget(edit, row + 1, column)
else:
layout.addWidget(lbl)
layout.addWidget(edit)
def add_buttonlist(self, layout, label, values, key=None, row=None, column = None, rowspan=None,
columnspan=None):
"""
Add an array of buttons to a layout that can be used to display arrays of data, e.g. tags or categories.
The ability to add and remove items from the array is also added.
:param layout: QLayout. Layout to add widgets to.
:param label: String. Name of the field, e.g. tags, or categories.
:param values: list. List of either strings or dictionary objects.
Optional
:param key: Dictionary Key. If the values given are in a dictionary object a dictionary key must also be provided.
:param row: int. QGridLayout row from which to add widgets.
:param column: int. QGridLayout column to add widgets to.
:param rowspan: int. Number of rows to span widgets.
:param columnspan: int. Number of columns to span widgets.
:return:
"""
# Create Qlabel for edit field.
lbl = QLabel(label)
lbl.setFont(self.label_font)
# Create layout for buttons.
hbox = QHBoxLayout()
# Define a size policy for the buttons
btn_sizePolicy = QSizePolicy()
btn_sizePolicy.setHorizontalPolicy(QSizePolicy.Expanding)
btn_sizePolicy.setVerticalPolicy(QSizePolicy.Preferred)
# Create a QButtonGroup
btn_group = QButtonGroup()
self.button_groups[label] = btn_group
btn_group_id = 1
# Create buttons and add to group and layout.
list_element_type = type(values[0])
if list_element_type is dict:
for element in values:
btn_str = str(element[key])
btn = QPushButton(btn_str)
btn.setFont(self.edit_font)
btn.setFlat(True)
btn.setSizePolicy(btn_sizePolicy)
btn_group.addButton(btn, btn_group_id)
hbox.addWidget(btn)
btn_group_id += 1
elif list_element_type is list:
for element in values:
btn_str = str(element)
btn = QPushButton(btn_str)
btn.setFont(self.edit_font)
btn.setSizePolicy(btn_sizePolicy)
btn_group.addButton(btn, btn_group_id)
hbox.addWidget(btn)
btn_group_id += 1
txt_sizePolicy = QSizePolicy()
txt_sizePolicy.setVerticalPolicy(QSizePolicy.Preferred)
txt_sizePolicy.setHorizontalPolicy(QSizePolicy.Preferred)
new_btn = QTextEdit()
new_btn.setFont(self.edit_font)
new_btn.setSizePolicy(txt_sizePolicy)
hbox.addWidget(new_btn)
options_layout = QVBoxLayout()
options_btn_size_policy = QSizePolicy()
options_btn_size_policy.setHorizontalPolicy(QSizePolicy.Preferred)
options_btn_size_policy.setVerticalPolicy(QSizePolicy.Expanding)
delete_btn = QPushButton()
delete_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/exit.png')))
delete_btn.setSizePolicy(options_btn_size_policy)
delete_btn.pressed.connect(lambda: self.on_delete_button_from_list(btn_group, hbox))
options_layout.addWidget(delete_btn)
add_btn = QPushButton()
add_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Plus-50.png')))
add_btn.setSizePolicy(options_btn_size_policy)
add_btn.pressed.connect(lambda: self.on_add_button_to_list(btn_group, hbox, new_btn))
options_layout.addWidget(add_btn)
hbox.addLayout(options_layout)
if type(layout) is QGridLayout:
if rowspan is not None and columnspan is not None:
layout.addWidget(lbl, row, column, rowspan, columnspan)
layout.addLayout(hbox, row + rowspan + 1, column, rowspan, columnspan)
else:
layout.addWidget(lbl, row, column)
layout.addLayout(hbox, row + 1, column)
else:
layout.addWidget(lbl)
layout.addLayout(hbox)
def on_delete_button_from_list(self, button_group, layout):
"""
Removes a button from a layout and button group.
:param button_group: QButtonGroup of button to be deleted.
:param layout: QLayout button is in.
:return:
"""
btn_to_delete = button_group.checkedButton()
button_group.removeButton(btn_to_delete)
layout.removeWidget(btn_to_delete)
btn_to_delete.deleteLater()
def on_add_button_to_list(self, button_group, layout, textedit, overide_str=None):
"""
Adds a button to a specified button group and layout.
:param button_group: QButtonGroup button is to be added to.
:param layout: QLayout button is to be added to.
        :param textedit: QTextEdit containing the new button string.
        :param overide_str: Optional string to use instead of the textedit contents.
:return:
"""
if overide_str is not None:
new_btn_str = overide_str
else:
new_btn_str = textedit.toPlainText()
textedit.clear()
new_btn = QPushButton(new_btn_str)
new_btn.setFont(self.edit_font)
new_btn.setFlat(True)
new_btn.setCheckable(True)
new_btn.toggle()
size_policy = QSizePolicy()
size_policy.setHorizontalPolicy(QSizePolicy.Expanding)
size_policy.setVerticalPolicy(QSizePolicy.Preferred)
new_btn.setSizePolicy(size_policy)
button_group.addButton(new_btn)
inset_pos = layout.count() - 2
layout.insertWidget(inset_pos, new_btn)
def add_dropdownlist(self, layout, label, values, row=None, column=None, rowspan=None, columnspan=None):
"""
Adds a drop down list to the given layout.
:param layout: QLayout to add widget to.
:param label: String containing the list title.
:param values: List containing dropdown items.
Optional
:param row: int. If layout is a QGridLayout then the row must be given.
:param column: int. If the layout is a QGridLayout then the column must be given.
:param rowspan: int. For how many rows widget will span in QGridLayout.
:param columnspan: int. For how many columns widget will span in QGridLayout.
:return:
"""
size_policy = QSizePolicy()
size_policy.setHorizontalPolicy(QSizePolicy.Expanding)
size_policy.setVerticalPolicy(QSizePolicy.Preferred)
lbl = QLabel(label)
lbl.setFont(self.label_font)
lbl.setSizePolicy(size_policy)
drop_menu = QComboBox()
drop_menu.addItems(values)
drop_menu.setFont(self.edit_font)
drop_menu.setSizePolicy(size_policy)
if type(layout) is QGridLayout:
if rowspan is not None and columnspan is not None:
layout.addWidget(lbl, row, column, rowspan, columnspan)
layout.addWidget(drop_menu, row + rowspan + 1, column, rowspan, columnspan)
else:
layout.addWidget(lbl, row, column)
layout.addWidget(drop_menu, row + 1, column)
else:
layout.addWidget(lbl)
layout.addWidget(drop_menu)
<file_sep>/main_window/framing_window.py
"""
"""
import os
import sys
from PyQt5.QtWidgets import (QMainWindow, QMdiArea, QAction, qApp)
from PyQt5.QtGui import (QIcon, QFont, QKeySequence)
# Figshare API Imports
from figshare_interface.http_requests.figshare_requests import issue_request
from ..formatting.formatting import scaling_ratio
from .section_window import sectionWindow
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class MainWindow(QMainWindow):
"""
"""
def __init__(self, app, OAuth_token):
"""
Initialization of the Main window
:param app:
:param OAuth_token:
"""
super().__init__()
self.app = app
self.token = OAuth_token
self.open_windows = set()
# Menu window
self.section_window = None
# Projects Windows
self.projects_window = None
self.new_project_window = None
self.project_info_window = None
self.project_articles_window = None
self.article_edit_window = None
# Collection Windows
self.collections_window = None
self.new_collection_window = None
self.collection_info_window = None
self.collection_articles_window = None
# Local Data Windows
self.local_data_window = None
self.data_articles_window = None
self.local_article_edit_window = None
self.local_article_index = None
self.figshare_article_index = None
self.initFig()
self.initUI()
def initFig(self):
"""
Initialization of Figshare data
:return:
"""
self.figshare_articles = {}
self.local_articles = {}
self.next_local_id = 0
self.id_categories, self.name_categories = self.get_figshare_cats()
def initUI(self):
"""
User Interface initialization
"""
# Create a multiple document interface object
self.mdi = QMdiArea()
# Set the main window central widget as the MDI area
self.setCentralWidget(self.mdi)
self.format_window()
self.setWindowTitle('Figshare Desktop')
self.setWindowIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/figshare_logo.png')))
self.menu_bar()
# Add Section Window
self.section_window = sectionWindow(self.app, self.token, parent=self)
self.mdi.addSubWindow(self.section_window)
self.section_window.show()
def format_window(self):
"""
Maximizes the main window
"""
self.showMaximized()
geom = self.geometry()
x0 = geom.x()
y0 = geom.y()
geom = self.app.primaryScreen().availableGeometry()
w = geom.width() - x0
h = geom.height() - y0
self.setGeometry(x0, y0, w, h)
def menu_bar(self):
"""
"""
bar = self.menuBar()
file = bar.addMenu('&File')
file.addAction(self.exitAction())
def exitAction(self):
"""
:return: QAction
"""
self.exit_action = QAction(QIcon(os.path.abspath(__file__ + '/../..' + '/img/exit.png')), '&Exit', self)
self.exit_action.setShortcut('Ctrl+Q')
self.exit_action.triggered.connect(qApp.quit)
return self.exit_action
# Figshare API Functions
# ======================
def get_figshare_cats(self):
"""
        Creates dictionaries mapping Figshare category ids to names, and names to ids.
        Returns:
            id_dict (dict): category id -> category name pairs.
            name_dict (dict): category name -> category id pairs.
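            For example (values illustrative only), id_dict might contain
            {1234: 'Physics'} while name_dict contains {'Physics': 1234}.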
"""
# Get a dictionary of categories from Figshare with id and name pairs
allowed_cats = issue_request(method='GET', endpoint='categories', token=self.token)
id_dict = {}
name_dict = {}
for cat in allowed_cats:
id_dict[cat['id']] = cat['title']
name_dict[cat['title']] = cat['id']
return id_dict, name_dict
<file_sep>/projects_windows/project_info_window.py
"""
"""
import os
import math
from requests import HTTPError
from PyQt5.QtWidgets import (QWidget, QLabel, QPushButton, QLineEdit, QMessageBox, QMainWindow, QMdiSubWindow,
QTextEdit, QGridLayout, QHBoxLayout, QVBoxLayout, QSizePolicy, QFrame)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, QPoint)
from Figshare_desktop.formatting.formatting import (label_font)
from Figshare_desktop.custom_widgets.button_field import QButtonField
from Figshare_desktop.formatting.formatting import (grid_label, grid_edit, checkable_button, grid_title)
from Figshare_desktop.projects_windows.articles_window import ProjectsArticlesWindow
from figshare_interface import (Groups, Projects)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class ProjectInfoWindow(QMdiSubWindow):
def __init__(self, app, OAuth_token, parent, project_id):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.project_id = project_id
self.open_windows = self.parent.open_windows
self.initFig(self.project_id)
self.initUI()
def initFig(self, project_id):
"""
        Initializes Figshare information for the given project
:return:
"""
projects = Projects(self.token)
self.project_info = projects.get_info(project_id)
def initUI(self):
"""
        Initializes the GUI
:return:
"""
self.format_window()
# Create a Horizontal and Vertical Box layout
self.hbox = QHBoxLayout()
self.vbox = QVBoxLayout()
# Add the title to the vertical layout
self.vbox.addLayout(self.title_hbox())
# Create a vertical layout for the save and articles buttons
self.buttons_layout = QVBoxLayout()
self.buttons_layout.addWidget(self.save_changes_button())
self.buttons_layout.addWidget(self.articles_button())
# Add the Buttons Layout to the horizontal layout
self.hbox.addLayout(self.buttons_layout)
# Add the description layout to the horizontal layout
self.hbox.addLayout(self.description_vbox())
# Add a separator to the horizontal layout
self.hbox.addWidget(self.seperator())
# Add the project info grid to the horizontal layout
self.hbox.addLayout(self.info_grid())
# Add the horizontal layout to the vertical layout
self.vbox.addLayout(self.hbox)
# Create a central widget for the projects window
window_widget = QWidget()
# Add the vertical box layout
window_widget.setLayout(self.vbox)
# Set the projects window widget
self.setWidget(window_widget)
#####
# Window Formatting
#####
def format_window(self):
"""
Sets the window geometry
:return:
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) * 0.375)
self.setGeometry(x0, y0, w, h)
# Remove frame from the window
self.setWindowFlags(Qt.FramelessWindowHint)
#####
# Window Widgets
#####
def articles_button(self):
"""
Creates a click button to open and close the project articles window
:return: QPushButton
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Magazine-50.png')))
checkable_button(self.app, btn)
btn.setMaximumWidth(self.geometry().width() / 20)
btn.setMinimumWidth(self.geometry().width() / 20)
btn.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Expanding)
btn.setToolTip('Open Project Articles Window')
btn.setToolTipDuration(1000)
btn.clicked[bool].connect(self.on_articles_pressed)
return btn
def save_changes_button(self):
"""
Creates a save changes button to push edits to Figshare
:return: QMessageWindow
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.abspath(__file__ + '/../../img/figshare_upload.png')))
btn.setMaximumWidth(self.geometry().width() / 20)
btn.setMinimumWidth(self.geometry().width() / 20)
btn.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Expanding)
btn.setToolTip('Save Changes to Figshare')
btn.setToolTipDuration(1000)
btn.pressed.connect(self.on_save_pressed)
return btn
def create_label(self, label):
"""
Creates a QLabel with a default formatting
:param label: String to be displayed in the label
:return: QLabel
"""
lbl = QLabel(label)
grid_label(self.app, lbl)
return lbl
def create_lineedit(self):
"""
Creates a QLineEdit with a default formatting
:return: QLineEdit
"""
edit = QLineEdit()
grid_edit(self.app, edit)
return edit
def create_textedit(self):
"""
Creates a QTextEdit with a default formatting
:return: QTextEdit
"""
edit = QTextEdit()
grid_edit(self.app, edit)
return edit
def title_hbox(self):
"""
Creates a Horizontal box layout containing the title lineedit and an edit button
:return: QHBoxLayout
"""
# Create Edit/Label
title = self.project_info['title']
title_edit = QLineEdit(title)
grid_title(self.app, title_edit)
title_edit.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
title_edit.setEnabled(False)
self.title_wid = title_edit
# Create Edit Button
edit_btn = QPushButton()
edit_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Pencil-52.png')))
edit_btn.setMaximumWidth(self.geometry().width() / 50)
edit_btn.setMaximumHeight(self.geometry().width() / 50)
checkable_button(self.app, edit_btn)
# Add an action to the edit button
edit_btn.clicked[bool].connect(lambda: self.on_edit_pressed(title_edit))
# Create Layout
hbox = QHBoxLayout()
hbox.addWidget(title_edit)
hbox.addWidget(edit_btn)
return hbox
def description_vbox(self):
"""
Creates a Vertical box layout containing the description label and edit button and a textedit field
:return: QVBoxLayout
"""
# Create the Description Label
lbl = self.create_label('Description')
lbl.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
# Create Edit Button
edit_btn = QPushButton()
edit_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Pencil-52.png')))
edit_btn.setMaximumWidth(self.geometry().width() / 50)
edit_btn.setMaximumHeight(self.geometry().width() / 50)
checkable_button(self.app, edit_btn)
# Create TextEdit
description = self.project_info['description']
text_edit = QTextEdit()
if description is not None and description != '':
text_edit.setText(description)
grid_edit(self.app, text_edit)
text_edit.setEnabled(False)
self.desc_wid = text_edit
# Add an action to the edit button
edit_btn.clicked[bool].connect(lambda: self.on_edit_pressed(text_edit))
# Create a horizontal layout for the label and edit button
hbox = QHBoxLayout()
hbox.addWidget(lbl)
hbox.addWidget(edit_btn)
# Create a Vertical layout to hold the label layout and the edit field
vbox = QVBoxLayout()
vbox.addLayout(hbox)
vbox.addWidget(text_edit)
return vbox
def seperator(self):
"""
        Creates a vertical separator.
:return: QFrame
"""
sep = QFrame()
sep.setFrameShape(QFrame.VLine)
sep.setFrameShadow(QFrame.Sunken)
return sep
def info_grid(self):
"""
Creates a grid layout containing more details on the project
:return: QGridlayout
"""
# Create Labels
# Project ID Label
id_lbl = QLabel('Project ID')
grid_label(self.app, id_lbl)
# Published Label
pub_lbl = QLabel('Published')
grid_label(self.app, pub_lbl)
# Collaborators Label
col_lbl = QLabel('Collaborators')
grid_label(self.app, col_lbl)
# Funding Label
fund_lbl = QLabel('Funding')
grid_label(self.app, fund_lbl)
# Group Label
group_lbl = QLabel('Group')
grid_label(self.app, group_lbl)
# Storage Label
stor_lbl = QLabel('Storage')
grid_label(self.app, stor_lbl)
# Create Edit Fields
# Project ID Field
id_field = QLabel()
grid_edit(self.app, id_field)
id_field.setText(str(self.project_info['id']))
# Published Field
published_date = self.project_info['published_date']
if published_date is None:
published_date = 'Private'
pub_field = QLabel()
pub_field.setText(published_date)
# Collaborators Field
collaborators = self.project_info['collaborators']
col_field = QButtonField(parent=self)
if collaborators is not None:
for col in collaborators:
name = col['name']
user_id = col['user_id']
tag = "{}: {}".format(name, user_id)
col_field.add_tag(tag)
self.col_wid = col_field
# Funding Field
funding = self.project_info['funding']
funding_field = QButtonField(parent=self)
if funding != '':
for funder in funding.split(':_:'):
if funder != '':
funding_field.add_tag(funder)
self.fund_wid = funding_field
# Group Field
group_id = self.project_info['group_id']
group_field = QLabel()
grid_label(self.app, group_field)
if group_id != 0:
group_field.setText(str(group_id))
else:
group_field.setText('Private Project')
# Storage Field
quota_text = self.get_quota_percentage()
storage_field = QLabel(quota_text)
grid_label(self.app, storage_field)
# Create and Populate grid
grid = QGridLayout()
grid.addWidget(id_lbl, 0, 0)
grid.addWidget(id_field, 0, 1)
grid.addWidget(pub_lbl, 1, 0)
grid.addWidget(pub_field, 1, 1)
grid.addWidget(col_lbl, 2, 0)
grid.addWidget(col_field, 2, 1, 1, 3)
grid.addWidget(fund_lbl, 3, 0)
grid.addWidget(funding_field, 3, 1, 1, 3)
grid.addWidget(group_lbl, 0, 2)
        grid.addWidget(group_field, 0, 3)
grid.addWidget(stor_lbl, 1, 2)
grid.addWidget(storage_field, 1, 3)
return grid
def get_quota_percentage(self):
"""
        Returns a string containing the current percentage of the Figshare quota used for a given project
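        The string is formatted as in the return statement below, e.g. "12.5 % of 20.0 GB".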
:return:
"""
group_id = self.project_info['group_id']
quota = self.project_info['quota']
if group_id != 0:
used_quota = self.project_info['used_quota_public']
else:
used_quota = self.project_info['used_quota_private']
if quota != 0:
quota_percentage = round(100 * used_quota / quota, 2)
else:
quota_percentage = 0
quota_gb = round(quota / (10**9), 1)
return "{} % of {} GB".format(quota_percentage, quota_gb)
#####
# Widget Actions
#####
def on_articles_pressed(self):
"""
Called when the articles button is pressed. This will open or close the articles window.
:return:
"""
if 'project_articles_window' in self.open_windows:
self.open_windows.remove('project_articles_window')
self.parent.project_articles_window.close()
elif 'article_edit_window' in self.open_windows:
self.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
else:
self.open_windows.add('project_articles_window')
self.parent.project_articles_window = ProjectsArticlesWindow(self.app, self.token, self.parent,
self.project_id)
self.parent.mdi.addSubWindow(self.parent.project_articles_window)
self.parent.project_articles_window.show()
def on_edit_pressed(self, edit_field):
"""
Called when a edit button is pressed. This will activate or deactivate the passed edit field
:param edit_field: QLineEdit or QTextEdit
:return:
"""
if edit_field.isEnabled():
edit_field.setEnabled(False)
else:
edit_field.setEnabled(True)
def on_save_pressed(self):
"""
Called when the save button is pressed.
:return:
"""
# Empty dictionary to hold update information
update_dict = {}
collaborators_list = []
# Check to see if there has been a change to the title
old_title = self.project_info['title']
new_title = self.title_wid.text()
if old_title != new_title:
update_dict['title'] = new_title
# Check to see if there has been a change to the description
old_desc = self.project_info['description']
new_desc = self.desc_wid.toPlainText()
if old_desc != new_desc:
update_dict['description'] = new_desc
# Check to see if there has been a change to the collaborators
old_col = self.project_info['collaborators']
old_col_ids = [col['user_id'] for col in old_col]
col_tags = self.col_wid.get_tags()
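        # Existing tags have the "Name: user_id" form created in info_grid(); tags the
        # user adds by hand may instead be a bare Figshare user id or an email address,
        # so both forms are handled below.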
new_col = []
for tag in col_tags:
value = tag.split(':')
if len(value) > 1:
key, value = tag.split(':')
else:
value = value[0]
if value[0] == ' ':
value = value[1:]
try:
value = int(value)
key = 'user_id'
            except ValueError:
key = 'email'
new_col.append({key: value})
for col in new_col:
if 'user_id' in col:
if col['user_id'] not in old_col_ids:
collaborators_list.append(col)
elif 'email' in col:
collaborators_list.append(col)
# Check to see if there has been a change to the funding
old_fund = self.project_info['funding']
fund_tags = self.fund_wid.get_tags()
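        # Funding entries are stored on Figshare as a single string joined by the ':_:'
        # separator, the same delimiter that info_grid() splits on for display.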
new_fund = ''
for tag in fund_tags:
new_fund += ':_:{}'.format(tag)
if old_fund != new_fund:
update_dict['funding'] = new_fund
# Call Figshare API Interface functions
successful = self.update_project(self.project_id, self.token, update_dict)
        if collaborators_list:
print('need to update collaborators')
col_successful = self.invite_collaborators(self.project_id, self.token, collaborators_list)
if col_successful:
resp = QMessageBox.information(self, 'Collaborators Invitation Confirmation',
'Collaborators Invited.\nCollaborators will not show up in project until'
' they have accepted the invitation on the figshare website.',
QMessageBox.Ok)
else:
resp = QMessageBox.warning(self, 'Collaborators Invitation Confirmation',
'Error occurred.\n{}'.format(col_successful.args),
QMessageBox.Ok)
if successful is True:
resp = QMessageBox.information(self, 'Update Confirmation', 'Project successfully updated',
QMessageBox.Ok)
if resp == QMessageBox.Ok:
self.reopen_project_info()
else:
self.reopen_project_info()
else:
resp = QMessageBox.warning(self, 'Update Confirmation',
'Error occurred.\nProject may not have updated.\n{}'.format(successful.args),
QMessageBox.Ok)
if resp == QMessageBox.Ok:
self.reopen_project_info()
else:
self.reopen_project_info()
def reopen_project_info(self):
"""
Closes and reopens the current project info window.
:return:
"""
for i in range(2):
self.parent.section_window.on_projects_btn_pressed()
self.parent.projects_window.on_project_pressed(self.project_id)
#####
# Figshare API Interface Actions
#####
def update_project(self, project_id, token, update_dict):
"""
Uploads changes to a figshare project
:param project_id: int. Figshare project id number
:param token: OAuth token
:param update_dict: dict. Dictionary holding named arguments to pass to Projects.update()
:return:
"""
try:
projects = Projects(token)
info = projects.update(project_id, **update_dict)
return True
except TypeError as err:
return err
except ValueError as err:
return err
except HTTPError as err:
return err
def invite_collaborators(self, project_id, token, collaborators):
"""
Invites collaborators to a figshare project
:param project_id: int. Figshare project id number
:param token: OAuth token
:param collaborators: List of Dict. Containing either user ids or email addresses
:return:
"""
for col in collaborators:
col['role_name'] = 'collaborator'
try:
projects = Projects(token)
for col in collaborators:
info = projects.invite(project_id, col)
print(info)
return True
except TypeError as err:
return err
except ValueError as err:
return err
except HTTPError as err:
print(err.args)
return err
<file_sep>/abstract_windows/figshare_structure_list.py
"""
Figshare Structure List
This module abstracts the visualisation of Figshare structures such as Projects and Collections.
It produces a QMdiSubWindow object with custom widgets that view the different Figshare objects as an array of
QPushButtons. Other widgets provide Figshare object creation and deletion functionality. In addition, a search field is
provided so that the list can be filtered.
Notes:
    The FigshareObjectWindow class is intended to have specific functions and variables overridden by child classes,
    and therefore will not work correctly if instantiated directly. Child classes should override the following
    functions and variables.
Functions:
on_create_button_pressed()
is_info_open()
close_object_info_window()
create_new_object_info_window()
reopen_objects()
get_object_list()
search_objects()
delete_object()
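Example:
    A minimal sketch of a child class (illustrative only; the real projects and
    collections windows provide the actual implementations):
        class MyObjectsWindow(FigshareObjectWindow):
            def get_object_list(self):
                # Would normally query Figshare, e.g. Projects(self.token).get_list()
                return []
            def on_object_pressed(self, object_id):
                # Open an info window for the chosen object.
                pass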
"""
# Standard Lib Imports
import os
# PyQt Imports
from PyQt5.QtWidgets import (QMdiSubWindow, QLabel, QPushButton, QMessageBox, QMainWindow, QApplication,
QWidget, QLineEdit, QHBoxLayout, QVBoxLayout, QSizePolicy, QScrollBar)
from PyQt5.QtGui import (QIcon, QFont)
from PyQt5.QtCore import (Qt)
# Figshare Desktop Imports
from Figshare_desktop.formatting.formatting import (scaling_ratio, checkable_button, search_bar)
# Figshare API Imports
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class FigshareObjectWindow(QMdiSubWindow):
"""
An abstract base class for viewing high level Figshare objects.
"""
def __init__(self, app: QApplication, OAuth_token: str, parent: QMainWindow):
"""
Initialise the window
Args:
app: Main thread application object.
OAuth_token: Users Figshare authentication token obtained at login.
parent: Reference to the applications main window, where various variables are kept.
"""
# Super the QMdiSubWindow init function
super().__init__()
# Create class variables of init args
self.app = app
self.token = OAuth_token
self.parent = parent
        # Create shortened reference to open windows set in the main window
self.open_windows = self.parent.open_windows
# Initialise the Figshare information and UI
self.initFig()
self.initUI()
def initFig(self):
"""
Function should create a class variable with a list of the Figshare objects to be visualised.
        Should be overridden by child classes.
Returns:
None
"""
self.object_list = self.get_object_list()
def initUI(self):
"""
Formats and shows the window.
Returns:
None
"""
# Call the window formatting function
self.format_window()
# Create a horizontal box layout to hold the figshare object buttons
self.object_buttons_box = QHBoxLayout()
# Create a vertical box layout to hold the project window widgets and layouts
self.vbox = QVBoxLayout()
# Add the Figshare Object buttons to the vertical box layout
init_finish = len(self.object_list)
if init_finish > 4:
init_finish = 4
self.create_object_bar(0, init_finish)
self.vbox.addLayout(self.object_buttons_box)
# Add the scroll bar to the vertical box layout
self.s_bar = self.scroll_bar()
self.vbox.addWidget(self.s_bar)
# Create an encompassing layout
self.hbox = QHBoxLayout()
# Create a layout for the search and management widgets
control_layout = QVBoxLayout()
control_layout.addWidget(self.search_bar())
control_layout.addLayout(self.management_buttons())
# Add the control layout and the vertical button layout to the encompassing layout
self.hbox.addLayout(control_layout)
self.hbox.addLayout(self.vbox)
# Create a central widget for the objects window
window_widget = QWidget()
# Add the vertical box layout
window_widget.setLayout(self.hbox)
# Set the projects window widget
self.setWidget(window_widget)
# Window Formatting
# =================
def format_window(self):
"""
Formats the window based on the available space in the primary screen.
Returns:
None
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) / 6)
self.setGeometry(x0, y0, w, h)
# Remove frame from projects window
self.setWindowFlags(Qt.FramelessWindowHint)
# Set the default tool tip time duration to 1 second
self.tool_tip_time = 1000
# Window Widgets
# ==============
def scroll_bar(self):
"""
Creates a QScrollBar set to the size of the figshare objects list.
Returns:
s_bar (QScrollBar): Scroll bar used to move through the list of Figshare objects.
"""
s_bar = QScrollBar(Qt.Horizontal)
s_bar.setMaximum(len(self.object_list) - 4)
s_bar.sliderMoved.connect(self.slider_val)
s_bar.valueChanged.connect(self.slider_val)
return s_bar
def create_obj_thumb(self, title: str, published_date: str, object_id: int):
"""
Creates a large QPushButton with information on the objects title, and published date.
Args:
title: Name of the Figshare object.
published_date: String representation of when/if the object was made public.
object_id: Figshare object ID number
Returns:
btn (QPushButton): Button connected to open a subwindow with its specific ID number.
"""
# Get the scaling rations for the current display
w_ratio, f_ratio = scaling_ratio(self.app)
# Scale the font sizes
title_fnt_size = 12 * f_ratio
date_ftn_size = 8 * f_ratio
# Create the title label
title_lbl = QLabel()
title_lbl.setText("{}".format(title))
title_lbl_fnt = QFont('SansSerif', title_fnt_size)
title_lbl_fnt.setBold(True)
title_lbl.setFont(title_lbl_fnt)
title_lbl.setWordWrap(True)
# Create the date label
date_lbl = QLabel()
if published_date is None:
published_date = 'Private'
date_lbl.setText("Published: {}".format(published_date))
date_lbl_fnt = QFont('SansSerif', date_ftn_size)
date_lbl.setFont(date_lbl_fnt)
date_lbl.setStyleSheet('color: gray')
date_lbl.setWordWrap(True)
# Create a layout to hold the labels
lbl_box = QVBoxLayout()
# Add labels to layout
lbl_box.addWidget(title_lbl)
lbl_box.addWidget(date_lbl)
# Create a button for the project
btn = QPushButton(self)
checkable_button(self.app, btn)
btn.setLayout(lbl_box)
btn.clicked[bool].connect(lambda: self.on_object_pressed(object_id))
self.object_buttons_box.addWidget(btn)
def create_object_bar(self, start: int, finish: int):
"""
Creates a series of Object Push Buttons from a defined subset of the total list.
Args:
start: Starting element from the objects list.
finish: Finishing element from the objects list.
Returns:
None
"""
self.buttons = {}
i = 0
for object_pos in range(start, finish):
title = self.object_list[object_pos]['title']
pub_date = self.object_list[object_pos]['published_date']
object_id = self.object_list[object_pos]['id']
self.create_obj_thumb(title, pub_date, object_id)
self.buttons[object_id] = self.object_buttons_box.itemAt(i).widget()
i += 1
def management_buttons(self):
"""
Creates a QLayout object that holds the buttons used for creating and deleting Figshare objects.
Returns:
hbox (QHBoxLayout): Horizontal layout with the create and delete buttons within it.
"""
# Create New Project Button
np_btn = QPushButton()
np_btn.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Expanding)
np_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Folder-48.png')))
np_btn.setToolTip('Create a new Figshare Object')
np_btn.setToolTipDuration(self.tool_tip_time)
np_btn.pressed.connect(self.on_create_btn_pressed)
# Create Delete Project Button
del_btn = QPushButton()
del_btn.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Expanding)
del_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/del_folder.png')))
del_btn.setToolTip('Delete Selected Object')
del_btn.setToolTipDuration(self.tool_tip_time)
del_btn.pressed.connect(self.on_delete_btn_pressed)
# Create layout to hold buttons
hbox = QHBoxLayout()
# Add Buttons to layout
hbox.addWidget(np_btn)
hbox.addWidget(del_btn)
return hbox
def search_bar(self):
"""
Creates a QLineEdit object for the user to enter search queries that will filter the total object list.
Returns:
edit (QLineEdit): Edit field connected to perform a search either when return is pressed or focus is
shifted away from the edit.
"""
# Create text box
edit = QLineEdit()
# Set font style
search_bar(self.app, edit)
# Set place holder text
edit.setPlaceholderText('Search')
# Add a clear button to the line edit
edit.setClearButtonEnabled(True)
# Add mouse over text
edit.setToolTip('Search for specific Figshare objects')
edit.setToolTipDuration(self.tool_tip_time)
# Connect search function to the return key
edit.returnPressed.connect(lambda: self.search_on_return(edit.text()))
edit.textChanged.connect(lambda: self.search_on_clear(edit.text()))
return edit
# Widget Actions
# ==============
def slider_val(self):
"""
Called when the objects slider is changed. Removes all existing buttons and regenerates from the new list
position.
Returns:
None
"""
# Remove all existing button widgets
while self.object_buttons_box.count():
item = self.object_buttons_box.takeAt(0)
item.widget().deleteLater()
# Get the current value of the scroll bar
s_bar_pos = self.s_bar.value()
# Define how many buttons to visualise
if 1 <= len(self.object_list) <= 4:
number = len(self.object_list)
else:
number = 4
self.s_bar.setMaximum(len(self.object_list) - number) # Will be zero if less than 4 items in list
self.create_object_bar(s_bar_pos, s_bar_pos + number) # Recreates the button view from the new position
def search_init(self):
"""
Called when the object search bar is used. Removes all existing buttons and regenerates from the new list.
Returns:
None
"""
# Remove all existing button widgets
while self.object_buttons_box.count():
item = self.object_buttons_box.takeAt(0)
item.widget().deleteLater()
# Define how many buttons to visualise
if 1 <= len(self.object_list) <= 4:
number = len(self.object_list)
else:
number = 4
self.s_bar.setMaximum(len(self.object_list) - number) # Will be zero if less than 4 items in list
self.create_object_bar(0, number) # Recreates the button view from the new position
def search_on_return(self, search_text: str):
"""
Called when the return key is pressed in the search bar. Will search the relevant Figshare object endpoint based
on the query string. Will overwrite the existing objects list with that of the search result.
Args:
search_text: Elastic search query with which to search the object titles for.
Returns:
None
"""
self.object_list = self.search_objects(search_text) # Perform the search
self.search_init() # Redraw the object buttons
def search_on_clear(self, search_text: str):
"""
Called when the search bar is cleared, or if the focus is removed and the search string is empty
Args:
search_text: string from the search LineEdit
Returns:
None
"""
if search_text == '':
self.object_list = self.get_object_list()
self.slider_val()
def on_create_btn_pressed(self):
"""
Called when the create new object button is pressed.
MUST BE OVERWRITTEN BY CHILDREN.
Examples:
Example of code is given where '' denotes the sections that should be manually defined to the specific
figshare object being used.
if "new_''_window" in self.open_windows:
self.open_windows.remove("new_''_window")
self.parent.new_''_window.close()
else:
self.open_windows.remove("''_window")
self.close()
self.open_windows.add("new_''_window")
self.parent.new_''_window = New''Window(self.app, self.token, self.parent)
self.parent.mdi.addSubWindow(self.parent.new_''_window)
self.parent.new_''_window.show()
Returns:
None
"""
pass
def is_info_open(self):
"""
Called to see if there is a Figshare object info window open.
MUST BE OVERWRITTEN BY CHILDREN.
Examples:
Example of code is given where '' denotes the sections that should be manually defined to the specific
figshare object being used.
if "''_info_window" in self.open_windows:
open_obj_id = self.parent.''_info_window.''_id
return True, open_obj_id
else:
return False, None
Returns:
open (bool): True, or False dependent on if info window is already open
object_id (int): Figshare object ID number
"""
pass
def close_object_info_window(self):
"""
Called when the existing object info window needs to be closed.
MUST BE OVERWRITTEN BY CHILDREN.
Examples:
Example of code is given where '' denotes the sections that should be manually defined to the specific
figshare object being used.
self.open_windows.remove("''_info_window")
self.parent.''_info_window.close()
if "''_articles_window" in self.open_windows:
self.open_windows.remove("''_articles_window")
self.parent.''_articles_window.close()
if 'article_edit_window' in self.open_windows:
self.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
Returns:
None
"""
def create_new_object_info_window(self, object_id: int):
"""
Called when a new object info window is to be created.
MUST BE OVERWRITTEN BY CHILDREN.
Examples:
Example of code is given where '' denotes the sections that should be manually defined to the specific
figshare object being used.
self.open_windows.add("''_info_window")
self.parent.''_info_window = ''InfoWindow(self.app, self.token, self.parent, self.object_id)
self.parent.mdi.addSubWindow(self.parent.''_info_window)
self.parent.''_info_window.show()
Args:
object_id: Figshare object ID number.
Returns:
None
"""
pass
def on_object_pressed(self, object_id: int):
"""
Called when an object button is clicked. If an object info window is already open will see if it is the same
as the object just pressed. If it is, then it is closed. If it is not the same then the currently open info
window is closed and the new object window is opened in its place.
Args:
object_id: Figshare object ID number.
Returns:
None
"""
# Check to see if an object window is open
info_open, open_obj_id = self.is_info_open()
if info_open:
# Check to see if the open object is the same as the object that was just pressed
if open_obj_id != object_id:
# Check to see if we need to toggle a button by seeing if the object button still exists.
# It may have been scrolled away from.
if open_obj_id in self.buttons:
# Finally check that it is checked, and un-check it if so.
if self.buttons[open_obj_id].isChecked():
self.buttons[open_obj_id].toggle()
# Close the currently open info window
self.close_object_info_window()
# Create and open new object info window
self.create_new_object_info_window(object_id)
# If the button pressed corresponds to the existing object
else:
# Close the object info window
self.close_object_info_window()
else:
self.create_new_object_info_window(object_id)
def on_delete_btn_pressed(self):
"""
Called when the object delete button is pressed.
Returns:
None
"""
# See if an info window is open, and get its object ID number if so
info_open, open_obj_id = self.is_info_open()
if info_open:
# Create a confirmation dialog
msg = "Are you sure you want to delete the open Figshare Object?"
msg_box = QMessageBox.question(self, "Deletion Confirmation", msg, QMessageBox.Yes, QMessageBox.No)
if msg_box == QMessageBox.Yes:
# Attempt to delete the Figshare object
successful = self.delete_object(open_obj_id)
if successful:
con_reply = QMessageBox.information(self, "Deletion Confirmation", "Object successfully deleted.",
QMessageBox.Ok)
if con_reply is not None:
self.reopen_objects()
else:
con_reply = QMessageBox.warning(self, "Deletion Confirmation", "Object could not be deleted",
QMessageBox.Ok)
if con_reply is not None:
self.reopen_objects()
def reopen_objects(self):
"""
Called to open and close the figshare objects window
MUST BE OVERWRITTEN BY CHILDREN.
Examples:
Example of code is given where '' denotes the sections that should be manually defined to the specific
figshare object being used.
for i in range(2):
self.parent.section_window.on_''_btn_pressed()
Returns:
None
"""
# Figshare API Interface Functions
# ================================
def get_object_list(self):
"""
Called to return a list of Figshare object associated to the user.
MUST BE OVERWRITTEN BY CHILDREN.
Examples:
Example of code is given where '' denotes the sections that should be manually defined to the specific
figshare object being used.
'' = ''(self.token)
object_list = ''.get_list()
return object_list
Returns:
object_list (list of dicts): List of the user's Figshare objects.
"""
pass
def search_objects(self, search_text: str):
"""
Gets a list of objects matching the users search query.
MUST BE OVERWRITTEN BY CHILDREN.
Examples:
Example of code is given where '' denotes the sections that should be manually defined to the specific
figshare object being used.
'' = ''(self.token)
result = ''.search(search_text)
if len(result) == 0:
result = ''.get_list()
return result
Args:
search_text: Figshare style elastic search string
Returns:
result (list of dicts): A list of dictionary objects that either match the search criteria,
or returns the full set if no matches found.
"""
pass
def delete_object(self, object_id: int):
"""
Called to delete the given figshare object.
MUST BE OVERWRITTEN BY CHILDREN.
Examples:
Example of code is given where '' denotes the sections that should be manually defined to the specific
figshare object being used.
'' = ''(self.token)
try:
''.delete(object_id, safe=False) # Suppresses command line requirement for confirmation
return True
except:
return False
Args:
object_id:
Returns:
bool: True of False dependent on if the deletion was successful.
"""
<file_sep>/collections_windows/new_collection_window.py
"""
New Collection Window
This module contains the new collection window class that allows a user to create a new figshare collection.
"""
# Standard Imports
from PyQt5.QtCore import (Qt)
from PyQt5.QtWidgets import (QLabel, QGridLayout, QMessageBox)
from requests import HTTPError
from Figshare_desktop.abstract_windows.new_object_window import NewObjectWindow
from Figshare_desktop.custom_widgets.button_field import QButtonField
from Figshare_desktop.custom_widgets.author_field import AuthorField
from Figshare_desktop.custom_widgets.categories_field import CategoriesField
from Figshare_desktop.formatting.formatting import (grid_label)
from figshare_interface.figshare_structures.collections import Collections
from figshare_interface.http_requests.figshare_requests import issue_request
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class NewCollectionWindow(NewObjectWindow):
"""
Window subclasses the abstract NewObjectWindow class.
"""
def create_button_field(self, label: str):
"""
Creates a label button field pair.
Args:
label: Name of field to be associated to the button field.
Returns:
lbl (QLabel): Label of the button field
btn_field (QButtonField): Button field layout.
"""
lbl = QLabel(label)
grid_label(self.app, lbl)
button_field = QButtonField(self)
return lbl, button_field
def create_author_field(self, label: str):
"""
Creates a label button-field pair that is specifically formatted for author metadata.
Args:
label: Name of the field to be associated to the button-field widget
Returns:
lbl (QLabel): Label of the button field.
btn_field (QButtonField): Author formatted button field.
"""
lbl = QLabel(label)
grid_label(self.app, lbl)
btn_field = AuthorField(self)
return lbl, btn_field
def create_categories_field(self, label: str):
"""
Creates a label, button-field pair that is specifically formatted for Figshare categories.
Args:
label: Name of the field to be associated to the button-field widget.
Returns:
lbl (QLabel): Label of the button field.
btn_field (QButtonField): Categories button field.
"""
lbl = QLabel(label)
grid_label(self.app, lbl)
btn_field = CategoriesField(self.parent.id_categories, self.parent.name_categories, parent=self)
return lbl, btn_field
def create_object_info_layout(self):
"""
Creates a layout with label and edit fields for creating a new figshare collection.
Returns:
grid (QGridLayout): grid layout containing the different info fields and labels.
"""
# TITLE
title_lbl, self.title_field = self.create_lineedit('Title')
self.title_field.setPlaceholderText('Enter Collection title here.')
# DESCRIPTION
desc_lbl, self.descr_field = self.create_edit('Description')
self.descr_field.setPlaceholderText('Enter meaningful collection description here.')
# AUTHORS
auth_lbl, self.auth_field = self.create_author_field('Authors')
# CATEGORIES
cat_lbl, self.cat_field = self.create_categories_field('Categories')
# TAGS
tag_lbl, self.tag_field = self.create_button_field('Tags')
# REFERENCES
ref_lbl, self.ref_field = self.create_button_field('References')
# Create Layout
grid = QGridLayout()
# Add Title
grid.addWidget(title_lbl, 0, 0, Qt.AlignLeft)
grid.addWidget(self.title_field, 0, 1)
# Add Description
grid.addWidget(desc_lbl, 1, 0, Qt.AlignLeft)
grid.addWidget(self.descr_field, 1, 1)
# Add Authors
grid.addWidget(auth_lbl, 2, 0, Qt.AlignLeft)
grid.addWidget(self.auth_field, 2, 1)
# Add Categories
grid.addWidget(cat_lbl, 3, 0, Qt.AlignLeft)
grid.addWidget(self.cat_field, 3, 1)
# Add Tags
grid.addWidget(tag_lbl, 4, 0, Qt.AlignLeft)
grid.addWidget(self.tag_field, 4, 1)
# Add References
grid.addWidget(ref_lbl, 5, 0, Qt.AlignLeft)
grid.addWidget(self.ref_field, 5, 1)
grid.setColumnStretch(1, 3)
return grid
# Widget Actions
# ==============
def on_save_pressed(self):
"""
Called when the save button is pressed. Will create a new Figshare project from the filled fields.
Returns:
None
Raises:
ValueError: Error may be raised if information given is not formatted correctly for Figshare.
TypeError: Error may occur if the value passed to Figshare API Interface is not the correct type.
KeyError: Error may occur if mandatory default information is not provided.
HTTPError: Error may occur if there is a fault in the upload, or if the formatting is incorrect.
"""
# Get Collection Info
title = self.title_field.text()
description = self.descr_field.toPlainText()
authors = self.auth_field.get_tags()
categories = self.cat_field.get_tags()
tags = self.tag_field.get_tags()
references = self.ref_field.get_tags()
# Format References
formatted_references = []
for ref in references:
if ref[0:7] == 'http://':
formatted_references.append(ref)
# Create Collection Info Dictionary
creation_dict = {
'title': title,
'description': description
}
if authors != []:
creation_dict['authors'] = authors
if categories != []:
creation_dict['categories'] = categories
if tags != []:
creation_dict['tags'] = tags
if references != []:
creation_dict['references'] = formatted_references
# Create Collection
try:
collection_info = self.create_object(creation_dict)
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Information)
msgBox.setText("New Project Created\n{}".format(collection_info['title']))
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox, exit_parent=True))
msgBox.show()
except (ValueError, TypeError, KeyError, HTTPError) as err:
err_args = err.args
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Critical)
msgBox.setText(err_args[0])
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox))
msgBox.show()
def on_cancel_pressed(self):
"""
Called when the cancel button is pressed. Will return to the collections window without creating a new
collection
Returns:
None
"""
self.open_windows.remove('new_collection_window')
self.close()
self.parent.section_window.on_collections_btn_pressed()
# Figshare API Functions
# ======================
def create_object(self, info_dict: dict):
"""
Args:
info_dict:
Returns:
"""
collections = Collections(self.token)
required_fields = ['title']
for field in required_fields:
if field not in info_dict:
raise KeyError("Mandatory field: {} not found in input dictionary.".format(field))
object_info = collections.create(**info_dict)
return object_info
<file_sep>/local_articles/local_article.py
"""
"""
import os
from PyQt5.QtWidgets import (QTreeWidgetItem)
from ..figshare_articles.article import Article
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class LocalArticle(Article):
def __init__(self, OAuth_token, filename):
# Initialize dictionary for basic figshare metadata.
self.figshare_metadata = {'title': None,
'id': None,
'description': None,
'tags': None,
'references': None,
'categories': None,
'authors': None,
'defined_type': None,
'funding': None,
'license': None,
'version': None,
'size': None,
'status': 'local',
'type': 'article'
}
self.figshare_desktop_metadata = {'location': None,
'thumb': None
}
# Define the local file path and file title.
self.local_path = os.path.abspath(filename)
file_title = os.path.split(filename)[-1]
self.figshare_metadata['title'] = file_title
self.figshare_desktop_metadata['location'] = self.local_path
# Initialize an empty object that will hold generated QTreeWidgetItem representations of the article.
self.qtreeitem = None
# Save the OAuth token for later use
self.token = OAuth_token
def fill_info(self):
"""
Overwriting parent function to prevent unexpected usage.
:return:
"""
pass
def read_file(self, filename):
"""
If the file type is recognised parse the file to get metadata.
:param filename: local path to file.
:return:
"""
# This is not required for a simple figshare article. It should be re-defined for specific file types.
return None
def index_schema(self):
return {}
<file_sep>/data_window/figshare_add_article_list.py
"""
"""
import os
from PyQt5.QtWidgets import (QWidget, QPushButton, QLineEdit, QMessageBox, QFileDialog, QAbstractItemView,
QTextEdit, QGridLayout, QHBoxLayout, QVBoxLayout, QSizePolicy, QTreeWidgetItem,
QTreeWidget)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, pyqtSlot, pyqtSignal, QObject)
from Figshare_desktop.formatting.formatting import (press_button)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class ArticleList(QWidget):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.initLocal()
self.initTree()
self.initUI()
def initThreads(self):
"""
Initialises variables for threads
:return:
"""
self.__threads = []
def initLocal(self):
"""
Initialises the local article variables
:return:
"""
self.local_ids = set()
def initUI(self):
"""
Initialises the user interface
:return:
"""
hbox = QHBoxLayout()
# Add the tree widget to the layout
hbox.addWidget(self.tree)
self.setLayout(hbox)
#####
# Widgets
#####
def initTree(self):
"""
Initialises the QTreeWidget to hold the articles prior to their upload to figshare
:return:
"""
tree = QTreeWidget()
# Format the tree to allow for multiple items to be selected
tree.setSelectionMode(QAbstractItemView.ExtendedSelection)
# Allow for sorting of columns
tree.setSortingEnabled(True)
# Create the tree column headers
headers = ['id', 'title']
header_item = QTreeWidgetItem(headers)
tree.setHeaderItem(header_item)
self.tree = tree
self.tree.itemDoubleClicked.connect(self.item_double_clicked)
return self.tree
#####
# Widget Actions
#####
@pyqtSlot(bool)
def fill_tree(self):
"""
Clears and then re-fills the tree from the stored set of local article id numbers
:return:
"""
self.tree.clear()
for a_id in self.local_ids:
# Create a tree widget item from the article id and title
title = self.parent.local_articles[a_id].figshare_metadata['title']
tree_item = QTreeWidgetItem([a_id, title])
# Add to the tree
self.tree.addTopLevelItem(tree_item)
# Resize columns to contents
for column in range(self.tree.columnCount()):
self.tree.resizeColumnToContents(column)
@pyqtSlot(str)
def add_to_tree(self, local_article_id: str):
"""
Adds a local article to the tree
:param local_article_id: string containing the local article id number
:return:
"""
if local_article_id not in self.local_ids:
# Add the id to the local set
self.local_ids.add(local_article_id)
# Create a tree widget item from the article id and title
title = self.parent.local_articles[local_article_id].figshare_metadata['title']
tree_item = QTreeWidgetItem([local_article_id, title])
# Add to the tree
self.tree.addTopLevelItem(tree_item)
# Resize columns to contents
for column in range(self.tree.columnCount()):
self.tree.resizeColumnToContents(column)
def item_double_clicked(self, item, column):
article_id = item.text(0)
self.remove_from_tree(article_id)
@pyqtSlot(str)
def remove_from_tree(self, local_article_id):
"""
Attempts to remove an article from the tree
:param local_article_id: string containing the local article id number
:return:
"""
self.local_ids.remove(local_article_id)
self.fill_tree()
class TreeAddWorker(QObject):
sig_step = pyqtSignal(str)
sig_done = pyqtSignal(bool)
def __init__(self, article_id_set):
super().__init__()
self.article_id_set = article_id_set
@pyqtSlot()
def work(self):
"""
Adds articles to the tree
:return:
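Example (illustrative; assumes QThread is imported from PyQt5.QtCore, article_list is an
ArticleList instance, and the caller keeps references to both objects so they are not garbage collected):
    thread = QThread()
    worker = TreeAddWorker(article_id_set)
    worker.moveToThread(thread)
    worker.sig_step.connect(article_list.add_to_tree)
    worker.sig_done.connect(article_list.fill_tree)
    thread.started.connect(worker.work)
    thread.start()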
"""
while self.article_id_set:
article_id = self.article_id_set.pop()
self.sig_step.emit(article_id)
self.sig_done.emit(True)
<file_sep>/custom_widgets/categories_combo.py
"""
"""
# PyQt Imports
from PyQt5.Qt import (QStandardItemModel, QStandardItem)
# Figshare Desktop Imports
from Figshare_desktop.custom_widgets.extended_combo import ExtendedCombo
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class CategoriesCombo(ExtendedCombo):
"""
A QComboBox widget specifically designed to work with Figshare categories.
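Example (illustrative; assumes the parent window exposes the category lookup dictionaries,
as the other category widgets in this package do):
    combo = CategoriesCombo(self.parent.id_categories, self.parent.name_categories)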
"""
def __init__(self, id_dict: dict, name_dict: dict, parent=None):
"""
Supers QComboBox, but also creates class references to the categories dictionaries.
Args:
id_dict: Categories dictionary with id numbers as keys.
name_dict: Categories dictionary with names as keys.
parent: Widget parent.
"""
super().__init__()
self.id_dict = id_dict
self.name_dict = name_dict
cat_list = sorted(list(self.name_dict.keys()))
self.fill_combo(cat_list)
model = QStandardItemModel()
for i, word in enumerate(cat_list):
item = QStandardItem(word)
model.setItem(i, 0, item)
self.setModel(model)
self.setModelColumn(0)
if parent is not None:
self.setParent(parent)
def fill_combo(self, fill_list: list):
"""
Fills the combo box with categories from the fill list.
Args:
fill_list: list of strings to put as items in the combo box.
Returns:
None
"""
self.clear()
self.addItem('')
self.addItems(fill_list)
<file_sep>/main_window/section_window.py
"""
"""
from PyQt5.QtWidgets import (QWidget, QPushButton, QVBoxLayout, QMdiSubWindow)
from PyQt5.QtCore import (Qt)
from Figshare_desktop.formatting.formatting import scaling_ratio
from Figshare_desktop.formatting.formatting import checkable_button
from Figshare_desktop.projects_windows.projects_window import ProjectsWindow
from Figshare_desktop.collections_windows.collections_window import CollectionsWindow
from Figshare_desktop.data_window.data_window import DataWindow
from Figshare_desktop.data_window.data_articles_window import DataArticlesWindow
from Figshare_desktop.data_window.figshare_add_window import FigshareAddWindow
from Figshare_desktop.selection_window.selection_window import SelectionWindow
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class sectionWindow(QMdiSubWindow):
"""
Creates the beginning window from which the different Figshare structures can be navigated to
"""
def __init__(self, app, OAuth_token, parent):
"""
:param app: QApplication object
:param OAuth_token: Figshare OAuth token
:param parent: QMDI object that is the overall parent of the application
"""
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.open_windows = self.parent.open_windows
self.initUI()
def initUI(self):
"""
User Interface initialization
:return:
"""
self.format_window()
self.w = QWidget()
self.w.setLayout(self.button_layout())
self.setWidget(self.w)
def format_window(self):
"""
Formats the sections window
"""
self.setWindowFlags(Qt.FramelessWindowHint)
w_scale, f_scale = scaling_ratio(self.app)
# Gets the QRect of the main window
geom = self.parent.geometry()
# Define geometries for the section window
x0 = geom.x()
y0 = geom.y()
w = ((geom.width() / 8) - x0)
h = (geom.height() - y0)
self.setGeometry(x0, y0, w, h)
# Store the section window geometry in the main window
self.parent.section_geom = self.geometry()
#####
# Window Widgets
#####
def button_layout(self):
"""
Creates a QLayout object holding the section buttons
:return: QLayout Object containing toggle buttons for different sections
"""
# QLayout to hold buttons
button_box = QVBoxLayout()
# Projects
projects_btn = QPushButton('Projects', self)
checkable_button(self.app, projects_btn)
projects_btn.clicked[bool].connect(self.on_projects_btn_pressed)
self.projects_btn = projects_btn
# Collections
collections_btn = QPushButton('Collections', self)
checkable_button(self.app, collections_btn)
collections_btn.clicked[bool].connect(self.on_collections_btn_pressed)
self.collections_btn = collections_btn
# Local Data
localdata_btn = QPushButton('Local Data', self)
checkable_button(self.app, localdata_btn)
localdata_btn.clicked[bool].connect(self.on_local_data_btn_pressed)
self.localdata_btn = localdata_btn
# Selection
#selection_btn = QPushButton('Selection', self)
#checkable_button(self.app, selection_btn)
#self.selection_btn = selection_btn
# Add Buttons to Layout
button_box.addWidget(projects_btn)
button_box.addWidget(collections_btn)
button_box.addWidget(localdata_btn)
#button_box.addWidget(selection_btn)
return button_box
#####
# Widget Actions
#####
def on_projects_btn_pressed(self):
"""
Called when the projects button is pressed. Is also called after some project information edits.
"""
# Check to see if any other sections windows are open
if 'collections_window' in self.open_windows:
self.close_collections_window()
if self.collections_btn.isChecked():
self.collections_btn.toggle()
if 'new_collection_window' in self.open_windows:
self.close_new_collection_window()
if self.collections_btn.isChecked():
self.collections_btn.toggle()
if 'local_data_window' in self.open_windows:
self.close_local_data_window()
if self.localdata_btn.isChecked():
self.localdata_btn.toggle()
# Check to see if the projects window is already open
if 'projects_window' in self.open_windows:
self.close_projects_window()
# Check to see if the create new project window is open
elif 'new_project_window' in self.open_windows:
self.close_new_projects_window()
# If no projects windows are open then create a projects window and show
elif 'projects_window' not in self.open_windows and 'new_project_window' not in self.open_windows:
self.open_windows.add('projects_window')
self.parent.projects_window = ProjectsWindow(self.app, self.token, self.parent)
self.parent.mdi.addSubWindow(self.parent.projects_window)
self.parent.projects_window.show()
def on_collections_btn_pressed(self):
"""
Called when the collections button is pressed. Is also called by child windows to re-initialise after edits.
Returns:
None
"""
# Check to see if any other sections windows are open
if 'local_data_window' in self.open_windows:
self.close_local_data_window()
if self.localdata_btn.isChecked():
self.localdata_btn.toggle()
if 'projects_window' in self.open_windows:
self.close_projects_window()
if self.projects_btn.isChecked():
self.projects_btn.toggle()
if 'new_project_window' in self.open_windows:
self.close_new_projects_window()
if self.projects_btn.isChecked():
self.projects_btn.toggle()
# check to see if the collections window is already open
if 'collections_window' in self.open_windows:
self.close_collections_window()
elif 'new_collection_window' in self.open_windows:
self.close_new_collection_window()
# If no collections windows are open then create the collections window and show
elif 'collections_window' not in self.open_windows and 'new_collection_window' not in self.open_windows:
self.open_windows.add('collections_window')
self.parent.collections_window = CollectionsWindow(self.app, self.token, self.parent)
self.parent.mdi.addSubWindow(self.parent.collections_window)
self.parent.collections_window.show()
def on_local_data_btn_pressed(self):
"""
Called when the local data button is pressed.
:return:
"""
# Check to see if any other sections windows are open
if 'projects_window' in self.open_windows:
self.close_projects_window()
if self.projects_btn.isChecked():
self.projects_btn.toggle()
if 'new_project_window' in self.open_windows:
self.close_new_projects_window()
if self.projects_btn.isChecked():
self.projects_btn.toggle()
if 'collections_window' in self.open_windows:
self.close_collections_window()
if self.collections_btn.isChecked():
self.collections_btn.toggle()
if 'new_collection_window' in self.open_windows:
self.close_new_collection_window()
if self.collections_btn.isChecked():
self.collections_btn.toggle()
# Check to see if window is already open
if 'local_data_window' in self.open_windows:
self.close_local_data_window()
else:
self.open_windows.add('local_data_window')
self.parent.local_data_window = DataWindow(self.app, self.token, self.parent)
self.parent.mdi.addSubWindow(self.parent.local_data_window)
self.parent.local_data_window.show()
self.open_windows.add('data_articles_window')
self.parent.data_articles_window = DataArticlesWindow(self.app, self.token, self.parent)
self.parent.mdi.addSubWindow(self.parent.data_articles_window)
self.parent.data_articles_window.show()
self.open_windows.add('figshare_add_window')
self.parent.figshare_add_window = FigshareAddWindow(self.app, self.token, self.parent)
self.parent.mdi.addSubWindow(self.parent.figshare_add_window)
self.parent.figshare_add_window.show()
def close_projects_window(self):
"""
Called to close the projects window and any children
:return:
"""
self.open_windows.remove('projects_window')
self.parent.projects_window.close()
# Check to see if a project information window is open
if 'project_info_window' in self.open_windows:
self.open_windows.remove('project_info_window')
self.parent.project_info_window.close()
# Check to see if a project articles window is open
if 'project_articles_window' in self.open_windows:
self.open_windows.remove('project_articles_window')
self.parent.project_articles_window.close()
# Check to see if the article edit window is open
if 'article_edit_window' in self.open_windows:
self.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
def close_new_projects_window(self):
"""
Called to close the new projects window
:return:
"""
self.open_windows.remove('new_project_window')
self.parent.new_project_window.close()
def close_collections_window(self):
"""
Called to close the collections window and any children
Returns:
None
"""
self.open_windows.remove('collections_window')
self.parent.collections_window.close()
# Check to see if a collections info window is open
if 'collection_info_window' in self.open_windows:
self.open_windows.remove('collection_info_window')
self.parent.collection_info_window.close()
# Check to see if a collection articles window is open
if 'collection_articles_window' in self.open_windows:
self.open_windows.remove('collection_articles_window')
self.parent.collection_articles_window.close()
# Check to see if a collection article edit window is open
if 'article_edit_window' in self.open_windows:
self.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
def close_new_collection_window(self):
"""
Called to close the new collections window
Returns:
None
"""
self.open_windows.remove('new_collection_window')
self.parent.new_collection_window.close()
def close_local_data_window(self):
self.open_windows.remove('local_data_window')
self.parent.local_data_window.close()
self.open_windows.remove('data_articles_window')
self.parent.data_articles_window.close()
self.open_windows.remove('figshare_add_window')
self.parent.figshare_add_window.close()
def open_data_articles_window(self):
"""
Can be called from child windows to open the data articles window
:return:
"""
self.parent.open_windows.add('data_articles_window')
self.parent.data_articles_window = DataArticlesWindow(self.app, self.token, self.parent)
self.parent.mdi.addSubWindow(self.parent.data_articles_window)
self.parent.data_articles_window.show()
<file_sep>/figshare_articles/determine_type.py
"""
"""
from os.path import splitext
from .article import Article
from .stm_articles.spectroscopy_article import SpecArticle
from .stm_articles.topography_article import TopoArticle
from ..local_articles.local_article import LocalArticle
from ..local_articles.local_stm_articles.local_topography_article import LocalTopoArticle
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
def gen_article(filename, OAuth_token, project_id, article_id):
file_path, file_ext = splitext(filename)
file_types = {
# OMICRON FLAT FILES
'.Z_flat': TopoArticle,
'.I(V)_flat': SpecArticle,
'.Aux1(V)_flat': SpecArticle,
'.Aux2(V)_flat': SpecArticle,
# ZYVEX Files
'.zad': TopoArticle
}
if file_ext in file_types:
return file_types[file_ext](OAuth_token, project_id, article_id)
else:
return Article(OAuth_token, project_id, article_id)
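# Example (illustrative; the filename and id values are hypothetical):
#     article = gen_article('scan_001.Z_flat', OAuth_token, project_id, article_id)
#     # '.Z_flat' maps to TopoArticle; unrecognised extensions fall back to a plain Article.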
def gen_local_article(OAuth_token, filename):
file_path, file_ext = splitext(filename)
file_types = {# OMICRON FLAT FILES
'.Z_flat': LocalTopoArticle
}
if file_ext in file_types:
return file_types[file_ext](OAuth_token, filename, file_ext)
else:
return LocalArticle(OAuth_token, filename)
<file_sep>/selection_window/selection_window.py
"""
"""
import os
import itertools
from requests.exceptions import HTTPError
from PyQt5.QtWidgets import (QWidget, QPushButton, QTreeWidget, QTreeWidgetItem, QAbstractItemView,
QHBoxLayout, QVBoxLayout, QSizePolicy, QMessageBox)
from PyQt5.QtGui import (QIcon)
from PyQt5.QtCore import (Qt, QPoint)
from figshare_interface import (Projects)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class SelectionWindow(QWidget):
def __init__(self, app, OAuth_token, main_window):
super().__init__()
self.app = app
self.token = OAuth_token
self.main_window = main_window
self.selection_article_list = set()
self.initUI()
def initUI(self):
self.formatWindow()
self.hbox = QHBoxLayout()
self.vbox = QVBoxLayout()
self.current_view = 'list'
self.hbox.addLayout(self.view_options_layout())
self.hbox.addWidget(self.create_article_layout(self.current_view))
self.hbox.addLayout(self.upload_layout())
self.selection_open = self.main_window.centralWidget().selection_open
self.vbox.addLayout(self.hbox)
self.setLayout(self.vbox)
self.activate_project_article_selection_btn()
self.activate_data_save_btn()
def formatWindow(self):
mw_geom = self.main_window.geometry()
mw_x0 = mw_geom.x()
mw_y0 = mw_geom.y()
mw_width = mw_geom.width()
mw_height = mw_geom.height()
screen = self.app.primaryScreen().availableGeometry()
x0 = mw_x0 + mw_width + 10
y0 = mw_y0 + 0.75 * screen.height()
self.w_width = screen.width() - x0
self.w_height = (mw_y0 + mw_height) - y0
self.setGeometry(x0, y0, self.w_width, self.w_height)
self.setWindowFlags(Qt.FramelessWindowHint)
def mousePressEvent(self, event):
self.oldPos = event.globalPos()
def mouseMoveEvent(self, event):
delta = QPoint(event.globalPos() - self.oldPos)
self.move(self.x() + delta.x(), self.y() + delta.y())
self.oldPos = event.globalPos()
def view_options_layout(self):
sizepolicy = QSizePolicy()
sizepolicy.setVerticalPolicy(QSizePolicy.Expanding)
sizepolicy.setHorizontalPolicy(QSizePolicy.Preferred)
btn_list = QPushButton()
btn_list.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Bulleted List-50.png')))
btn_list.setCheckable(True)
btn_list.toggle()
btn_list.setSizePolicy(sizepolicy)
btn_list.clicked[bool].connect(lambda: self.change_view('list'))
self.list_view_btn = btn_list
btn_thumb = QPushButton()
btn_thumb.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Picture-64.png')))
btn_thumb.setCheckable(True)
btn_thumb.setSizePolicy(sizepolicy)
btn_thumb.clicked[bool].connect(lambda: self.change_view('thumb'))
self.thumb_view_btn = btn_thumb
layout = QVBoxLayout()
layout.addWidget(self.list_view_btn)
layout.addWidget(self.thumb_view_btn)
return layout
def change_view(self, view):
if self.current_view == view:
if view == 'list':
self.list_view_btn.toggle()
self.current_view = 'list'
else:
self.thumb_view_btn.toggle()
self.current_view = 'thumb'
else:
if view == 'list':
self.thumb_view_btn.toggle()
self.current_view = 'list'
else:
self.list_view_btn.toggle()
self.current_view = 'thumb'
def create_article_layout(self, view):
if view == 'list':
return self.article_list_layout()
elif view == 'thumb':
return self.create_article_layout('list')
def article_list_layout(self):
lst = QTreeWidget()
header_lst = ["Location", "Title", "id", "Status", "Tags"]
header = QTreeWidgetItem(header_lst)
lst.setHeaderItem(header)
lst.setSelectionMode(QAbstractItemView.ExtendedSelection)
for article in self.selection_article_list:
lst.addTopLevelItem(article)
for column in range(len(header_lst)):
lst.resizeColumnToContents(column)
self.article_tree = lst
return self.article_tree
def update_article_list_layout(self, headers=None):
"""
Re-formats the selection window QTreeWidget by a given set of column headers.
:param headers: List of strings containing metadata field names.
:return:
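Example (illustrative; the header names follow the defaults used below):
    self.update_article_list_layout(headers=['location', 'title', 'id', 'tags'])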
"""
# Set headers to default if none are given.
if headers is None:
headers = ["location", "title", "id", "status", "tags"]
self.article_tree.clear()
# Iterate through the article ids.
for article_id in self.selection_article_list:
# Get the type of the article
article_type = type(article_id)
if article_type is not str:
# If the article id is not a string it is likely an integer id for a figshare article. If so make it a
# string.
if article_type is int:
article_id = str(article_id)
# Otherwise it is an unrecognised format and should be ignored.
else:
article_id = None
# From the string value of the article_id determine if it is a local file.
# Local ids are prepended by an 'L'.
if article_id is not None:
if article_id[0] == 'L':
# Get article from local articles
article = self.main_window.local_articles[article_id]
else:
# Get article from figshare articles
article = self.main_window.articles[article_id]
# generate a qtreewidgetitem from the article and headers list.
article.gen_qtree_item(headers)
# Add qtreeitem as top level item in the tree.
self.article_tree.addTopLevelItem(article.qtreeitem)
# Format the Qtreewidget
for column in range(len(headers)):
# Set the width of the columns to the data.
self.article_tree.resizeColumnToContents(column)
# sort the list by local or figshare.
self.article_tree.sortItems(0, Qt.AscendingOrder)
def upload_layout(self):
sizepolicy = QSizePolicy()
sizepolicy.setVerticalPolicy(QSizePolicy.Expanding)
sizepolicy.setHorizontalPolicy(QSizePolicy.Preferred)
btn_upload = QPushButton()
btn_upload.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/figshare_upload.png')))
btn_upload.setSizePolicy(sizepolicy)
btn_upload.pressed.connect(self.upload_selection)
self.upload_btn = btn_upload
layout = QVBoxLayout()
layout.addWidget(self.upload_btn)
return layout
def upload_selection(self):
header_item = self.article_tree.headerItem()
for column in range(header_item.columnCount()):
if header_item.data(column, 0) == 'id':
id_element = column
break
tree_items = self.article_tree.selectedItems()
article_ids = []
for item in tree_items:
article_ids.append(item.data(id_element, 0))
if tree_items == []:
reply = QMessageBox.question(self, 'Message', "Upload all files?",
QMessageBox.Yes, QMessageBox.No)
if reply == QMessageBox.Yes:
article_ids = []
for row in range(self.article_tree.topLevelItemCount()):
article_item = self.article_tree.topLevelItem(row)
article_ids.append(article_item.data(id_element, 0))
else:
article_ids = None
if article_ids is not None:
upload_type, upload_id = self.projects_or_collections_upload()
if upload_type == 'project':
# Can only upload new files to a project. Cannot move articles between existing projects.
# Iterate through the selected articles.
for article in article_ids:
# Check that the article is a local file.
if article[0] == 'L':
self.upload_to_project(article, upload_id)
else:
pass
elif upload_type == 'collection':
# Can only add existing articles to a collection.
pass
for i in range(2):
self.main_window.centralWidget().projects_window.projects_info_window.on_show_articles_pressed()
def projects_or_collections_upload(self):
if self.main_window.centralWidget().projects_open:
projects_window = self.main_window.centralWidget().projects_window
if projects_window.projects_info_open:
projects_info_window = projects_window.projects_info_window
if projects_info_window.articles_window_open:
return 'project', projects_info_window.project_id
else:
return 'collection', 'collection_id'
def upload_to_project(self, local_article_id, project_id):
"""
Uploads a local file to a given figshare project.
:param local_article_id: str. ID of local file to be uploaded.
:param project_id: int. Project ID for file to be uploaded to.
:return:
"""
# Get the local article
local_article = self.main_window.local_articles[local_article_id]
# Generate the upload dictionary.
upload_dict = local_article.get_upload_dict()
local_file_location = local_article.figshare_desktop_metadata['location']
# Upload file to project.
projects = Projects(self.token)
try:
figshare_article_id = projects.create_article(project_id, upload_dict)
projects.upload_file(figshare_article_id, local_file_location)
except FileExistsError as err:
print(err)
except HTTPError as err:
print(err)
def activate_project_article_selection_btn(self):
if self.main_window.centralWidget().projects_open:
if self.main_window.centralWidget().projects_window.projects_info_open:
if self.main_window.centralWidget().projects_window.projects_info_window.articles_window_open:
window = self.main_window.centralWidget().projects_window.projects_info_window
window.articles_window.btn_selection.setEnabled(True)
def deactivate_project_article_selection_btn(self):
if self.main_window.centralWidget().projects_open:
if self.main_window.centralWidget().projects_window.projects_info_open:
if self.main_window.centralWidget().projects_window.projects_info_window.articles_window_open:
window = self.main_window.centralWidget().projects_window.projects_info_window
window.articles_window.btn_selection.setEnabled(False)
def activate_data_save_btn(self):
if self.main_window.centralWidget().data_open:
if self.main_window.centralWidget().data_window.local_article_edit_window_open:
window = self.main_window.centralWidget().data_window.local_metadata_window
window.btn_save.setEnabled(True)
def deactivate_data_save_btn(self):
if self.main_window.centralWidget().data_open:
if self.main_window.centralWidget().data_window.local_article_edit_window_open:
window = self.main_window.centralWidget().data_window.local_metadata_window
window.btn_save.setEnabled(False)
<file_sep>/formatting/formatting.py
"""
"""
import os
from PyQt5.QtWidgets import (QWidget, QPushButton, QToolTip, QMessageBox, QMainWindow,
QAction, qApp, QHBoxLayout, QVBoxLayout, QSizePolicy, QShortcut)
from PyQt5.QtGui import (QIcon, QFont, QKeySequence)
from PyQt5.QtCore import (Qt)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
def scaling_ratio(app):
"""
Determines a scaling ratio for window and font sizes
:param app: QApplication object
:return: tuple of (window ratio, font ratio) used to scale widget and font sizes
"""
# Reference Sizes
ref_dpi = 144.
ref_height = 1020
ref_width = 1920
# Current Device Sizes
dpi = app.primaryScreen().logicalDotsPerInch()
geom = app.primaryScreen().availableGeometry()
height = geom.height()
width = geom.width()
# Size Ratios
m_ratio = min(height/ref_height, width/ref_width)
m_ratiofont = min(height*ref_dpi/(dpi*ref_height),
width*ref_dpi/(dpi*ref_width))
# Check to see if the ratio is too small
if m_ratio < 1:
m_ratio = 1
if m_ratiofont < 1:
m_ratiofont = 1
return m_ratio, m_ratiofont
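# Worked example (illustrative): on a 1920x1080 screen at 96 dpi,
# m_ratio = min(1080/1020, 1920/1920) = 1.0 and
# m_ratiofont = min(1080*144/(96*1020), 1920*144/(96*1920)) = 1.5,
# so fonts are scaled up by 50% while window geometry is left unchanged.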
def button_font(app):
"""
Returns a QFont object for buttons
:param app: QApplication object
:return: QFont object
"""
# Gets window and font ratios
screen_ratio, font_ratio = scaling_ratio(app)
# Reference font size
ref_fontsize = 17
# Scale font size
fontsize = font_ratio * ref_fontsize
# Create and modify QFont object
font = QFont('SansSerif')
font.setBold(True)
font.setPointSize(fontsize)
return font
def title_font(app):
"""
Returns a QFont object for titles
:param app: QApplication object
:return: QFont object
"""
# Gets window and font ratios
screen_ratio, font_ratio = scaling_ratio(app)
# Reference font size
ref_fontsize = 12
# Scale font size
fontsize = font_ratio * ref_fontsize
# Create and modify QFont object
font = QFont('SansSerif')
font.setBold(True)
font.setPointSize(fontsize)
return font
def label_font(app):
"""
Returns a QFont object for labels
:param app: QApplication object
:return: QFont object
"""
# Gets window and font ratios
screen_ratio, font_ratio = scaling_ratio(app)
# Reference font size
ref_fontsize = 9
# Scale font size
fontsize = font_ratio * ref_fontsize
# Create and modify QFont object
font = QFont('SansSerif')
font.setBold(True)
font.setPointSize(fontsize)
return font
def edit_font(app):
"""
Returns a QFont object for edit fields
:param app: QApplication object
:return: QFont object
"""
# Gets window and font ratios
screen_ratio, font_ratio = scaling_ratio(app)
# Reference font size
ref_fontsize = 10
# Scale font size
fontsize = font_ratio * ref_fontsize
# Create and modify QFont object
font = QFont('SansSerif')
font.setBold(False)
font.setPointSize(fontsize)
return font
def log_font(app):
"""
Returns a QFont object for log text
:param app: QApplication object
:return: QFont object
"""
# Gets window and font ratios
screen_ratio, font_ratio = scaling_ratio(app)
# Reference font size
ref_fontsize = 8
# Scale font size
fontsize = font_ratio * ref_fontsize
# Create and modify QFont object
font = QFont('SansSerif')
font.setBold(False)
font.setPointSize(fontsize)
return font
def search_font(app):
"""
Returns a QFont object for search bars
:param app: QApplication object
:return: QFont object
"""
# Gets window and font ratios
screen_ratio, font_ratio = scaling_ratio(app)
# Reference font size
ref_fontsize = 11
# Scale font size
fontsize = font_ratio * ref_fontsize
# Create and modify QFont object
font = QFont('SansSerif')
font.setBold(False)
font.setPointSize(fontsize)
return font
def combo_font(app):
"""
Returns a QFont object for combo boxes
:param app: QApplication object
:return: QFont object
"""
# Gets window and font ratios
screen_ratio, font_ratio = scaling_ratio(app)
# Reference font size
ref_fontsize = 7
# Scale font size
fontsize = font_ratio * ref_fontsize
# Create and modify QFont object
font = QFont('SansSerif')
font.setBold(False)
font.setPointSize(fontsize)
return font
def checkable_button(app, button):
"""
Formats a QPushButton to be checkable
:param app: QApplication object
:param button: QPushButton
:return:
"""
button.setCheckable(True)
button.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
button.setFont(button_font(app))
def press_button(app, button):
"""
Formats a QPushButton
:param app: QApplication object
:param button: QPushButton
:return:
"""
size_policy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Expanding)
size_policy.setHorizontalStretch(0)
button.setSizePolicy(size_policy)
button.setFont(button_font(app))
def search_bar(app, lineedit):
"""
Formats a QLineEdit object with a search bar style
:param app: QApplication object
:param lineedit: QLineEdit object to be styled
:return:
"""
lineedit.sizeHint()
lineedit.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
lineedit.setFont(search_font(app))
def grid_title(app, label):
"""
Formats a QLabel
:param app: QApplication
:param label: QLabel
:return:
"""
size_policy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
label.setSizePolicy(size_policy)
label.setFont(label_font(app))
def grid_label(app, label):
"""
Formats a QLabel
:param app: QApplication
:param label: QLabel
:return:
"""
size_policy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
label.setSizePolicy(size_policy)
label.setFont(label_font(app))
def grid_edit(app, edit):
"""
Formats a QLineEdit
:param app: QApplication
:param edit: QLineEdit, or QTextEdit
:return:
"""
size_policy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
edit.setSizePolicy(size_policy)
edit.setFont(edit_font(app))
def log_edit(app, edit):
"""
Formats a QTextEdit for use as a log window
:param app: QApplication
:param edit: QTextEdit or QLineEdit
:return:
"""
size_policy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
edit.setSizePolicy(size_policy)
edit.setFont(log_font(app))
def search_combo(app, combo):
"""
Formats a QComboBox being used as a search field setter
:param app: QApplication
:param combo: QComboBox
:return:
"""
size_policy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
combo.setSizePolicy(size_policy)
combo.setFont(combo_font(app))
<file_sep>/article_edit_window/local_metadata_window.py
"""
"""
# PyQt Imports
from PyQt5.QtWidgets import (QMdiSubWindow, QScrollArea,
QWidget, QGridLayout)
# Figshare Desktop Imports
from Figshare_desktop.article_edit_window.article_edit_window import ArticleEditWindow
# Figshare API Imports
from figshare_interface.http_requests.figshare_requests import issue_request
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class LocalMetadataWindow(ArticleEditWindow):
def __init__(self, app, OAuth_token, parent, article_ids):
super(QMdiSubWindow, self).__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.article_ids = article_ids
self.initFig()
self.initUI()
def initFig(self):
"""
Initialises the figshare data
:return:
"""
n_articles = len(self.article_ids)
# For more than one file
if n_articles > 1:
# First generate an empty dictionary for the default figshare metadata
figshare_metadata = {}
article = self.parent.local_articles[self.article_ids[0]]
for d in article.input_dicts()[0:1]:
figshare_metadata = {**figshare_metadata, **d}
self.figshare_metadata = dict.fromkeys(figshare_metadata)
# Get the type of the first article
article = self.parent.local_articles[self.article_ids[0]]
initial_type = article.get_type()
# Initially set all files as the same type
self.same_type = True
        for article_id in self.article_ids:
            article = self.parent.local_articles[article_id]
article_type = article.get_type()
if article_type != initial_type:
self.same_type = False
break
        # Set the file specific metadata dictionary as None by default
self.file_metadata = None
# If all files are of the same kind then generate an empty dictionary from the file specific metadata
if self.same_type:
if len(article.input_dicts()) > 2:
file_dict = {}
for d in article.input_dicts()[2:]:
file_dict = {**file_dict, **d}
self.file_metadata = dict.fromkeys(file_dict)
# For a Single Article
else:
# First get dictionary for the default figshare metadata
self.figshare_metadata = self.parent.local_articles[self.article_ids[0]].figshare_metadata
article = self.parent.local_articles[self.article_ids[0]]
# Set the dictionary of file specific metadata keys and values
self.file_metadata = None
if len(article.input_dicts()) > 2:
file_dict = {}
for d in article.input_dicts()[2:]:
file_dict = {**file_dict, **d}
self.file_metadata = file_dict
# Metadata Dictionaries
allowed_cats = issue_request(method='GET', endpoint='categories', token=self.token)
self.cat_dict = {}
for cat in allowed_cats:
self.cat_dict[cat['id']] = cat['title']
self.defined_type_dict = {'': 0, 'figure': 1, 'media': 2, 'dataset': 3, 'fileset': 4, 'poster': 5,
'paper': 6,
'presentation': 7, 'thesis': 8, 'code': 9, 'metadata': 10}
self.license_dict = {0: '', 1: 'CC BY', 2: 'CC-0', 3: 'MIT', 4: 'GPL', 5: 'GPL-2.0', 6: 'GPL-3.0',
7: 'Apache-2.0'}
#####
# Window Formatting
#####
#####
# Window Widgets
#####
def init_figshare_metadata_tab(self):
"""
Creates a QWidget for the default Figshare metadata
:return:
"""
# Create widget object to fill with metadata
tab = QScrollArea()
scroll_wid = QWidget()
# Create metadata labels and fields
title_lbl, title_edit = self.create_lineedit('Title', self.figshare_metadata['title'])
if len(self.article_ids) > 1:
title_edit.setEnabled(False)
title_edit.clear()
title_edit.setPlaceholderText('Files will retain their individual titles')
descr_lbl, descr_edit = self.create_textedit('Description', self.figshare_metadata['description'])
ref_lbl, ref_field = self.create_buttonfield('References', self.figshare_metadata['references'])
tags_lbl, tags_field = self.create_buttonfield('Tags', self.figshare_metadata['tags'])
cat_lbl, cat_field = self.create_categories_field('Categories', self.figshare_metadata['categories'])
auth_lbl, auth_field = self.create_author_field('Authors', self.figshare_metadata['authors'])
def_lbl, def_combo = self.create_combo('Defined Type', self.defined_type_dict,
self.figshare_metadata['defined_type'])
fund_lbl, fund_field = self.create_buttonfield('Funding', self.figshare_metadata['funding'])
lic_lbl, lic_combo = self.create_combo('License', self.license_dict, self.figshare_metadata['license'])
# Create layout
grid = QGridLayout()
# Add widgets to layout
grid.addWidget(title_lbl, 0, 0)
grid.addWidget(title_edit, 0, 1)
grid.addWidget(descr_lbl, 1, 0)
grid.addWidget(descr_edit, 1, 1)
grid.addWidget(ref_lbl, 2, 0)
grid.addWidget(ref_field, 2, 1)
grid.addWidget(tags_lbl, 3, 0)
grid.addWidget(tags_field, 3, 1)
grid.addWidget(cat_lbl, 4, 0)
grid.addWidget(cat_field, 4, 1)
grid.addWidget(auth_lbl, 5, 0)
grid.addWidget(auth_field, 5, 1)
grid.addWidget(def_lbl, 6, 0)
grid.addWidget(def_combo, 6, 1)
grid.addWidget(fund_lbl, 7, 0)
grid.addWidget(fund_field, 7, 1)
grid.addWidget(lic_lbl, 8, 0)
grid.addWidget(lic_combo, 8, 1)
scroll_wid.setLayout(grid)
tab.setWidget(scroll_wid)
return tab
def init_filespecific_metadata_tab(self):
"""
Creates a QTabWidget to add to the article edit window
:return:
"""
        # Get the first article from the article ids list
        article = self.parent.local_articles[self.article_ids[0]]
# Check to see if the article is a known file format
if self.file_metadata is not None:
# Create widget object to fill with metadata
tab = QScrollArea()
scroll_wid = QWidget()
grid = QGridLayout()
row_number = 0
for key, value in self.file_metadata.items():
value = str(value)
lbl, edit = self.create_lineedit(key, value)
grid.addWidget(lbl, row_number, 0)
grid.addWidget(edit, row_number, 1)
row_number += 1
scroll_wid.setLayout(grid)
tab.setWidget(scroll_wid)
return tab
#####
# Widget Actions
#####
def on_exit_pressed(self):
"""
overrides parent
:return:
"""
# Close the article edit window
self.parent.open_windows.remove('local_article_edit_window')
self.parent.local_article_edit_window.close()
# Open the local articles window
self.parent.section_window.open_data_articles_window()
article_tree = self.parent.data_articles_window.article_tree
article_tree.article_ids = set(self.parent.local_articles.keys())
article_tree.fill_tree(article_tree.tree_headers, article_tree.article_ids)
article_tree.enable_fields()
self.parent.data_articles_window.edit_btn.setEnabled(True)
#####
# Figshare Actions
#####
def update_article_figshare_metadata(self, local_article_id: str):
"""
:param local_article_id: String containing the local article id, 'local_#'
:return:
"""
# Get the current/old figshare metadata
article = self.parent.local_articles[local_article_id]
old_figshare_metadata = article.figshare_metadata
# Get the new/edited figshare metadata
new_figshare_metadata = {}
figshare_grid = self.figshare_tab.widget().layout()
# Title
title = figshare_grid.itemAtPosition(0, 1).widget().text()
new_figshare_metadata['title'] = title
# Description
description = figshare_grid.itemAtPosition(1, 1).widget().toPlainText()
new_figshare_metadata['description'] = description
# References
references = figshare_grid.itemAtPosition(2, 1).widget().get_tags()
new_figshare_metadata['references'] = references
# Tags
tags = figshare_grid.itemAtPosition(3, 1).widget().get_tags()
new_figshare_metadata['tags'] = tags
# Categories
cat_list = figshare_grid.itemAtPosition(4, 1).widget().get_tags()
new_figshare_metadata['categories'] = cat_list
# Authors
auth_list = figshare_grid.itemAtPosition(5, 1).widget().get_tags()
new_figshare_metadata['authors'] = auth_list
# Defined Type
defined_type = figshare_grid.itemAtPosition(6, 1).widget().currentText()
new_figshare_metadata['defined_type'] = defined_type
# Funding
fund_tags = figshare_grid.itemAtPosition(7, 1).widget().get_tags()
funding = ''
for tag in fund_tags:
funding += tag + ':_:'
new_figshare_metadata['funding'] = funding
# License
license = figshare_grid.itemAtPosition(8, 1).widget().currentIndex()
license = str(license)
new_figshare_metadata['license'] = license
# Create an empty dictionary to add updates/edits
update_dict = {}
# Check for changes
for key, value in new_figshare_metadata.items():
if value != 'None' and value is not None and value != '' and value != []:
if value != old_figshare_metadata[key]:
update_dict[key] = value
# Update the local article
article.update_info(update_dict)
# If there is no file specific metadata then update the Whoosh index document now
if self.file_metadata is None:
self.update_document(local_article_id)
def update_article_file_metadata(self, local_article_id: str):
"""
overrides parent
:return:
"""
# Get the current/old file specific metadata
article = self.parent.local_articles[local_article_id]
old_file_dicts = article.input_dicts()[2:]
old_file_metadata = {}
for d in old_file_dicts:
for key, value in d.items():
old_file_metadata[key] = value
# Get the new/edited figshare metadata
new_file_metadata = {}
file_grid = self.filespecific_tab.widget().layout()
# Get the number of rows in the grid layout
n_rows = file_grid.rowCount()
# Get the new file metadata
for row in range(n_rows):
lbl = file_grid.itemAtPosition(row, 0).widget().text()
edit = file_grid.itemAtPosition(row, 1).widget().text()
new_file_metadata[lbl] = edit
# Check for changes
update_dict = {}
for key, value in new_file_metadata.items():
if value != 'None':
if value != old_file_metadata[key]:
update_dict[key] = value
# Update local version of article
article.update_info(update_dict)
# Update the Whoosh index document
self.update_document(local_article_id)
def update_document(self, article_id):
"""
Updates the Whoosh Index document of the given article
:param article_id:
:return:
"""
update_dict = {}
for d in self.parent.local_articles[article_id].input_dicts():
update_dict = {**update_dict, **d}
self.parent.local_article_index.updateDocument('local_articles', update_dict)
<file_sep>/data_window/figshare_add_window.py
"""
"""
from PyQt5.QtWidgets import (QWidget, QLabel, QPushButton, QLineEdit, QMessageBox, QFileDialog, QMdiSubWindow,
QTextEdit, QGridLayout, QHBoxLayout, QVBoxLayout, QSizePolicy, QFrame)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, pyqtSlot)
from Figshare_desktop.formatting.formatting import (press_button)
from Figshare_desktop.data_window.figshare_add_article_list import ArticleList
from Figshare_desktop.data_window.figshare_projects_button import ProjectButton
from Figshare_desktop.data_window.figshare_collections_button import CollectionButton
from Figshare_desktop.data_window.upload_control_widget import UploadControl
from Figshare_desktop.data_window.figshare_upload_log import UploadLog
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class FigshareAddWindow(QMdiSubWindow):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.initFig()
self.initUI()
def initFig(self):
self.upload_project = None
self.upload_collection = None
def initUI(self):
"""
Initiates the user interface
:return:
"""
self.format_window()
# Create a layout to hold all widgets
hbox = QHBoxLayout()
# Create the article list widget
self.upload_queue = self.create_article_list()
hbox.addWidget(self.upload_queue)
hbox.addWidget(self.create_project_btn())
hbox.addWidget(self.create_collections_btn())
hbox.addWidget(self.create_control_btns())
hbox.addWidget(self.create_log())
# Create a central widget for the local data window
window_widget = QWidget()
# Add the vertical box layout
window_widget.setLayout(hbox)
# Set the projects window widget
self.setWidget(window_widget)
def format_window(self):
"""
        Formats the local data window
:return:
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) / 3)
self.setGeometry(x0, y0, w, h)
# Remove frame from projects window
self.setWindowFlags(Qt.FramelessWindowHint)
#####
# Window Widgets
#####
def create_article_list(self):
"""
Creates a QTreeWidget that hold articles until they have been added to figshare
:return: QHBoxLayout
"""
article_list = ArticleList(self.app, self.token, self.parent)
return article_list
def create_project_btn(self):
"""
Creates a QPushButton that prompts the user to choose the figshare project to which to add the articles
:return: QPushButton
"""
project_btn = ProjectButton(self.app, self.token, self.parent)
return project_btn
def create_collections_btn(self):
"""
Creates a QPushButton that prompts the user to choose a collection to directly add articles to
:return: QPushButton
"""
collection_btn = CollectionButton(self.app, self.token, self.parent)
return collection_btn
def create_control_btns(self):
"""
Creates a Widget with buttons to control the upload process
:return: QWidget
"""
self.control_widget = UploadControl(self.app, self.token, self.parent)
return self.control_widget
def create_log(self):
"""
Creates a QTextEdit that logs the interaction with Figshare
:return: QTextEdit
"""
self.upload_log = UploadLog(self.app, self.token, self.parent)
return self.upload_log
<file_sep>/projects_windows/projects_window.py
"""
"""
import os
import math
from PyQt5.QtWidgets import (QMdiSubWindow, QLabel, QPushButton, QMessageBox, QMainWindow,
QWidget, QLineEdit, QHBoxLayout, QVBoxLayout, QSizePolicy, QScrollBar)
from PyQt5.QtGui import (QIcon, QFont, QPalette, QColor)
from PyQt5.QtCore import (Qt, QObject)
from ..formatting.formatting import (scaling_ratio, checkable_button, search_bar)
from Figshare_desktop.projects_windows.new_project_window import NewProjectWindow
from Figshare_desktop.projects_windows.project_info_window import ProjectInfoWindow
from figshare_interface import Projects
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class ProjectsWindow(QMdiSubWindow):
def __init__(self, app, OAuth_token, parent):
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.open_windows = self.parent.open_windows
self.initFig()
self.initUI()
def initFig(self):
"""
Initialize Figshare information
"""
self.project_list = self.get_project_list(self.token)
def initUI(self):
self.format_window()
# Create a horizontal box layout to hold the project buttons
self.project_buttons_box = QHBoxLayout()
# Create a vertical box layout to hold the project window widgets and layouts
self.vbox = QVBoxLayout()
# Add the Projects button to the vertical box layout
init_finish = len(self.project_list)
if init_finish > 4:
init_finish = 4
self.create_project_bar(0, init_finish)
self.vbox.addLayout(self.project_buttons_box)
# Add the scroll bar to the vertical box layout
self.s_bar = self.scroll_bar()
self.vbox.addWidget(self.s_bar)
self.hbox = QHBoxLayout()
temp = QVBoxLayout()
temp.addWidget(self.search_bar())
temp.addLayout(self.management_buttons())
self.hbox.addLayout(temp)
self.hbox.addLayout(self.vbox)
# Create a central widget for the projects window
window_widget = QWidget()
# Add the vertical box layout
window_widget.setLayout(self.hbox)
# Set the projects window widget
self.setWidget(window_widget)
#####
# Window Formatting
#####
def format_window(self):
"""
Formats the Projects window
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) / 6)
self.setGeometry(x0, y0, w, h)
# Remove frame from projects window
self.setWindowFlags(Qt.FramelessWindowHint)
#####
# Window Widgets
#####
def scroll_bar(self):
"""
Creates a scroll bar set to the size of the projects list
:return: QScrollBar Object
"""
s_bar = QScrollBar(Qt.Horizontal)
s_bar.setMaximum(len(self.project_list) - 4)
s_bar.sliderMoved.connect(self.slider_val)
s_bar.valueChanged.connect(self.slider_val)
return s_bar
def create_proj_thumb(self, title, published_date, project_id):
"""
Creates a large pushbutton for a project
:param title: string. Project title
        :param published_date: string. Project published date
        :param project_id: int. Figshare project id number
:return: QPushButton object
"""
geom = self.geometry()
        # Get the scaling ratios for the current window
w_ratio, f_ratio = scaling_ratio(self.app)
# Scale the font sizes
title_fnt_size = 12 * f_ratio
date_ftn_size = 8 * f_ratio
# Create the title label
title_lbl = QLabel()
title_lbl.setText("{}".format(title))
title_lbl_fnt = QFont('SansSerif', title_fnt_size)
title_lbl_fnt.setBold(True)
title_lbl.setFont(title_lbl_fnt)
title_lbl.setWordWrap(True)
# Create the date label
date_lbl = QLabel()
if published_date is None:
published_date = 'Private'
date_lbl.setText("Published: {}".format(published_date))
date_lbl_fnt = QFont('SansSerif', date_ftn_size)
date_lbl.setFont(date_lbl_fnt)
date_lbl.setStyleSheet('color: gray')
date_lbl.setWordWrap(True)
# Create a layout to hold the labels
lbl_box = QVBoxLayout()
# Add labels to layout
lbl_box.addWidget(title_lbl)
lbl_box.addWidget(date_lbl)
# Create a button for the project
btn = QPushButton(self)
checkable_button(self.app, btn)
btn.setLayout(lbl_box)
btn.clicked[bool].connect(lambda: self.on_project_pressed(project_id))
self.project_buttons_box.addWidget(btn)
def create_project_bar(self, start, finish):
"""
Creates a series of Project push buttons
:param start: start position in projects list
:param finish: finish position in projects list
"""
self.buttons = {}
i = 0
for project_pos in range(start, finish):
title = self.project_list[project_pos]['title']
pub_date = self.project_list[project_pos]['published_date']
project_id = self.project_list[project_pos]['id']
self.create_proj_thumb(title, pub_date, project_id)
self.buttons[project_id] = self.project_buttons_box.itemAt(i).widget()
i += 1
def management_buttons(self):
"""
Creates a layout that holds buttons to be used for creating and deleting projects
        :return: QHBoxLayout holding the create and delete project buttons
"""
# Create New Project Button
np_btn = QPushButton()
np_btn.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Expanding)
np_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/Folder-48.png')))
np_btn.setToolTip('Create a new Figshare Project')
np_btn.setToolTipDuration(1)
np_btn.pressed.connect(self.on_projects_btn_pressed)
# Create Delete Project Button
del_btn = QPushButton()
del_btn.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Expanding)
del_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/del_folder.png')))
del_btn.setToolTip('Delete Selected Project')
del_btn.setToolTipDuration(1)
del_btn.pressed.connect(self.on_delete_btn_pressed)
# Create layout to hold buttons
hbox = QHBoxLayout()
# Add Buttons to layout
hbox.addWidget(np_btn)
hbox.addWidget(del_btn)
return hbox
def search_bar(self):
"""
Creates a QLineEdit object for the user to enter a search query
        :return: QLineEdit object connected to the project search functions
"""
# Create text box
edit = QLineEdit()
# Set font style
search_bar(self.app, edit)
# Set place holder text
edit.setPlaceholderText('Search')
# Add a clear button to the line edit
edit.setClearButtonEnabled(True)
# Add mouse over text
edit.setToolTip('Search for specific Figshare Projects')
edit.setToolTipDuration(1)
# Connect search function to the return key
edit.returnPressed.connect(lambda: self.search_on_return(edit.text()))
edit.textChanged.connect(lambda: self.search_on_clear(edit.text()))
return edit
#####
# Widget Actions
#####
def slider_val(self):
"""
Called when the projects button slider is changed.
Removes all existing buttons and regenerates from the new position
:return:
"""
while self.project_buttons_box.count():
item = self.project_buttons_box.takeAt(0)
item.widget().deleteLater()
s_bar_pos = self.s_bar.value()
        if 1 <= len(self.project_list) <= 4:
number = len(self.project_list)
else:
number = 4
self.s_bar.setMaximum(len(self.project_list) - number)
self.create_project_bar(s_bar_pos, s_bar_pos + number)
def search_init(self):
"""
Called when the projects search bar is used.
Removes all existing buttons and regenerates from new projects list
:return:
"""
while self.project_buttons_box.count():
item = self.project_buttons_box.takeAt(0)
item.widget().deleteLater()
if 1 <= len(self.project_list) <= 4:
number = len(self.project_list)
else:
number = 4
self.s_bar.setMaximum(len(self.project_list) - number)
self.create_project_bar(0, number)
def search_on_return(self, search_text):
"""
Called when return is pressed in the search bar.
:return:
"""
if search_text != '':
self.project_list = self.search_projects(search_text, self.token)
self.search_init()
def search_on_clear(self, lineedit_text):
"""
Called when the search bar is cleared
:return:
"""
if lineedit_text == '':
self.project_list = self.get_project_list(self.token)
self.slider_val()
def on_projects_btn_pressed(self):
"""
Called when the create new project button is pressed
"""
if 'new_project_window' in self.open_windows:
self.open_windows.remove('new_project_window')
self.parent.new_project_window.close()
else:
self.open_windows.remove('projects_window')
self.close()
if 'project_info_window' in self.open_windows:
self.parent.project_info_window.close()
self.open_windows.remove('project_info_window')
if 'project_articles_window' in self.open_windows:
self.parent.project_articles_window.close()
self.open_windows.remove('project_articles_window')
if 'article_edit_window' in self.open_windows:
self.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
self.open_windows.add('new_project_window')
self.parent.new_project_window = NewProjectWindow(self.app, self.token, self.parent)
self.parent.mdi.addSubWindow(self.parent.new_project_window)
self.parent.new_project_window.show()
def on_project_pressed(self, project_id):
"""
Called when a project is clicked.
:return:
"""
# For if there is already a project info window open
if 'project_info_window' in self.open_windows:
# Get the project id number of the current window
open_proj = self.parent.project_info_window.project_id
# For a different project than the currently open project
if open_proj != project_id:
# If the current project is in the current view of project buttons (it may have been scrolled away from)
if open_proj in self.buttons:
# If that button is checked, uncheck it
if self.buttons[open_proj].isChecked():
self.buttons[open_proj].toggle()
# Close the currently open project info window
self.parent.project_info_window.close()
# Create a new project info window for the different project
self.parent.project_info_window = ProjectInfoWindow(self.app, self.token, self.parent, project_id)
# Add it as a sub window to the framing window
self.parent.mdi.addSubWindow(self.parent.project_info_window)
self.parent.project_info_window.show()
# If the current projects button is pressed
else:
# Close the window and remove from the open window list
self.open_windows.remove('project_info_window')
self.parent.project_info_window.close()
# If any sub windows are open close them as well
if 'project_articles_window' in self.open_windows:
self.open_windows.remove('project_articles_window')
self.parent.project_articles_window.close()
if 'article_edit_window' in self.open_windows:
self.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
# For when no project info window is open
else:
self.open_windows.add('project_info_window')
self.parent.project_info_window = ProjectInfoWindow(self.app, self.token, self.parent, project_id)
self.parent.mdi.addSubWindow(self.parent.project_info_window)
self.parent.project_info_window.show()
def on_delete_btn_pressed(self):
"""
Called when the project delete button is pressed/
:return:
"""
open_proj = self.parent.project_info_window.project_id
project_title = self.parent.project_info_window.project_info['title']
msg = "Are you sure you want to delete Figshare Project: {}".format(project_title)
msg_box = QMessageBox.question(self, 'Message', msg, QMessageBox.Yes, QMessageBox.No)
if msg_box == QMessageBox.Yes:
successful = self.delete_project(self.token, open_proj)
if successful:
con_reply = QMessageBox.information(self, 'Deletion Confirmation', 'Project successfully deleted',
QMessageBox.Ok)
if con_reply == QMessageBox.Ok:
self.reopen_projects()
else:
self.reopen_projects()
else:
con_reply = QMessageBox.warning(self, 'Deletion Confirmation',
'Unknown error occurred.\n Project may not have been deleted.',
QMessageBox.Ok)
if con_reply == QMessageBox.Ok:
self.reopen_projects()
else:
self.reopen_projects()
def reopen_projects(self):
"""
Called to open and close the projects window.
:return:
"""
for i in range(2):
self.parent.section_window.on_projects_btn_pressed()
#####
# Figshare API Interface Calls
#####
def get_project_list(self, token):
"""
        Returns the user's private project list
        :param token: Figshare OAuth token
        :return: list of project dicts
"""
projects = Projects(token)
return projects.get_list()
def search_projects(self, search_text, token):
"""
Returns a list of projects matching the users search criteria
:param search_text: String. Figshare style elastic search string
:param token: Figshare OAuth token
:return:
"""
projects = Projects(token)
result = projects.search(search_text)
if len(result) == 0:
result = projects.get_list()
return result
def delete_project(self, token, project_id):
"""
Deletes the given project from Figshare
:param token:
:param project_id: Int. Figshare project ID number
:return:
"""
projects = Projects(token)
try:
projects.delete(project_id, safe=False) # Suppresses command line requirement for acknowledgement
return True
except:
return False
<file_sep>/projects_windows/articles_window.py
"""Project Articles Window
This window is used to display the existing articles in a Figshare project and allow for the user to manage article
publishing, metadata editing, and allows for the article files to be downloaded locally. To aid with article
classification searching is possible by making use of the Figshare Elastic search API calls. All metadata fields can be
searched explicitly or all field general searches can be performed.
Todo:
* Add a button to allow for articles to be deleted.
"""
# Standard Imports
import os
from requests import HTTPError
# PyQt Imports
from PyQt5.QtWidgets import (QWidget, QApplication, QPushButton, QMainWindow, QMessageBox, QFileDialog, QMdiSubWindow,
QHBoxLayout, QVBoxLayout)
from PyQt5.QtGui import (QIcon)
from PyQt5.QtCore import (Qt)
# Figshare Desktop Imports
from Figshare_desktop.custom_widgets.article_list import ArticleList
from Figshare_desktop.formatting.formatting import (press_button)
from Figshare_desktop.article_edit_window.article_edit_window import ArticleEditWindow
from Figshare_desktop.figshare_articles.determine_type import gen_article
from Figshare_desktop.data_window.search_index import ArticleIndex
# Figshare API Imports
from figshare_interface import (Projects)
from figshare_interface.http_requests.figshare_requests import (download_file)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class ProjectsArticlesWindow(QMdiSubWindow):
"""
SubWindow of the Projects Section
=================================
    This window is used to display a list of articles within a given project and allow for the searching, publishing,
    editing, deleting, and downloading of their files.
    Searching is performed using the Figshare elastic search engine.
Editing of article metadata is performed in a separate window that replaces this one.
"""
def __init__(self, app: QApplication, OAuth_token: str, parent: QMainWindow, project_id: int):
"""
Initialise the various components needed to form the articles window.
Args:
app: QApplication object of the current program. Is passed from window to window upon opening.
OAuth_token: Authentication token created at login to allow for interaction with the Figshare API.
            parent: Reference to the program's parent window (framing window) where various global variables are kept.
project_id: Integer object, containing the Figshare project ID number for the currently open project.
Returns:
None
"""
super().__init__()
self.app = app
self.token = OAuth_token
self.parent = parent
self.project_id = project_id
self.initFig(self.project_id)
self.initIndex()
self.initUI()
def initFig(self, project_id: int):
"""
Initialises Figshare information for the given project by retrieving the list of articles in the project.
Args:
project_id: Figshare project ID number.
Returns:
None
"""
projects = Projects(self.token)
self.project_info = projects.get_info(project_id)
self.article_list = projects.list_articles(project_id)
def initIndex(self):
"""
Initiates the local search index for Figshare articles in the current project.
Returns:
None
"""
if self.parent.figshare_article_index is None:
# Create the Figshare article index
self.parent.figshare_article_index = ArticleIndex()
# Create the default Figshare metadata schema dictionary
self.parent.figshare_article_index.create_schema('figshare_articles')
self.parent.figshare_article_index.add_ID(schema='figshare_articles', field_name='id', stored=True,
unique=True)
self.parent.figshare_article_index.add_TEXT('figshare_articles', 'title', True)
self.parent.figshare_article_index.add_TEXT('figshare_articles', 'description')
self.parent.figshare_article_index.add_KEYWORD('figshare_articles', 'tags', True)
self.parent.figshare_article_index.add_ID('figshare_articles', 'references')
self.parent.figshare_article_index.add_KEYWORD('figshare_articles', 'categories')
self.parent.figshare_article_index.add_KEYWORD('figshare_articles', 'authors')
self.parent.figshare_article_index.add_ID('figshare_articles', 'defined_type')
self.parent.figshare_article_index.add_TEXT('figshare_articles', 'funding')
self.parent.figshare_article_index.add_ID('figshare_articles', 'license')
self.parent.figshare_article_index.document_types.add('article')
def initUI(self):
"""
        Initializes the window GUI.
Returns:
None
"""
# Encompassing horizontal layout
horizontal_layout = QHBoxLayout()
# Create left vertical layout
left_vertical_layout = QVBoxLayout()
# Create a central vertical layout
central_vertical_layout = QVBoxLayout()
# Add search bar to central layout
# central_vertical_layout.addLayout()
# Add article tree to central layout
self.article_list_widget = ArticleList(self.app, self.token, self.project_id, self.parent)
central_vertical_layout.addWidget(self.article_list_widget)
# Create right vertical layout
right_vertical_layout = QVBoxLayout()
        # Add Figshare command buttons to the right layout
right_vertical_layout.addWidget(self.publish_article_button())
right_vertical_layout.addWidget(self.download_article_button())
right_vertical_layout.addWidget(self.edit_article_button())
right_vertical_layout.addWidget(self.delete_article_button())
# Add left, central, and right layouts to the horizontal layout
horizontal_layout.addLayout(left_vertical_layout)
horizontal_layout.addLayout(central_vertical_layout)
horizontal_layout.addLayout(right_vertical_layout)
self.format_window()
# Create a central widget for the projects window
window_widget = QWidget()
# Add the vertical box layout
window_widget.setLayout(horizontal_layout)
# Set the projects window widget
self.setWidget(window_widget)
# Window Formatting
# =================
def format_window(self):
"""
Sets the window geometry
Returns:
None
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) / 3)
self.setGeometry(x0, y0, w, h)
# Remove frame from the window
self.setWindowFlags(Qt.FramelessWindowHint)
# Window Widgets
# ==============
def edit_article_button(self):
"""
Create a QPushButton that is used to open the article edit window for the selected articles.
Returns:
QPushButton Widget connected to the on_edit_article_pressed function
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/Pencil-52.png')))
press_button(self.app, btn)
btn.setToolTip('Open article edit window')
btn.setToolTipDuration(1000)
btn.pressed.connect(self.on_edit_article_pressed)
return btn
def delete_article_button(self):
"""
Creates a QPushButton that can be used to deleted selected articles.
Returns:
QPushButton Widget connected to the on_delete_article_pressed function
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/delete.png')))
press_button(self.app, btn)
btn.setToolTip('Delete selected articles')
btn.setToolTipDuration(1000)
btn.pressed.connect(self.on_delete_article_pressed)
return btn
def publish_article_button(self):
"""
Creates a QPushButton that will publish the selected articles.
Returns:
QPushButton: Connected to the on_publish_article_pressed function.
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/figshare_upload.png')))
press_button(self.app, btn)
btn.setToolTip('Publish selected articles')
btn.setToolTipDuration(1000)
btn.pressed.connect(self.on_publish_article_pressed)
return btn
def download_article_button(self):
"""
Creates a QPushButton that will download the selected articles.
Returns:
QPushButton: Connected to the on_download_article_pressed function.
"""
btn = QPushButton()
btn.setIcon(QIcon(os.path.normpath(__file__ + '/../../img/figshare_download.png')))
press_button(self.app, btn)
btn.setToolTip('Download selected articles')
btn.setToolTipDuration(1000)
btn.pressed.connect(self.on_download_article_pressed)
return btn
# Widgets Actions
# ===============
def on_edit_article_pressed(self):
"""
Called when the edit article button is pressed. Closes the current window and opens the article edit window in
its place.
Returns:
None
"""
if len(self.article_list) > 0:
# Get the list of article id numbers from the selected items in the article list widget
article_ids = list(self.article_list_widget.get_selection())
if len(article_ids) > 0:
                # Close the current project articles window and remove from the set of open windows
self.parent.open_windows.remove('project_articles_window')
self.parent.project_articles_window.close()
# Create and open the article edit window, adding it to the list of open windows
self.parent.open_windows.add('article_edit_window')
self.parent.article_edit_window = ArticleEditWindow(self.app, self.token, self.parent, self.project_id,
article_ids)
self.parent.mdi.addSubWindow(self.parent.article_edit_window)
self.parent.article_edit_window.show()
def on_delete_article_pressed(self):
"""
Called when the delete articles button is pressed. Will ask for user confirmation, prior to deleting articles.
Returns:
None
"""
# Get a set of the articles currently selected
article_ids = self.article_list_widget.get_selection()
# If there is no selection then do nothing
if article_ids == set():
return
# If there is a selection create a dialog window to ask for deletion confirmation
else:
n_articles = len(article_ids)
msg = "Are you sure you want to permanently DELETE {} articles?".format(n_articles)
reply = QMessageBox.question(self, "Deletion Confirmation", msg, QMessageBox.Yes, QMessageBox.No)
# Upon a reply of Yes call the delete articles function
if reply == QMessageBox.Yes:
all_errors = self.delete_multiple_articles(self.project_id, article_ids)
                # If any errors occurred create a new dialog to notify the user
if all_errors != []:
msg_box = QMessageBox()
msg_box.setIcon(QMessageBox.Warning)
msg_box.setWindowIcon(QIcon(os.path.normpath(__file__ + '/../../img/figshare_logo.png')))
msg_box.setWindowTitle("Article Delete Errors")
msg_box.setText("Error occurred while trying to delete articles.")
detailed_msg = ""
for err in all_errors:
detailed_msg += err + '\n'
msg_box.setDetailedText(detailed_msg)
msg_box.setStandardButtons(QMessageBox.Ok)
self.delete_msg_box = msg_box
self.delete_msg_box.show()
self.delete_msg_box.buttonClicked.connect(self.reopen_window)
                # If no errors occurred then create a new dialog to notify the user
else:
msg = "All articles deleted"
reply = QMessageBox.information(self, "Articles Deleted", msg, QMessageBox.Ok)
if reply == QMessageBox.Ok:
self.reopen_window()
else:
self.reopen_window()
def on_publish_article_pressed(self):
"""
Called when the publish article button is pressed. Will ask user confirmation for if they want to make all
selected articles publicly available.
Returns:
None
"""
# Get a set of the articles currently selected
article_ids = self.article_list_widget.get_selection()
# If there is no selection made then select all the articles in the project
if article_ids == set():
article_ids = self.article_list_widget.get_all()
n_article = 'All' # Define the number of articles as All
else:
n_article = len(article_ids) # Define the number of articles to be published
# Ask user for publish confirmation
msg = "Are you sure you want to make {} articles public?".format(n_article)
reply = QMessageBox.question(self, "Publish Confirmation", msg, QMessageBox.Yes, QMessageBox.No)
# If the reply confirmation is Yes then publish the selection
if reply == QMessageBox.Yes:
# Passes the set of article id numbers to the publish
errors = self.publish_articles(article_ids)
if errors is not None:
msg_box = QMessageBox()
msg_box.setIcon(QMessageBox.Warning)
msg_box.setText("Error occurred when publishing.")
detailed_msg = ""
for err in errors:
for arg in err.args:
detailed_msg += arg + '\n'
detailed_msg += str(err.response.content)
detailed_msg += '\n'
msg_box.setDetailedText(detailed_msg)
msg_box.setStandardButtons(QMessageBox.Ok)
self.publish_msg_box = msg_box
self.publish_msg_box.show()
self.publish_msg_box.buttonClicked.connect(self.reopen_window)
else:
msg = "All articles published"
reply = QMessageBox.information(self, "Articles Published", msg, QMessageBox.Ok)
if reply == QMessageBox.Ok:
self.reopen_window()
else:
self.reopen_window()
else:
pass
def reopen_window(self):
"""
Closes and reopens the article window
:return:
"""
for i in range(2):
self.parent.project_info_window.on_articles_pressed()
def on_download_article_pressed(self):
"""
Called when the download article button is pressed.
:return:
"""
# Get the list of article id numbers from the selected items in the article list widget
article_ids = self.article_list_widget.get_selection()
# Ask if all articles are desired if there is no selection
        if article_ids == set():
reply = QMessageBox.question(self, "Download Confirmation", "Download All Articles?", QMessageBox.Yes,
QMessageBox.No)
if reply == QMessageBox.Yes:
article_ids = self.article_list_widget.get_all()
else:
article_ids = None
# If there are articles to download
if article_ids is not None:
# Get a list of files from all articles
# keep the file names and the file download urls
downloads = []
for article in article_ids:
file_list = Projects(self.token).list_files(article)
for f in file_list:
if f['name'][-4:] != '.png':
downloads.append([f['name'], f['download_url']])
# Ask the user to choose a download directory
download_dir = str(QFileDialog.getExistingDirectory(self, "Select Directory"))
# An empty list to record any download errors
download_statuses = []
# Download all files in the list
for f in downloads:
local_path = os.path.abspath(download_dir + '/' + f[0])
url = f[1]
status = download_file(url, local_path, self.token)
# If there is an error in any of the downloads record the file name
if status != 200:
download_statuses.append(f[0])
# If there are any errors display a message that lists the affected files
if download_statuses != []:
msg = 'There was an error in downloading the following files.\n'
for f in download_statuses:
msg += f + '\n'
reply = QMessageBox.warning(self, "Download Error", msg, QMessageBox.Ok)
if reply == QMessageBox.Ok:
pass
else:
pass
# Otherwise confirm that the downloads have been successful
else:
reply = QMessageBox.information(self, 'Download Confirmation', "All files downloaded", QMessageBox.Ok)
if reply == QMessageBox.Ok:
pass
else:
pass
def publish_articles(self, article_ids):
"""
Publishes all articles given
:param article_ids: list of int. Figshare article id numbers.
:return:
"""
errors = []
for article in article_ids:
error = self.publish_article(article)
if error is not None:
errors.append(error)
if errors != []:
return errors
else:
return None
def publish_article(self, article_id):
"""
Publishes a single article
:param article_id: int. Figshare article id number
:return:
"""
try:
Projects.publish_article(self.token, article_id)
self.create_local_article(article_id)
return None
except HTTPError as err:
return err
def create_local_article(self, article_id):
"""
Given a Figshare article id number this function will create a local version if one does not already exist
        :param article_id: int. Figshare article id number
:return:
"""
# Get the article id number and title
article_id = str(article_id) # Convert int to str
article_title = self.parent.figshare_articles[article_id].figshare_metadata['title']
article = gen_article(article_title, self.token, self.project_id, article_id)
self.parent.figshare_articles[article_id] = article
def delete_multiple_articles(self, project_id: int, article_ids: set):
"""
Used to delete multiple articles from a given Figshare project.
Args:
project_id: Figshare project ID number article is within.
article_ids: Figshare article ID number.
Returns:
error_msgs (list of str): List of error messages returned during deletion process.
"""
# Empty list to hold error messages if returned
error_msgs = []
# Go though set and attempt to delete each article
while article_ids:
article_id = article_ids.pop()
err_msg = self.delete_article(project_id, article_id)
if err_msg != '':
error_msgs.append(str(article_id) + ': ' + err_msg)
return error_msgs
def delete_article(self, project_id: int, article_id: int):
"""
Uses the Figshare API Interface package to delete the given article from Figshare.
Args:
project_id: Figshare project ID number, article is within.
article_id: Figshare article ID number.
Returns:
err_msg (str): Either an empty string or contains an error message that occurred during deletion process.
"""
projects = Projects(self.token)
err_msg = projects.article_delete(project_id, article_id)
return err_msg
<file_sep>/custom_widgets/cat_tag_button.py
"""
"""
# PyQt Imports
from PyQt5.QtWidgets import (QWidget, QPushButton)
from PyQt5.QtGui import (QFont, QFontMetrics)
from PyQt5.QtCore import (Qt)
# Figshare Desktop Imports
from Figshare_desktop.custom_widgets.tag_button import QTagButton
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class CategoryTagButton(QTagButton):
def mousePressEvent(self, event):
"""
        Overrides the existing mousePressEvent. If a right click occurs the tag is deleted
:param event:
:return:
"""
if event.button() == Qt.RightButton:
cat_id = int(self.toolTip())
self.tag_set.remove(cat_id)
self.deleteLater()
elif event.button() == Qt.LeftButton:
return QWidget.mousePressEvent(self, event)
<file_sep>/abstract_windows/new_object_window.py
"""
New Object Window
This module abstracts the creation of a new Figshare object, e.g. a collection or project.
Subclasses should use this class as their parent; the class itself is not meant to be used directly.
Notes:
    The NewObjectWindow class is to be subclassed with the following variables and functions redefined.
Functions:
create_object_info_layout()
on_save_pressed()
on_cancel_pressed()
create_object()
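Example:
    A minimal, illustrative subclass sketch. The class name MinimalObjectWindow and its single
    title field are hypothetical and only show how the functions above fit together; they are
    not part of the package.

        class MinimalObjectWindow(NewObjectWindow):

            def create_object_info_layout(self):
                title_lbl, self.title_field = self.create_lineedit('Title')
                grid = QGridLayout()
                grid.addWidget(title_lbl, 0, 0, Qt.AlignLeft)
                grid.addWidget(self.title_field, 0, 1)
                return grid

            def on_save_pressed(self):
                self.create_object({'title': self.title_field.text()})

            def on_cancel_pressed(self):
                self.close()

            def create_object(self, info_dict: dict):
                # Call the relevant figshare_interface create function here.
                pass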
"""
# Standard Imports
import os
from requests import HTTPError
# PyQt Imports
from PyQt5.QtWidgets import (QMdiSubWindow, QLabel, QPushButton, QTextEdit, QGridLayout, QMainWindow, QApplication,
QLineEdit, QVBoxLayout, QSizePolicy, QMessageBox, QHBoxLayout, QWidget)
from PyQt5.QtGui import (QIcon)
from PyQt5.QtCore import (Qt)
# Figshare Desktop Imports
from Figshare_desktop.formatting.formatting import (scaling_ratio, press_button, grid_label, label_font, grid_edit)
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class NewObjectWindow(QMdiSubWindow):
"""
An abstract class for creating a window that facilitates the creation of a new Figshare object.
"""
def __init__(self, app: QApplication, OAuth_token: str, parent: QMainWindow):
"""
Initialise the window.
Args:
app: main thread application object
OAuth_token: Figshare authentication token obtained at login.
            parent: Reference to the main (framing) window of the program.
"""
super().__init__()
# Create class variables of init args
self.app = app
self.token = OAuth_token
self.parent = parent
        # Create shortened path to open windows set
self.open_windows = self.parent.open_windows
# Initialise the UI
self.initUI()
def initUI(self):
"""
Formats, and creates the window.
Returns:
None
"""
# Format the Window to the Primary Screen
self.format_window()
self.hbox = QHBoxLayout()
self.hbox.addLayout(self.create_command_buttons())
self.hbox.addLayout(self.create_object_info_layout())
window_widget = QWidget()
window_widget.setLayout(self.hbox)
self.setWidget(window_widget)
# Window Formatting
# =================
def format_window(self):
"""
Format the current window to the available space in primary screen.
Returns:
None
"""
# Gets the QRect of the main window
geom = self.parent.geometry()
# Gets the Qrect of the sections window
section_geom = self.parent.section_geom
# Define geometries for the projects window
x0 = section_geom.x() + section_geom.width()
y0 = section_geom.y()
w = geom.width() - x0
h = ((geom.height() - y0) / 3)
self.setGeometry(x0, y0, w, h)
# Remove frame from projects window
self.setWindowFlags(Qt.FramelessWindowHint)
# Window Widgets
# ==============
def create_lineedit(self, label: str):
"""
Creates a QLabel and QLineEdit pair.
Args:
label: Name of field to be associated to the line edit.
Returns:
lbl (QLabel): Formatted label widget.
edit (QLineEdit): Formatted line edit widget.
"""
# Create Label
lbl = QLabel(label)
grid_label(self.app, lbl)
# Create LineEdit
edit = QLineEdit()
edit.setClearButtonEnabled(True)
grid_edit(self.app, edit)
return lbl, edit
def create_edit(self, label):
"""
Creates a QLabel and QTextEdit pair.
Args:
label: Name of field to be associated to the text edit.
Returns:
lbl (QLabel): Formatted label widget.
            edit (QTextEdit): Formatted text edit widget.
"""
# Create Label
lbl = QLabel(label)
lbl.setFont(label_font(self.app))
lbl.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Preferred)
# Create LineEdit
edit = QTextEdit()
grid_edit(self.app, edit)
return lbl, edit
def create_command_buttons(self):
"""
Create a layout containing two buttons. One to create a new object and the second to cancel.
Returns:
vbox (QVBoxLayout): Layout containing the create and cancel buttons.
"""
# Create save button
sv_btn = QPushButton()
press_button(self.app, sv_btn) # Format button
sv_btn.setIcon(QIcon(os.path.abspath(__file__ + '/../..' + '/img/figshare_upload.png')))
sv_btn.setToolTip('Save new object.')
sv_btn.setToolTipDuration(1000)
sv_btn.pressed.connect(self.on_save_pressed)
# Create cancel button
cl_btn = QPushButton()
press_button(self.app, cl_btn) # Format button
cl_btn.setIcon((QIcon(os.path.abspath(__file__ + '/../..' + '/img/exit.png'))))
cl_btn.setToolTip('Exit without saving.')
cl_btn.setToolTipDuration(1000)
cl_btn.pressed.connect(self.on_cancel_pressed)
# Create Layout
vbox = QVBoxLayout()
vbox.addWidget(sv_btn)
vbox.addWidget(cl_btn)
return vbox
def create_object_info_layout(self):
"""
Creates a layout with label and edit fields for creating a new figshare object.
        MUST BE OVERRIDDEN BY CHILDREN
Examples:
Example of code is given for creating a new project info layout.
# Title
title_lbl, self.title_field = self.create_lineedit('Title')
self.title_field.setPlaceholderText('Enter Project Title Here.')
# Description
description_lbl, self.description_field = self.create_edit('Description')
self.description_field.setPlaceholderText('Enter meaningful project description here.')
# Funding
funding_lbl, self.funding_field = self.create_lineedit('Funding')
self.funding_field = QButtonField()
# Group
group_lbl, self.group_field = self.create_lineedit('Group ID')
self.group_field.setText(str(self.get_group())) # Auto fill with the users group id
# Create Layout
grid = QGridLayout()
grid.addWidget(title_lbl, 0, 0, Qt.AlignLeft)
grid.addWidget(self.title_field, 0, 1)
grid.addWidget(description_lbl, 1, 0, Qt.AlignLeft)
grid.addWidget(self.description_field, 1, 1)
grid.addWidget(funding_lbl, 2, 0, Qt.AlignLeft)
grid.addWidget(self.funding_field, 2, 1)
grid.addWidget(group_lbl, 3, 0, Qt.AlignLeft)
grid.addWidget(self.group_field, 3, 1)
grid.setColumnStretch(1, 3)
return grid
Returns:
grid (QGridLayout): grid layout containing the different info fields and labels.
"""
pass
# Widget Actions
# ==============
def on_save_pressed(self):
"""
Called when the save button is pressed. Will upload the new object to Figshare.
        MUST BE OVERRIDDEN BY CHILDREN
Examples:
Example of code is given for creating a new project info layout.
title = self.title_field.text()
description = self.description_field.toPlainText()
funding = self.funding_field.get_tags()
fund_text = ''
for fund in funding:
fund_text += ':_:{}'.format(fund)
try:
group_id = self.group_field.text()
group_id = int(group_id)
available_groups = [i['id'] for i in Groups(self.token).get_list()]
if group_id not in available_groups:
raise ValueError('Not a valid group id.')
else:
project_info = self.create_project(title, description, fund_text, group_id)
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Information)
msgBox.setText("New Project Created\n{}".format(project_info['title']))
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox, exit_parent=True))
msgBox.show()
except ValueError as err:
err_args = err.args
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Critical)
msgBox.setText(err_args[0])
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox))
msgBox.show()
except TypeError as err:
err_args = err.args
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Critical)
msgBox.setText(err_args[0])
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox))
msgBox.show()
except HTTPError as err:
err_args = err.args
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Critical)
msgBox.setText(err_args[0])
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.buttonClicked.connect(lambda: self.on_msgbtn_pressed(msgBox))
msgBox.show()
Returns:
None
Raises:
ValueError:
TypeError:
HTTPError:
"""
pass
def on_cancel_pressed(self):
"""
Called when the cancel button is pressed. Will return to the objects window without creating a new object.
Examples:
Example of code is given for creating a new project info layout.
self.open_windows.remove('new_project_window')
self.close()
self.parent.section_window.on_projects_btn_pressed()
Returns:
None
"""
pass
def on_msgbtn_pressed(self, box: QMessageBox, exit_parent=False):
"""
Called when an error message button is pressed.
Args:
box: Error message box created by error in save process.
exit_parent: Should the new object window be closed.
Returns:
None
"""
box.close()
if exit_parent:
self.on_cancel_pressed()
# Figshare API Functions
# ======================
def create_object(self, info_dict: dict):
"""
Creates a new figshare object from the information dictionary passed to the function.
        MUST BE OVERRIDDEN BY CHILDREN
Examples:
Example of code is given for creating a new project info layout.
projects = Projects(self.token)
required_fields = ['title', 'description', 'funding', 'group_id']
for field in required_fields:
if field not in info_dict:
return
object_info = projects.create(**info_dict)
            return object_info
Args:
            info_dict: Dictionary of the field names and values required to create the new object.
Returns:
object_info (dict): Dictionary containing information on the newly created object.
"""
pass
<file_sep>/figshare_articles/stm_articles/spectroscopy_article.py
"""
"""
from figshare_interface.figshare_structures.projects import Projects
from ..article import Article
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class SpecArticle(Article):
def __init__(self, OAuth_token, project_id, article_id):
# Initialize STM topography metadata dictionary.
self.stm_spec_metadata = {'type': None,
'vgap': None,
'current': None,
'vres': None,
'vinc': None,
'vreal': None,
'vstart': None,
'unitv': None,
'unit': None,
'date': None,
'direction': None,
'sample': None,
'users': None,
'substrate': None,
'adsorbate': None,
'prep': None,
'notebook': None,
'notes': None,
'vmod': None,
'vsen': None,
'freq': None,
'tmeas': None,
'phase': None,
'harm': None
}
super().__init__(OAuth_token, project_id, article_id)
def gen_stm_spec_metadata(self, input_dict):
"""
        Fill values in the stm_spec_metadata dict from an input dictionary.
        :param input_dict: dict. Only extracts values from keys in both stm_spec_metadata and input_dict dictionaries.
:return:
"""
for key in input_dict:
if key in self.stm_spec_metadata:
if input_dict[key] != 'None' and input_dict[key] is not None:
self.stm_spec_metadata[key] = input_dict[key]
def fill_info(self):
"""
Fill in the metadata dictionaries.
:return:
"""
project = Projects(self.token)
basic_info = project.get_article(self.project_id, self.article_id)
stm_top_info = self.recreate_custom_fields(basic_info['custom_fields'])
self.gen_figshare_metadata(basic_info)
self.gen_stm_spec_metadata(stm_top_info)
self.check_basic()
def update_info(self, input_dict):
self.gen_figshare_metadata(input_dict)
self.gen_stm_spec_metadata(input_dict)
self.check_basic()
def input_dicts(self):
return [self.figshare_metadata, self.figshare_desktop_metadata, self.stm_spec_metadata]
def check_file_specific(self):
pass
def get_upload_dict(self):
"""
        Takes the different metadata dictionaries and ensures that their contents are suitable for upload to Figshare.
:return:
"""
self.check_basic()
upload_dict = {}
for key, value in self.figshare_metadata.items():
if value is not None:
upload_dict[key] = value
upload_dict['custom_fields'] = {}
for key, value in self.stm_spec_metadata.items():
if value is not None:
upload_dict['custom_fields'][key] = value
return upload_dict
def get_type(self):
return 'stm_spec'
def index_schema(self):
"""
        Creates a dictionary from which a Whoosh index schema can be built; each key is a metadata field name and
        each value is a (field_type, stored) tuple.
        :return: dict
"""
schema_dict = {'type': ('text', True),
'vgap': ('numeric', True),
'current': ('numeric', True),
'vres': ('numeric', True),
'vinc': ('numeric', True),
'vreal': ('numeric', True),
'vstart': ('numeric', True),
'unitv': ('text', True),
'unit': ('text', True),
'date': ('text', True),
'direction': ('text', True),
'sample': ('text', True),
'users': ('keyword', True),
'substrate': ('text', True),
'adsorbate': ('text', True),
'prep': ('text', True),
'notebook': ('keyword', True),
'notes': ('text', True),
'vmod': ('numeric', True),
'vsen': ('numeric', True),
'freq': ('numeric', True),
'tmeas': ('numeric', True),
'phase': ('numeric', True),
'harm': ('numeric', True)
}
return schema_dict
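
# Illustrative note (a sketch, not a call made by this module): the (field_type, stored)
# pairs returned by index_schema() are intended for an ArticleIndex, using the add_*
# helpers seen in projects_windows/articles_window.py, along these lines:
#
#     for name, (field_type, stored) in article.index_schema().items():
#         if field_type == 'text':
#             index.add_TEXT('local_articles', name, stored)
#         elif field_type == 'keyword':
#             index.add_KEYWORD('local_articles', name, stored)
#         # a counterpart helper for 'numeric' fields is assumed to exist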
<file_sep>/collections_windows/collections_window.py
"""
Figshare Collections Window
This window presents a view of the collections in a users account and allows for them to be created, and deleted.
Collections can then be opened to examine their contents, an action that creates new child windows.
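Example:
    An illustrative sketch only; the exact wiring lives in the section/framing windows, and
    parent is assumed to be the framing main window holding the MDI area:

        parent.collections_window = CollectionsWindow(app, token, parent)
        parent.mdi.addSubWindow(parent.collections_window)
        parent.collections_window.show()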
"""
# Standard Imports
# PyQt Imports
# Figshare Desktop Imports
from Figshare_desktop.abstract_windows.figshare_structure_list import FigshareObjectWindow
from Figshare_desktop.collections_windows.collection_info_window import CollectionInfoWindow
from Figshare_desktop.collections_windows.new_collection_window import NewCollectionWindow
from figshare_interface import Collections
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class CollectionsWindow(FigshareObjectWindow):
"""
Child of the Abstract FigshareObjectWindow with changes to call Collections objects from Figshare.
"""
def on_create_btn_pressed(self):
"""
Called when the create new collection button is pressed.
Returns:
None
"""
if 'new_collection_window' in self.open_windows:
self.open_windows.remove('new_collection_window')
self.parent.new_collection_window.close()
else:
self.open_windows.remove('collections_window')
self.close()
if 'collection_info_window' in self.open_windows:
self.open_windows.remove('collection_info_window')
self.parent.collection_info_window.close()
if 'collection_articles_window' in self.open_windows:
self.open_windows.remove('collection_articles_window')
self.parent.collection_articles_window.close()
if 'article_edit_window' in self.open_windows:
self.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
self.open_windows.add('new_collection_window')
self.parent.new_collection_window = NewCollectionWindow(self.app, self.token, self.parent)
self.parent.mdi.addSubWindow(self.parent.new_collection_window)
self.parent.new_collection_window.show()
def is_info_open(self):
"""
Called to see if there is a Figshare collection info window open.
Returns:
            open (bool): True or False depending on whether an info window is already open
object_id (int): Figshare collection ID number
"""
if 'collection_info_window' in self.open_windows:
open_obj_id = self.parent.collection_info_window.object_id
return True, open_obj_id
else:
return False, None
def close_object_info_window(self):
"""
Called when the existing object info window needs to be closed.
Returns:
None
"""
self.open_windows.remove('collection_info_window')
self.parent.collection_info_window.close()
if 'collection_articles_window' in self.open_windows:
self.open_windows.remove('collection_articles_window')
self.parent.collection_articles_window.close()
        if 'article_edit_window' in self.open_windows:
self.open_windows.remove('article_edit_window')
self.parent.article_edit_window.close()
def create_new_object_info_window(self, object_id: int):
"""
Called when a new object info window is to be created.
Args:
object_id: Figshare collection ID number
Returns:
None
"""
self.open_windows.add('collection_info_window')
self.parent.collection_info_window = CollectionInfoWindow(self.app, self.token, self.parent, object_id)
self.parent.mdi.addSubWindow(self.parent.collection_info_window)
self.parent.collection_info_window.show()
def reopen_objects(self):
"""
Called to open and close the figshare collection window
Returns:
None
"""
for i in range(2):
self.parent.section_window.on_collection_btn_pressed()
# Figshare API Interface Functions
# ================================
def get_object_list(self):
"""
Called to return a list of Figshare collections associated to the user.
Returns:
object_list (list of dicts): List of users Figshare objects.
"""
collections = Collections(self.token)
object_list = collections.get_list()
return object_list
def search_objects(self, search_text: str):
"""
Gets a list of objects matching the users search query.
Args:
search_text: Figshare style elastic search string
Returns:
result (list of dicts): Gives a list of dictionary objects that either match those of the search criteria,
or returns the full set if no matches found.
"""
collections = Collections(self.token)
result = collections.search(search_text)
if len(result) == 0:
result = collections.get_list()
return result
def delete_object(self, object_id: int):
"""
Called to delete the given figshare object.
Args:
object_id:
Returns:
            bool: True or False depending on whether the deletion was successful.
"""
collections = Collections(self.token)
try:
collections.delete(object_id, safe=False) # Suppress command line confirmation
return True
except:
return False
<file_sep>/login_window/login_window.py
"""
"""
import os
import sys
from requests import HTTPError
from PyQt5.QtCore import (QCoreApplication, Qt, QPoint)
from PyQt5.QtWidgets import (QApplication, QWidget, QGridLayout, QLabel, QLineEdit, QPushButton)
from PyQt5.QtGui import (QPixmap, QFont)
from figshare_interface.http_requests.figshare_requests import login_request
from Figshare_desktop.main_window.framing_window import MainWindow
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class LoginWindow(QWidget):
def __init__(self):
super().__init__()
self.initUI()
def initUI(self):
self.grid = QGridLayout()
self.grid.setSpacing(10)
self.grid.setAlignment(Qt.AlignCenter)
self.label_font = QFont('SansSerif', 10)
self.edit_font = QFont('SansSerif', 8)
self.button_font = QFont('SansSerif', 10)
self.placeLogo()
self.placeAccountNameLabel()
self.placeAccountNameEdit()
self.placePasswordLabel()
self.placePasswordEdit()
self.placeQuitButton()
self.placeLoginButton()
self.setLayout(self.grid)
self.formatWindow()
self.accountNameEdit.setFocus()
def formatWindow(self):
screen = app.primaryScreen()
screen_rec = screen.availableGeometry()
screen_center = screen_rec.center()
frame_w = screen_rec.width() / 4
frame_h = screen_rec.height() / 4
        self.setGeometry(int(screen_center.x() - frame_w / 2), int(screen_center.y() - frame_h / 2),
                         int(frame_w), int(frame_h))
self.setWindowFlags(Qt.FramelessWindowHint)
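    # The two mouse-event handlers below implement click-and-drag moving of the window,
    # which is needed because the frameless window hint above removes the native title bar.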
def mousePressEvent(self, event):
self.oldPos = event.globalPos()
def mouseMoveEvent(self, event):
delta = QPoint(event.globalPos() - self.oldPos)
self.move(self.x() + delta.x(), self.y() + delta.y())
self.oldPos = event.globalPos()
def placeLogo(self):
logo_img = QPixmap(os.path.abspath(__file__ + '/../..' + '/img/full-logo.png'))
logo = QLabel()
logo.setPixmap(logo_img)
logo.setAlignment(Qt.AlignCenter)
self.grid.addWidget(logo, 1, 0, 5 , 1)
def placeAccountNameLabel(self):
accountName = QLabel('Account')
accountName.setFont(self.label_font)
self.grid.addWidget(accountName, 1, 1, 1, 2)
def placeAccountNameEdit(self):
accountNameEdit = QLineEdit()
accountNameEdit.setFont(self.edit_font)
accountNameEdit.returnPressed.connect(self.on_pushLogin_clicked)
self.accountNameEdit = accountNameEdit
self.grid.addWidget(self.accountNameEdit, 2, 1, 1, 2)
def placePasswordLabel(self):
passwordLabel = QLabel('Password')
passwordLabel.setFont(self.label_font)
self.grid.addWidget(passwordLabel, 3, 1, 1, 2)
def placePasswordEdit(self):
self.passwordEdit = QLineEdit()
self.passwordEdit.setFont(self.edit_font)
self.passwordEdit.setEchoMode(QLineEdit.Password)
self.passwordEdit.returnPressed.connect(self.on_pushLogin_clicked)
self.grid.addWidget(self.passwordEdit, 4, 1, 1, 2)
def placeQuitButton(self):
quit_btn = QPushButton('Exit', self)
quit_btn.setFont(self.button_font)
quit_btn.clicked.connect(QCoreApplication.instance().quit)
quit_btn.resize(quit_btn.sizeHint())
self.grid.addWidget(quit_btn, 5, 1)
def placeLoginButton(self):
login_btn = QPushButton('Login', self)
login_btn.setFont(self.button_font)
login_btn.clicked.connect(self.on_pushLogin_clicked)
login_btn.resize(login_btn.sizeHint())
self.grid.addWidget(login_btn, 5, 2)
def on_pushLogin_clicked(self):
username = self.accountNameEdit.text()
        password = self.passwordEdit.text()
try:
OAuth_token = login_request(username, password)
self.close()
self.window = MainWindow(app, OAuth_token)
self.window.show()
except HTTPError as err:
#reason = err.response.content
self.accountNameEdit.setText("")
self.passwordEdit.setText("")
self.accountNameEdit.setFocus()
if __name__ == '__main__':
app = QApplication(sys.argv)
login_window = LoginWindow()
login_window.show()
sys.exit(app.exec_())
<file_sep>/custom_widgets/categories_field.py
"""
"""
# PyQt Imports
from PyQt5.QtWidgets import (QWidget)
from PyQt5.QtGui import (QFont)
# Figshare Desktop Imports
from Figshare_desktop.custom_widgets.button_field import QButtonField
from Figshare_desktop.custom_widgets.cat_tag_button import CategoryTagButton
from Figshare_desktop.custom_widgets.categories_combo import CategoriesCombo
__author__ = "<NAME>"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = ""
__version__ = "0.0.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
class CategoriesField(QButtonField):
"""
    Subclass of the QButtonField widget that is customised to visualise and return Figshare category metadata.
"""
def __init__(self, id_dict: dict, name_dict: dict, parent=None):
super(QWidget, self).__init__()
if parent is not None:
self.setParent(parent)
self.id_dict = id_dict
self.name_dict = name_dict
self.initUI()
def create_linedit(self):
"""
Creates a formated line edit to create new tags with.
:return:
"""
edit = CategoriesCombo(self.id_dict, self.name_dict)
edit.setToolTip('Press return to add category')
font = QFont('SansSerif', 11)
edit.setFont(font)
edit.lineEdit().returnPressed.connect(lambda: self.on_return_pressed(edit))
edit.setMaximumWidth(self.width * (1 / 4))
return edit
def add_tag(self, cat_lbl):
"""
        Adds a category tag button to the frame.
Args:
cat_lbl: category label.
Returns:
"""
add_tag = False
if type(cat_lbl) is dict:
cat_id = cat_lbl['id']
lbl = cat_lbl['title']
add_tag = True
else:
try:
cat_id = int(cat_lbl)
lbl = self.id_dict[cat_id]
add_tag = True
except:
if cat_lbl in self.name_dict:
cat_id = self.name_dict[cat_lbl]
lbl = cat_lbl
add_tag = True
if add_tag:
if cat_id not in self.tags:
btn = CategoryTagButton(lbl, self.tags, tooltip_lbl=str(cat_id))
self.tags.add(cat_id)
self.tag_box.addWidget(btn)
def on_return_pressed(self, edit):
"""
Called when a new tag is to be created
:param edit: QLineEdit from where to take text
:return:
"""
text = edit.currentText()
edit.lineEdit().setText('')
if text != '' and text not in self.tags:
self.add_tag(text)
|
f11440884ee641b2e9a86fa624dad3c913b72afb
|
[
"Markdown",
"Python"
] | 43
|
Python
|
tobias-gill/Figshare_desktop
|
ab04ca1c67839a8269f5275323907c5bc7f9af46
|
9e6636911f5c86fbf9201f6b6dff351a48877139
|
refs/heads/master
|
<repo_name>ekrikelis/Multilayer-perceptron<file_sep>/MLP.c
// implementation of a multilayer perceptron in C
// https://en.wikipedia.org/wiki/Multilayer_perceptron
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
#include <string.h>
#define d 2 //input dimension (arithmos eisodwn MLP)
#define K 3 //output dimension (arithmos katigoriwn (eksodwn) MLP)
#define H1 7 //number of nodes in the 1st hidden layer (arithmos krimenwn nevronwn gia 1o epipedo)
#define H2 2 //number of nodes in the 2nd hidden layer (arithmos krimenwn nevronwn gia 2o epipedo)
#define e 0.0001 //error rate ( minimum metavoli sfalmatos )
#define N 1000 //Number of training set ( arithmos protypou synolou ekpaideushs)
#define n 0.01 //descent step (vima kathodou)
#define MaxEpoxh 30000 //number of epoches (max)
#define T 1000 //number of test set (arithmos synolou elegxou)
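/*
 * Resulting network, as configured by the #defines above:
 *   input (d=2) -> hidden layer 1 (H1=7) -> hidden layer 2 (H2=2) -> output (K=3)
 * Training uses N=1000 patterns with batch gradient descent (step n=0.01) and stops
 * when the change in total error falls below e=0.0001 or after MaxEpoxh epochs.
 */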
//int flag=1; //flag=1 logistic activation function (logistiki synartisi energopoihshs)
//flag=2 linear activation function (grammiki synartisi energopoihshs)
double E; //training error (synoliko sfalama ekpaideusis MLP)
double Epre; //training error per epoche (sfalma ekpaideusis prohgoumenhs epoxis)
//initilization of MLP architecture
/*bias arrays*/
double b1[H1]; //array of bias of nodes of the 1st hidden layer - pinakas polosewn 1ou epipedou
double b2[H2]; //array of bias of nodes of the 2nd hidden layer - pinakas polosewn 2ou epipedou
double b3[K]; //array of bias of nodes of the ouput layer - pinakas polosewn eksodou
/*arrays of weights*/
double w1[H1][d]; //array of weights pinakas from input to 1st hidden layer
double w2[H2][H1]; //array of weights pinakas from 1st hidden to 2nd hidden layer
double w3[K][H2]; //array of weights pinakas from 2nd hidden to output
//Outputs - eksodoi
double y[K]; //output of MLP (eksodoi tou MLP)
double z1[H1];//output of 1st layer (eksodoi nevrwnwn 1ou epipedou)
double z2[H2];//output of the 2nd layer (eksodoi nevrwnwn 2ou epipedou)
//Error - sfalmata
double d1[H1]; //errors of the 1st layer (sfalmata nevrwnwn 1ou epipedou)
double d2[H2]; //errors of the 2nd layer (sfalmata nevrwnwn 2ou epipedou)
double d3[K]; //errors of the output (sfalmata nevrwnwn eksodou)
//the sums of the weight derivatives (accumulated over the training patterns)
double dw1[H1][d]; //input --> 1st hidden layer
double dw2[H2][H1]; //1st --> 2nd hidden layer
double dw3[K][H2]; //2nd hidden layer --> output
//the sums of the bias derivatives
double db1[H1]; //nodes of 1st layer
double db2[H2]; //nodes of 2nd layer
double db3[K]; //nodes of output
//inner products s = w*x
double s1[H1];
double s2[H2];
double s3[K];
//arrays for the training set - pinakes protypwn ekpaideusis
double x_train[N][d]; //input
double t_train[N][K]; //target
//arrays for the test set - pinakes protypwn elegxou
double x_test[T][d]; //input
double t_test[T][K]; //target
FILE* f1;
FILE* f2;
FILE* f3;
//---------- functions -----------------------------
//initialize the bias derivatives to 0
void initialiaze_parag_polosewn()
{
int i;
    //bias derivatives of the 1st hidden layer
    for(i=0; i<H1; i++)
        db1[i]=0;
    //bias derivatives of the 2nd hidden layer
    for(i=0; i<H2; i++)
        db2[i]=0;
    //bias derivatives of the output layer
    for(i=0; i<K; i++)
        db3[i]=0;
}
//initialize the weight derivatives to 0
void initialiaze_parag_varwn()
{
    //weight derivatives: input --> 1st hidden layer
    int i,j;
    for(i=0; i<H1; i++)
        for(j=0; j<d; j++)
            dw1[i][j]=0;
    //weight derivatives: 1st --> 2nd hidden layer
    for(i=0; i<H2; i++)
        for(j=0; j<H1; j++)
            dw2[i][j]=0;
    //weight derivatives: 2nd hidden layer --> output
    for(i=0; i<K; i++)
        for(j=0; j<H2; j++)
            dw3[i][j]=0;
}
int init=0;
// creation of a random number - dimiourgia tyxaiou arithmou
double GetRand()
{
int i;
while(init==0)
{
srand((unsigned)(time(0)));
init=1;
}
double rr = ( ((double)rand() / ((double)(RAND_MAX))*2-1.0));
return(rr);
}
// initialization of bias - arxikopoihsh polosewn sto (-1,1)
void initialize_poloseis()
{
int i;
    //biases of the 1st hidden layer
    for(i=0; i<H1; i++)
        b1[i]=GetRand();
    //biases of the 2nd hidden layer
    for(i=0; i<H2; i++)
        b2[i]=GetRand();
    //biases of the output layer
    for(i=0; i<K; i++)
        b3[i]=GetRand();
}
//initialization of weights in (-1,1)
void initialize_varwn()
{
int i,j;
    //weights: input --> 1st hidden layer
    for(i=0; i<H1; i++)
        for(j=0; j<d; j++)
            w1[i][j]=GetRand();
    //weights: 1st --> 2nd hidden layer
    for(i=0; i<H2; i++)
        for(j=0; j<H1; j++)
            w2[i][j]=GetRand();
    //weights: 2nd hidden layer --> output
    for(i=0; i<K; i++)
        for(j=0; j<H2; j++)
            w3[i][j]=GetRand();
}
// activation function
double f(double u)
{
    //if (flag==1) //logistic activation function
    return (1/(1+exp(-u)));
    //if(flag==2) //linear activation function (unreachable while the logistic branch returns above)
    return (u);
}
//forward_pass --- forward pass through the network
//note: the arrays s1, s2, s3 may not strictly be needed
void forward_pass(double* x)
{
int i,j;
double s;
    //pass through the neurons of the 1st hidden layer
for(i=0; i<H1; i++)
{
s=0;
for(j=0; j<d; j++)
            s=s+w1[i][j]*x[j]; // accumulate the inner product x*w
s1[i]=s;
s=s+b1[i];
        z1[i]=f(s); //outputs of the 1st-hidden-layer neurons (apply the activation function)
}
    //pass through the neurons of the 2nd hidden layer
for(i=0; i<H2; i++)
{
s=0;
for(j=0; j<H1; j++)
            s=s+w2[i][j]*z1[j]; // accumulate the inner product z1*w
s2[i]=s;
s=s+b2[i];
        z2[i]=f(s); //outputs of the 2nd-hidden-layer neurons (apply the activation function)
}
    //pass through the output layer (K neurons)
for(i=0; i<K; i++)
{
s=0;
for(j=0; j<H2; j++)
            s=s+w3[i][j]*z2[j]; // accumulate the inner product z2*w
s3[i]=s;
s=s+b3[i];
        y[i]=f(s); //outputs of the output-layer neurons (apply the activation function)
}
}
//backpropagation
void backprop(double* t)
{
    int i,j;
    double sum; //must be double: an int accumulator would truncate the small error terms
    //error of the output-layer neurons
    //if(flag==1) logistic activation: use the logistic derivative
    for(i=0; i<K; i++)
        d3[i]=(y[i]-t[i])*y[i]*(1-y[i]);
    //if(flag==2) a linear activation would instead use d3[i]=(y[i]-t[i]);
    //(left disabled so it no longer overwrites the logistic-derivative errors computed above)
    //error of the 2nd hidden layer
    for(i=0; i<H2; i++)
    {
        sum=0;
        for(j=0; j<K; j++)
            sum=sum+d3[j]*w3[j][i];
        d2[i]=sum*z2[i]*(1-z2[i]);
    }
    //error of the 1st hidden layer
    for(i=0; i<H1; i++)
    {
        sum=0;
        for(j=0; j<H2; j++)
            sum=sum+d2[j]*w2[j][i];
        d1[i]=sum*z1[i]*(1-z1[i]);
    }
}
//accumulate the sums of the weight derivatives
void sum_dw(double* x)
{
int i,j;
for(i=0; i<H1; i++)
for(j=0; j<d; j++)
dw1[i][j]=dw1[i][j]+(d1[i]*x[j]);
for(i=0; i<H2; i++)
for(j=0; j<H1; j++)
dw2[i][j]=dw2[i][j]+(d2[i]*z1[j]);
for(i=0; i<K; i++)
for(j=0; j<H2; j++)
dw3[i][j]=dw3[i][j]+(d3[i]*z2[j]);
}
//accumulate the sums of the bias derivatives
void sum_db()
{
int i,j;
for(i=0; i<H1; i++)
db1[i]=db1[i]+d1[i];
for(i=0; i<H2; i++)
db2[i]=db2[i]+d2[i];
for(i=0; i<K; i++)
db3[i]=db3[i]+d3[i];
}
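/*
 * Batch update applied once per epoch by gradient_descent() below, using the derivative
 * sums accumulated by sum_dw() and sum_db():
 *   w_ij := w_ij - n * sum_over_patterns( delta_i * input_j )
 *   b_i  := b_i  - n * sum_over_patterns( delta_i )
 */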
//gradient-descent
void gradient_descent()
{
int i,j;
    //update output-layer weights and biases
for(i=0;i<K;i++)
{
b3[i]=b3[i]-n*db3[i];
for(j=0; j<H2; j++)
w3[i][j]=w3[i][j]-n*dw3[i][j];
}
    //update 2nd-hidden-layer weights and biases
for(i=0;i<H2;i++)
{
b2[i]=b2[i]-n*db2[i];
for(j=0; j<H1; j++)
w2[i][j]=w2[i][j]-n*dw2[i][j];
}
    //update 1st-hidden-layer weights and biases
for(i=0;i<H1;i++)
{
b1[i]=b1[i]-n*db1[i];
for(j=0; j<d; j++)
w1[i][j]=w1[i][j]-n*dw1[i][j];
}
}
//generate the training and test data sets (despite the name, nothing is read from files here)
void read_arxeio(){
int i,j;
double tempx1,tempx2;
    double sum=0; //must be double: the squared radius is compared against 0.16 and 0.64 below
    //training-set patterns, generated once and stored in arrays
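    //the target class is chosen from three concentric regions of x1^2 + x2^2:
    //  <= 0.16 -> (0,0,1),  (0.16, 0.64] -> (0,1,0),  > 0.64 -> (1,0,0)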
for(i=0;i<N;i++)
{
tempx1=GetRand();
tempx2=GetRand();
x_train[i][0]=tempx1;
x_train[i][1]=tempx2;
sum=pow(tempx1,2)+pow(tempx2,2);
if(sum<=0.16)
{
t_train[i][0]=0;
t_train[i][1]=0;
t_train[i][2]=1;
}
if(sum>0.16 && sum<=0.64)
{
t_train[i][0]=0;
t_train[i][1]=1;
t_train[i][2]=0;
}
if(sum>0.64)
{
t_train[i][0]=1;
t_train[i][1]=0;
t_train[i][2]=0;
}
}
    //test-set patterns
for(i=0;i<T;i++)
{
tempx1=GetRand();
tempx2=GetRand();
x_test[i][0]=tempx1;
x_test[i][1]=tempx2;
sum=pow(tempx1,2)+pow(tempx2,2);
if(sum<=0.16)
{
t_test[i][0]=0;
t_test[i][1]=0;
t_test[i][2]=1;
}
if(sum>0.16 && sum<=0.64)
{
t_test[i][0]=0;
t_test[i][1]=1;
t_test[i][2]=0;
}
if(sum>0.64)
{
t_test[i][0]=1;
t_test[i][1]=0;
t_test[i][2]=0;
}
}
}
//termination check: returns 1 when the change in total training error drops below e
int finish(){
int i,j;
double diafora,Ei;
E=0;
for(i=0;i<N;i++){
for(j=0;j<K;j++){
forward_pass(x_train[i]);
Ei=y[j]-t_train[i][j];
E=E+(((double)1/(double)2)*pow(Ei,2));
}
}
    //compute the difference between the previous and the current total error
    diafora=fabs(Epre-E);
    printf("\t\t error of the previous epoch %lf\n", Epre);
    printf("\t\t error of the current epoch %lf\n\n", E);
    Epre=E;
    //check against the threshold
if(diafora<e)
return(1);
else
return(0);
}
//////------------------main--------
main()
{
int i,j,epoxh=0;
    //prepare the input patterns
read_arxeio();
    //initialize weights and biases in (-1,1)
initialize_poloseis();
initialize_varwn();
    //initialize the derivative accumulators to 0
initialiaze_parag_polosewn();
initialiaze_parag_varwn();
    //start of the training epochs
while(epoxh<MaxEpoxh)
{
for(i=0;i<N;i++)
{
            //forward pass
forward_pass(x_train[i]);
            //backward pass
backprop(t_train[i]);
            //accumulate the weight and bias derivatives
sum_dw(x_train[i]);
sum_db();
}
//gradient descent
gradient_descent();
        //weights and biases of this epoch
//printf_varoi_polwseis();
//<NAME>
if(finish()==1)
{
printf("MLP is trained in: %d \n",epoxh);
break;
}
        //otherwise continue training
else
{
epoxh++;
initialiaze_parag_varwn();
initialiaze_parag_polosewn();
}
}
    // confidence threshold 0.8
//double vev=0.8;
double sin1=0;
double sin2=0;
for(i=0; i<T; i++)
{
printf("%f %f--->\n", x_test[i][0], x_test[i][1]);
forward_pass(x_test[i]);
for(j=0; j<K; j++)
{
printf("%f it should be %f\n", y[j], t_test[i][j]);
if(y[j]>0.5)
{
sin1++;
}
if(y[j]<0.5)
{
sin2++;
}
}
}
double sin=sin1+sin2;
//printf("%lf\n", (sin/T)*100);
}
<file_sep>/rand.c
#include <time.h>
#include <stdio.h>
#include <stdlib.h>
//int init=0;
int init=0;
//generate a random number
double GetRand()
{
int i;
while(init==0)
{
srand((unsigned)(time(0)));
init=1;
}
double rr = ( ((double)rand() / ((double)(RAND_MAX))*18+1.0));
return(rr);
}
main()
{
double rr;
int i;
// initrand();
for(i=0;i<50;i++)
{
rr = (int)GetRand();
//double x=rr*M;
printf("%lf\n",rr);
}
}
/*
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
int main ()
{
double iSecret, iGuess;
srand ( time(NULL) );
iSecret = rand() % 10 + 1;
do {
printf ("Guess the number (1 to 10): ");
scanf ("%d",&iGuess);
if (iSecret<iGuess) puts ("The secret number is lower");
else if (iSecret>iGuess) puts ("The secret number is higher");
} while (iSecret!=iGuess);
puts ("Congratulations!");
return 0;
}
*/
|
4af6a0df9a380a70d2b03fbc8824417370ab6f90
|
[
"C"
] | 2
|
C
|
ekrikelis/Multilayer-perceptron
|
51c08af9952514d354ad83b39a5772a184c62491
|
dc3307451f88628c1364ad4b9820c3b6e32ad1f6
|
refs/heads/master
|
<file_sep>var mongoose = require('mongoose');
mongoose.Promise = require('bluebird');
var mongoDB = require('./Config').mongoDB;
mongoose.connect(mongoDB, function(err) {
if (err)
console.log('Mongoose Error:', err);
else
console.log('Mongoose connected', mongoDB);
});<file_sep>var jwt = require('jsonwebtoken');
var Users = require('../models/Users');
var secretKey = require('../models/Config').secretKey;
module.exports.getUsernameAndPassword = function(user, haveToken) {
return new Promise(function (resolve, reject) {
Users.findOne({
username: user.username
}).then(function (res) {
if (!res) {
reject({ success: false, message: 'No User' });
} else if (res.password !== user.password) {
reject({ success: false, message: 'Wrong Password' });
} else {
resolve({ success: true, message: 'User Found'});
}
}).catch(function (err) {
reject({ success: false, error: err });
})
})
};
module.exports.getAllUsers = function () {
return Users.find().then(function (result) {
if (!result) {
return {
success: false,
message: 'Error'
}
} else {
if (result.length) {
return {
success: true,
message: 'User List',
data: result
}
} else {
return {
success: false,
message: 'No User on Database'
}
}
}
})
};
module.exports.addUser = function(user) {
    // findOne resolves with the existing user document (or null), not an error
    return Users.findOne({ username: user.username }).then(function (existingUser) {
        if (existingUser) {
            return { success: false, message: 'This Username Is Already Taken', username: user.username }
} else {
return Users.create(user).then(function (res) {
if (res) {
return { success: true, message: 'Register Successful', user: res, };
}
})
}
});
}<file_sep>var express = require('express');
var MainRouter = require('./Main');
var HomeRouter = require('./Home');
var LoginRouter = require('./Login');
var RegisterRouter = require('./Register');
var UserListRouter = require('./UserList');
module.exports = function(app) {
app.use('/', MainRouter);
app.use('/home', HomeRouter);
app.use('/login', LoginRouter);
app.use('/register', RegisterRouter);
app.use('/userlist', UserListRouter);
}<file_sep>var mongoose = require('mongoose');
var collection = 'users';
var UserSchema = new mongoose.Schema({
name: {
type: String,
required: true
},
surname: {
type: String,
required: true
},
username: {
type: String,
unique: true,
required: true
},
password: {
type: String,
required: true
}
}, {collection: collection});
var User = mongoose.model(collection, UserSchema);
module.exports = User;<file_sep># express-practice-1
GET operations
<file_sep>var express = require('express');
var router = express.Router();
var path = require('path');
var bodyParser = require('body-parser');
var ejs = require('ejs');
var ejsLayout = require('express-ejs-layouts');
var jwt = require('jsonwebtoken');
var Router = require('./src/router/Router');
var Database = require('./src/models/Database');
var Config = require('./src/models/Config');
var getUsernameAndPassword = require('./src/managers/User').getUsernameAndPassword;
var app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.use('/public', express.static(path.join(__dirname, 'public')));
app.set('view engine', 'ejs');
app.set('views', path.join(__dirname, './public/views/pages'));
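// Auth middleware: /login and /register are public; every other route must supply a JWT
// in the 'token' header, which is verified and then cross-checked against the stored user
// before the request is allowed through.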
app.use(function(req, res, next) {
var isFreeZone = req.path === '/login' || req.path === '/register';
if (isFreeZone) {
next();
} else {
var token = req.headers['token'];
if (token) {
var decoded = jwt.verify(token, Config.secretKey, function(err, data) {
if (err) {
res.send({ success: false, message: 'TOKEN ERROR' });
} else {
var haveToken = true;
var promise = getUsernameAndPassword(data, haveToken);
console.log(data);
promise.then(function (response) {
console.log(response);
if (response.success) {
next();
} else {
res.send({ success: false, message: 'Token - User Relation Fail'});
}
})
}
});
} else {
res.status(403).render('Forbidden', {
message: 'Please log in.',
link: '/login',
linkText: 'Go to Login Page'
});
}
}
});
app.use(ejsLayout);
Router(app);
app.listen(Config.port, function() {
console.log('Server running at localhost:%s', Config.port);
});<file_sep>module.exports.index = function(req, res) {
res.render('Main', {
pageTitle: 'Main',
username: '<NAME>'
});
};<file_sep>module.exports = {
'secretKey': 'express-practice-1',
'mongoDB': 'mongodb://localhost/nodejs-practice-1',
'port': 3030
};<file_sep>module.exports.index = function(req, res) {
// res.sendFile(path.join(__dirname, '../pages/Home.html'));
res.render('Home', {
pageTitle: 'Home'
});
};<file_sep>var express = require('express');
var router = express.Router();
var controller = require('../controller/UserList');
router.get('/', controller.index);
router.get('/update', controller.redirecttoindex);
router.get('/delete/:id', controller.delete);
router.get('/update/:id', controller.getUpdate);
router.post('/update', controller.postUpdate);
module.exports = router;
|
a3c944e0cec76351985b0ffa6797e0934b177ac6
|
[
"JavaScript",
"Markdown"
] | 10
|
JavaScript
|
dizefurkan/express-practice-1
|
ee47dce7ec5725a1111ed0900e6c80f22d7b9338
|
8f412b098c17f580557686b412bbffff6178aad7
|
refs/heads/master
|
<file_sep>source 'https://rubygems.org'
gem 'data_mapper'
gem 'dm-postgres-adapter'
gem 'sinatra'
gem 'bcrypt-ruby'
gem 'rack-flash3'
gem 'sinatra-partial'
group :test, :development do
gem 'rspec'
gem 'cucumber'
gem 'cucumber-sinatra'
gem 'capybara'
gem 'launchy'
gem 'database_cleaner'
end
|
3dd0e177e4c228bf9281ea43778c1453ceae56e3
|
[
"Ruby"
] | 1
|
Ruby
|
zrasool88/bookmark_manager
|
83738c022bc97dfb1c902c3734b017adb1071147
|
0062680b6a98af296701e3db72cbb70d89cb543d
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Tankstelle
{
class Zapfsaule
{
public int SauleNr;
public string[] Kraftstoffarten;
public int getankt;
public bool aktiv = false;
public bool gesperrt = false;
}
}
<file_sep># Tankstelle
Assignment for M226b
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Tankstelle
{
class Kasse
{
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Tankstelle
{
class Kraftstoff
{
public int Literpreis;
}
}
|
1d44ca7ebf0241a700ef5f87e000fec918eccad2
|
[
"Markdown",
"C#"
] | 4
|
C#
|
mstettler69/Tankstelle
|
fbe2d9cf4d925b2cff776458767e1c2d491e7152
|
2cdef8c074478b78e5ac20f93ab62eaad312d6d6
|
refs/heads/master
|
<file_sep>//
// Bundle-Decodable.swift
// ChallengeDay60
//
// Created by <NAME> on 23/10/2020.
//
import Foundation
extension Bundle {
func decode(stringUrl: String) -> [JUser] {
if let url = URL(string: stringUrl) {
if let data = try? Data(contentsOf: url){
print("Load success")
let decoder = JSONDecoder()
decoder.dateDecodingStrategy = .iso8601
if let jsonUsers = try? decoder.decode([JUser].self, from: data) {
print("Parsing success")
return jsonUsers
} else {
print("Parsing unsuccessful")
}
} else {
print("Loading failed")
}
}
return []
}
}
<file_sep>//
// AstronautView.swift
// Moonshot
//
// Created by <NAME> on 13/10/2020.
//
import SwiftUI
struct AstronautView: View {
let astronaut: Astronaut
let activeMissions: [Mission]
var body: some View {
GeometryReader { geometry in
ScrollView(.vertical){
VStack {
Image(self.astronaut.id)
.resizable()
.scaledToFit()
.frame(width: geometry.size.width)
Text(self.astronaut.description)
.padding()
// if the text is not showing full and ending with ... just change the priority of the layout with code below
// default is 0 meaning, that all layouts will have same chance to occupy the free space
.layoutPriority(1)
ForEach(activeMissions){ mission in
// NavigationLink(destination: Text("TODO")) {
HStack(alignment: .center){
Image(mission.image)
.resizable()
.scaledToFit()
.frame(width: 44, height: 44)
VStack(alignment: .leading){
Text(mission.displayName)
.font(.headline)
Text(mission.formattedLaunchDate)
}
}
// }
.frame(maxWidth: geometry.size.width * 0.95, alignment: .leading)
}
}
}
}
.navigationBarTitle(Text(astronaut.name), displayMode: .inline)
}
init(astronaut: Astronaut, missions: [Mission]) {
self.astronaut = astronaut
var matches = [Mission]()
for mission in missions {
if mission.crew.first(where: { $0.name == astronaut.id }) != nil {
matches.append(mission)
}
}
self.activeMissions = matches
}
}
struct AstronautView_Previews: PreviewProvider {
static let astronauts: [Astronaut] = Bundle.main.decode("astronauts.json")
static let missions: [Mission] = Bundle.main.decode("missions.json")
static var previews: some View {
AstronautView(astronaut: astronauts[13], missions: missions)
}
}
<file_sep>//
// User+CoreDataProperties.swift
// ChallengeDay60
//
// Created by <NAME> on 23/10/2020.
//
//
import Foundation
import CoreData
extension User {
@nonobjc public class func fetchRequest() -> NSFetchRequest<User> {
return NSFetchRequest<User>(entityName: "User")
}
@NSManaged public var id: UUID?
@NSManaged public var isActive: Bool
@NSManaged public var name: String?
@NSManaged public var age: Int16
@NSManaged public var company: String?
@NSManaged public var email: String?
@NSManaged public var address: String?
@NSManaged public var about: String?
@NSManaged public var registered: Date?
@NSManaged public var friends: NSSet?
public var friendsArray: [Friend] {
let set = friends as? Set<Friend> ?? []
return set.sorted {
$0.name ?? "unknown" < $1.name ?? "unknown"
}
}
var formattedDate: String {
let formatter = DateFormatter()
formatter.dateStyle = .long
return formatter.string(from: registered ?? Date())
}
func transferJUserToDatabase(jUser: JUser) {
self.id = jUser.id
self.isActive = jUser.isActive
self.name = jUser.name
self.age = Int16(jUser.age)
self.company = jUser.company
self.email = jUser.email
self.address = jUser.address
self.about = jUser.about
self.registered = jUser.registered
}
}
// MARK: Generated accessors for friends
extension User {
@objc(addFriendsObject:)
@NSManaged public func addToFriends(_ value: Friend)
@objc(removeFriendsObject:)
@NSManaged public func removeFromFriends(_ value: Friend)
@objc(addFriends:)
@NSManaged public func addToFriends(_ values: NSSet)
@objc(removeFriends:)
@NSManaged public func removeFromFriends(_ values: NSSet)
}
extension User : Identifiable {
}
<file_sep>//
// ContentView.swift
// ViewsAndModifiers
//
// Created by <NAME> on 03/10/2020.
//
import SwiftUI
struct ContentView: View {
var body: some View {
Text("Text")
.largeBlueFont()
}
}
struct LargeBlueFont: ViewModifier {
func body(content: Content) -> some View {
content
.font(.largeTitle)
.foregroundColor(/*@START_MENU_TOKEN@*/.blue/*@END_MENU_TOKEN@*/)
}
}
extension View {
func largeBlueFont() -> some View{
self.modifier(LargeBlueFont())
}
}
struct Watermark: ViewModifier {
var text: String
func body(content: Content) -> some View {
ZStack(alignment: .bottomTrailing){
content
Text(text)
.font(.caption)
.foregroundColor(.white)
.padding(5)
.background(Color.black)
}
}
}
extension View{
func watermarked(with text: String) -> some View{
self.modifier(Watermark(text: text))
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// ContentView.swift
// Accessibility
//
// Created by <NAME> on 05/11/2020.
//
import SwiftUI
struct ContentView: View {
let pictures = ["ales-krivec-15949", "galina-n-189483", "kevin-horstmann-141705", "nicolas-tissot-335096"]
let labels = ["Tulips", "Frozen tree buds", "Sunflowers", "Fireworks"]
@State private var selectedPicture = Int.random(in: 0...3)
let score: Int = 1000
@State private var rating = 3
var body: some View {
Stepper("Rate our service: \(rating)/5", value: $rating, in: 1...5)
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// ContentView.swift
// ChallengeDay60
//
// Created by <NAME> on 22/10/2020.
//
import SwiftUI
struct ContentView: View {
@Environment(\.managedObjectContext) var moc
@FetchRequest(entity: User.entity(), sortDescriptors: []) var databaseUsers: FetchedResults<User>
var body: some View {
NavigationView {
VStack {
List {
ForEach(databaseUsers, id: \.self) { user in
NavigationLink(destination: UserDetailView(allUsers: databaseUsers, user: user)) {
VStack(alignment: .leading) {
Text(user.name ?? "Unknown")
.font(.headline)
Text(user.company ?? "Unknown")
.font(.body)
}
}
}
.onDelete(perform: deleteUser)
}
}
.navigationBarTitle(Text("iFriends"))
}
}
func deleteUser(at offsets: IndexSet) {
for offset in offsets {
let user = databaseUsers[offset]
moc.delete(user)
}
try? moc.save()
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
let context = (UIApplication.shared.delegate as! AppDelegate).persistentContainer.viewContext
return ContentView().environment(\.managedObjectContext, context)
}
}
<file_sep>//
// ContentView.swift
// ConferenceContacts
//
// Created by <NAME> on 06/11/2020.
//
import MapKit
import SwiftUI
struct ContentView: View {
@ObservedObject var allContacts: AllContacts = AllContacts()
@State private var showingSheet = false
@State private var newPictureAdded = false
@State private var image: Image?
@State private var inputImage: UIImage?
@State private var firstName: String = ""
@State private var surname: String = ""
@State private var centerCoordinate = CLLocationCoordinate2D()
@State private var anotation = MKPointAnnotation()
let locationFetcher = LocationFetcher()
var body: some View {
if newPictureAdded {
//view for adding name and surname to picture
NavigationView {
VStack {
image?
.resizable()
.frame(width: 300, height: 300)
.clipShape(Circle())
.shadow(radius: 2)
.padding(.horizontal)
TextField("First name", text: $firstName)
TextField("Surname", text: $surname)
MapView(centerCoordinate: $centerCoordinate, annotation: anotation)
Spacer()
HStack{
Button("Save") {
saveContact()
saveData()
self.firstName = ""
self.surname = ""
self.image = nil
self.inputImage = nil
loadData()
self.newPictureAdded = false
}
Button("Cancel") {
self.firstName = ""
self.surname = ""
self.image = nil
self.inputImage = nil
self.newPictureAdded = false
}
}
}
.padding()
.navigationBarTitle(Text("Add new contact"))
}
} else {
// List view of all contacts
NavigationView {
ZStack {
List {
ForEach(allContacts.contacts, id: \.id) { contact in
NavigationLink(destination: DetailContactView(contact: contact)){
HStack {
contact.image
.resizable()
.clipShape(Circle())
.frame(width: 50, height: 50)
Text(contact.firstName + " " + contact.surname)
.font(.headline)
}
}
}
.onDelete(perform: deleteContact)
}
VStack {
Spacer()
HStack {
Spacer()
Button(action: {
self.showingSheet = true
}) {
Image(systemName: "plus")
.padding()
.background(Color.black.opacity(0.75))
.foregroundColor(.white)
.font(.title)
.clipShape(/*@START_MENU_TOKEN@*/Circle()/*@END_MENU_TOKEN@*/)
.padding(.trailing)
}
}
}
}
.navigationBarTitle(Text("Conference Contacts"))
.sheet(isPresented: $showingSheet, onDismiss: loadImage, content: {
ImagePicker(image: self.$inputImage)
})
}
.onAppear(perform: {
loadData()
locationFetcher.start()
})
}
}
func getCurrentLocation() {
self.centerCoordinate = locationFetcher.lastKnownLocation ?? CLLocationCoordinate2D(latitude: 51.0, longitude: 0.0)
}
func loadPhoto(contact: Contact) -> Image {
var image: Image = Image(systemName: "plus")
let filename = getDocumentDirectory().appendingPathComponent("\(contact.id)")
if let data = try? Data(contentsOf: filename) {
guard let inputImage = UIImage(data: data) else { return Image(systemName: "plus") }
image = Image(uiImage: inputImage)
}
return image
}
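    // Persistence used below: each contact's photo is written as a separate JPEG named
    // after the contact's UUID, while the contact metadata is JSON-encoded into a single
    // "SavedContacts" file in the app's documents directory.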
func saveContact(){
let contact = Contact(id: UUID(), firstName: self.firstName, surname: self.surname, latitude: anotation.coordinate.latitude, longitude: anotation.coordinate.longitude)
allContacts.contacts.append(contact)
let filename = getDocumentDirectory().appendingPathComponent("\(contact.id)")
if let jpegData = inputImage?.jpegData(compressionQuality: 0.8){
try? jpegData.write(to: filename, options: [.atomicWrite, .completeFileProtection])
}
}
func getDocumentDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0]
}
func loadData() {
let filename = getDocumentDirectory().appendingPathComponent("SavedContacts")
do {
let data = try Data(contentsOf: filename)
allContacts.contacts = try JSONDecoder().decode([Contact].self, from: data)
allContacts.contacts.sort()
print("Load successful.")
} catch {
print("Unable to load saved data.")
}
}
func saveData() {
do {
let filename = getDocumentDirectory().appendingPathComponent("SavedContacts")
let data = try JSONEncoder().encode(self.allContacts.contacts)
try data.write(to: filename, options: [.atomicWrite, .completeFileProtection])
print("Save successful.")
} catch {
print("Unable to save data.")
}
}
func deleteContact(at offsets: IndexSet) {
for offset in offsets {
allContacts.contacts.remove(at: offset)
saveData()
print("Contact deleted.")
}
}
func loadImage() {
guard let inputImage = inputImage else { return }
image = Image(uiImage: inputImage)
self.newPictureAdded = true
getCurrentLocation()
anotation.coordinate = centerCoordinate
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView(allContacts: AllContacts.example)
}
}
<file_sep># Code samples for project 10
import SwiftUI
// if I use @Published for variable inside the Codable class, SwiftUI will not compile
// I have to tell it which part of the @Published is to be encoded and decoded
class User: ObservableObject, Codable {
// 1. add CodingKeys
enum CodingKeys: CodingKey {
case name
}
@Published var name = "<NAME>"
// 2. create custom init
required init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
name = try container.decode(String.self, forKey: .name)
}
// 3. create custom encode func
func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encode(name, forKey: .name)
}
}
struct Response: Codable {
var results: [Result]
}
struct Result: Codable {
var trackId: Int
var trackName: String
var collectionName: String
}
struct ContentView: View {
@State var results = [Result]()
@State var userName = ""
@State var email = ""
var body: some View {
// DISABLING THE PART OF CODE
Form {
Section {
TextField("User name", text: $userName)
TextField("Email", text: $email)
}
Section {
Button("Create account") {
print("Creating account...")
}
}
// with this line of code I will disable the button if the userName or email is empty
.disabled(userName.isEmpty || email.isEmpty)
}
// THIS IS THE PART FOR URL SESSION LOADING THE SONGS FROM ITUNES
// List(results, id: \.trackId){ item in
// VStack(alignment: .leading){
// Text(item.trackName)
// .font(.headline)
//
// Text(item.collectionName)
// }
// }
// .onAppear(perform: loadData)
}
// loading data from URL
func loadData(){
// 1. prepare URL string
guard let url = URL(string: "https://itunes.apple.com/search?term=taylor+swift&entity=song") else {
print("Invalid URL")
return
}
// 2. create request from the URL string
let request = URLRequest(url: url)
// 3. initiate URL session
URLSession.shared.dataTask(with: request) { data, response, error in
// try loading the data
if let data = data {
// if we load data try to decode them
if let decodedResponse = try? JSONDecoder().decode(Response.self, from: data) {
// if we are all the way here, we can return the data from background to the main program
DispatchQueue.main.async {
self.results = decodedResponse.results
}
return
}
}
// otherwise print error to the console
print("Fetch failed: \(error?.localizedDescription ?? "Unknown Error")")
// DON'T FORGET the .resume() at the end of URLSession
}.resume()
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// ContentView.swift
// RockPaperScissors
//
// Created by <NAME> on 04/10/2020.
//
import SwiftUI
struct ContentView: View {
let options = ["Rock", "Paper", "Scissors"]
@State private var appMove = Int.random(in: 0...2)
@State private var winOrLose = Bool.random()
@State private var currentScore = 0
@State private var scoreTitle = ""
@State private var showingScore = false
var body: some View {
VStack{
Text("I choose \(options[appMove])")
Text("You have to \(winOrLose ? "win" : "lose")")
HStack{
ForEach(0..<options.count){index in
Button(action: {
self.optionTapped(index)
}, label: {
Text("\(options[index])")
.foregroundColor(.white)
.padding()
.background(RadialGradient(gradient: Gradient(colors: [Color.blue, Color.black
]), center: /*@START_MENU_TOKEN@*/.center/*@END_MENU_TOKEN@*/, startRadius: 0, endRadius: 50))
.clipShape(Capsule())
})
}
}
Text("Current score is \(currentScore)")
}
.alert(isPresented: $showingScore) {
Alert(title: Text(scoreTitle), message: Text("Your score is \(currentScore)"), dismissButton: .default(Text("Continue")) {
self.askQuestion()
})
}
}
func optionTapped(_ number: Int){
if winOrLose {
if number > appMove || (appMove == 2 && number == 0){
currentScore += 1
scoreTitle = "Correct"
} else {
currentScore -= 1
scoreTitle = "Wrong"
}
} else if number < appMove || (appMove == 0 && number == 2){
currentScore += 1
scoreTitle = "Correct"
} else {
currentScore -= 1
scoreTitle = "Wrong"
}
showingScore = true
}
func askQuestion(){
appMove = Int.random(in: 0...2)
winOrLose = Bool.random()
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// Country+CoreDataClass.swift
// CoreDataProject
//
// Created by <NAME> on 21/10/2020.
//
//
import Foundation
import CoreData
@objc(Country)
public class Country: NSManagedObject {
}
<file_sep>//
// ContentView.swift
// HotProspects
//
// Created by <NAME> on 15/11/2020.
//
import SamplePackage
import UserNotifications
import SwiftUI
class User: ObservableObject {
@Published var name = "<NAME>"
}
struct EditView: View {
@EnvironmentObject var user: User
var body: some View {
TextField("Name", text: $user.name)
}
}
struct DisplayView: View {
@EnvironmentObject var user: User
var body: some View {
Text(user.name)
}
}
enum NetworkError: Error {
case badURL, requestFailed, unknown
}
class DelayedUpdater: ObservableObject {
var value = 0 {
willSet {
objectWillChange.send()
}
}
init() {
for i in 1...10 {
DispatchQueue.main.asyncAfter(deadline: .now() + Double(i)) {
self.value += 1
}
}
}
}
struct ContentView: View {
@State private var selectedTab = 0
let user = User()
@ObservedObject var updater = DelayedUpdater()
@State private var backgroundColor = Color.red
let possibleNumbers = Array(1...60)
var results: String {
let selected = possibleNumbers.random(7).sorted()
let strings = selected.map(String.init)
return strings.joined(separator: ", ")
}
var body: some View {
// ADDING DEPENDENCIES
Text(results)
// NOTIFICATIONS
// VStack {
// Button("Request permision") {
// UNUserNotificationCenter.current().requestAuthorization(options: [.alert, .badge, .sound]) { success, error in
// if success {
// print("All set!")
// } else if let error = error {
// print(error.localizedDescription)
// }
// }
// }
//
// Button("Schedule notification") {
// let content = UNMutableNotificationContent()
// content.title = "Feed the cat"
// content.subtitle = "It looks hungry"
// content.sound = UNNotificationSound.default
//
// let trigger = UNTimeIntervalNotificationTrigger(timeInterval: 5, repeats: false)
//
// let request = UNNotificationRequest(identifier: UUID().uuidString, content: content, trigger: trigger)
//
// UNUserNotificationCenter.current().add(request)
//
// }
// }
// CONTEXT MENU
// VStack {
// Text("Hello, World!")
// .padding()
// .background(backgroundColor)
//
// Text("Change color")
// .padding()
// .contextMenu {
// Button(action: {
// self.backgroundColor = .red
// }) {
// Text("Red")
// Image(systemName: "checkmark.circle.fill")
// .foregroundColor(.red)
// }
//
// Button(action: {
// self.backgroundColor = .green
// }) {
// Text("Green")
// }
//
// Button(action: {
// self.backgroundColor = .blue
// }) {
// Text("Blue")
// }
// }
// }
// IMAGE INTERPOLATION
// Image("example")
// .interpolation(.none)
// .resizable()
// .scaledToFit()
// .frame(maxHeight: .infinity)
// .background(Color.black)
// .edgesIgnoringSafeArea(/*@START_MENU_TOKEN@*/.all/*@END_MENU_TOKEN@*/)
// FETCHING DATA FROM REMOTE URL
// .onAppear {
// self.fetchData(from: "https://www.apple.com") { result in
// switch result {
// case .success(let str):
// print(str)
// case .failure(let error):
// switch error {
// case .badURL:
// print("Bad URL")
// case .requestFailed:
// print("Network problems")
// case .unknown:
// print("Unknown error")
// }
// }
// }
// }
}
func fetchData(from urlString: String, completion: @escaping (Result<String, NetworkError>) -> Void) {
guard let url = URL(string: urlString) else {
completion(.failure(.badURL))
return
}
URLSession.shared.dataTask(with: url) { data, response, error in
DispatchQueue.main.async {
if let data = data {
let stringData = String(decoding: data, as: UTF8.self)
completion(.success(stringData))
} else if error != nil {
completion(.failure(.requestFailed))
} else {
completion(.failure(.unknown))
}
}
} .resume()
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// ContentView.swift
// UnitConversion
//
// Created by <NAME> on 01/10/2020.
//
import SwiftUI
struct ContentView: View {
@State private var inputValue = ""
@State private var inputUnit = 0
@State private var outputUnit = 0
let units = ["km", "m", "cm", "mm", "mi", "yd", "ft", "in"]
let conversionToMm = [1000000, 1000, 10, 1, 1609344, 914.4, 304.8, 25.4]
var outputValue: Double{
let userInputValue = Double(inputValue) ?? 0
let userInputInMm = userInputValue * Double(conversionToMm[inputUnit])
let result = userInputInMm / Double(conversionToMm[outputUnit])
return result
}
var body: some View {
NavigationView{
Form{
Section(header: Text("Input")){
TextField("Input value", text: $inputValue)
.keyboardType(.decimalPad)
Picker("Input unit", selection: $inputUnit){
ForEach(0 ..< units.count){
Text("\(units[$0])")
}
}
.pickerStyle(SegmentedPickerStyle())
}
Section(header: Text("Output")){
Text("\(outputValue)")
Picker("Output unit", selection: $outputUnit){
ForEach(0 ..< units.count){
Text("\(units[$0])")
}
}
.pickerStyle(SegmentedPickerStyle())
}
}
.navigationBarTitle("Unit Conversion")
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// MultiplyTableApp.swift
// MultiplyTable
//
// Created by <NAME> on 09/10/2020.
//
import SwiftUI
@main
struct MultiplyTableApp: App {
var body: some Scene {
WindowGroup {
ContentView()
}
}
}
<file_sep>//
// FilteredList.swift
// CoreDataProject
//
// Created by <NAME> on 21/10/2020.
//
import CoreData
import SwiftUI
struct FilteredList<T: NSManagedObject, Content: View>: View {
var fetchRequest: FetchRequest<T>
var singers: FetchedResults<T> {
fetchRequest.wrappedValue
}
var sortDescriptors: [NSSortDescriptor]
let content: (T) -> Content
var body: some View {
List(fetchRequest.wrappedValue, id: \.self) { singer in
self.content(singer)
}
}
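    // The initializer below builds the fetch request dynamically: an empty filterValue
    // fetches every record, otherwise a "%K BEGINSWITH %@" predicate filters on filterKey.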
init(filterKey: String, filterValue: String, sortDescriptors: [NSSortDescriptor], @ViewBuilder content: @escaping (T) -> Content) {
if (filterValue == "") {
fetchRequest = FetchRequest<T>(entity: T.entity(), sortDescriptors: sortDescriptors)
} else {
fetchRequest = FetchRequest<T>(entity: T.entity(), sortDescriptors: sortDescriptors, predicate: NSPredicate(format:"%K BEGINSWITH %@", filterKey, filterValue))
}
self.content = content
self.sortDescriptors = sortDescriptors
}
}
//struct FilteredList_Previews: PreviewProvider {
// static var previews: some View {
// FilteredList()
// }
//}
<file_sep>//
// ShowActivity.swift
// TrackIt
//
// Created by <NAME> on 15/10/2020.
//
import SwiftUI
struct ShowActivity: View {
@ObservedObject var allActivities: AllActivities
var activity: Activity
@State private var counter = 3
var body: some View {
List {
Section(header: Text("Description")) {
Text(self.activity.activityDescription)
}
Section(header: Text("Activity counter")){
HStack {
Spacer()
Button(action: {
if counter > 0 {
counter -= 1
}
}) {
Image(systemName: "minus")
.foregroundColor(/*@START_MENU_TOKEN@*/.blue/*@END_MENU_TOKEN@*/)
.frame(width: 30, height: 30)
}
.buttonStyle(BorderlessButtonStyle())
Spacer()
Text("\(counter)")
.font(.largeTitle)
Spacer()
Button(action: {
counter += 1
}) {
Image(systemName: "plus")
.foregroundColor(.blue)
.frame(width: 30, height: 30)
}
.buttonStyle(BorderlessButtonStyle())
Spacer()
}
}
}
.navigationBarTitle(self.activity.activityName)
.onAppear {
self.counter = self.activity.activityCounter
}
.onDisappear {
updateCounter()
}
}
func updateCounter() {
if let indexItem = allActivities.activities.firstIndex(where: { (activity) -> Bool in
activity == self.activity
}) {
let tempActivity = Activity(activityName: self.activity.activityName, activityDescription: self.activity.activityDescription, activityCounter: self.counter)
self.allActivities.activities.remove(at: indexItem)
self.allActivities.activities.insert(tempActivity, at: indexItem)
}
}
}
struct ShowActivity_Previews: PreviewProvider {
static var previews: some View {
ShowActivity(allActivities: AllActivities(), activity: Activity(activityName: "Pokus", activityDescription: "Popis pokusu"))
}
}
<file_sep>//
// AddActivity.swift
// TrackIt
//
// Created by <NAME> on 15/10/2020.
//
import SwiftUI
struct AddActivity: View {
@ObservedObject var allActivities: AllActivities
@State private var name = ""
@State private var description = ""
@Environment(\.presentationMode) var presentationMode
@State private var showingAlert = false
var body: some View {
NavigationView {
Form {
TextField("Name", text: $name)
TextField("Description", text: $description)
}
.navigationBarTitle("Add Activity")
.navigationBarItems(
trailing: Button("Save") {
if !name.isEmpty {
let activity = Activity(activityName: self.name, activityDescription: self.description)
self.allActivities.activities.append(activity)
self.presentationMode.wrappedValue.dismiss()
} else {
showingAlert = true
}
}
)
}
.alert(isPresented: $showingAlert, content: {
Alert(title: Text("Missing name"), message: Text("Unable to save activity without name"), dismissButton: .default(Text("Continue")))
})
}
}
struct AddActivity_Previews: PreviewProvider {
static var previews: some View {
AddActivity(allActivities: AllActivities())
}
}
<file_sep>//
// UserDetailView.swift
// ChallengeDay60
//
// Created by <NAME> on 23/10/2020.
//
import SwiftUI
struct InformationView: View {
let header: String
let information: String
init(_ header: String, _ information: String) {
self.header = header
self.information = information
}
var body: some View {
ZStack {
RoundedRectangle(cornerRadius: 15)
.fill(Color.gray)
.frame(maxHeight: 75)
VStack {
Text(header)
.font(.headline)
.foregroundColor(.white)
Text(information)
.font(.body)
.fixedSize(horizontal: false, vertical: true)
.multilineTextAlignment(.center)
.foregroundColor(.white)
}
}
}
}
struct UserDetailView: View {
let allUsers: FetchedResults<User>
let user: User
var body: some View {
ScrollView {
VStack(alignment: .leading) {
HStack {
InformationView("COMPANY:", self.user.company ?? "Unknown Company")
InformationView("AGE:", "\(self.user.age)")
}
InformationView("E-MAIL:", self.user.email ?? "Unknown email")
InformationView("ADDRESS:", self.user.address ?? "Unknown address")
InformationView("REGISTERED FROM:", "\(self.user.formattedDate)")
Text("ABOUT")
.font(.headline)
Text(self.user.about ?? "Unknown about")
.font(.body)
.padding(.bottom, 10)
Text("TAGS")
.font(.headline)
// Text(returnAllTags(tagsArray: self.user.tags))
// .font(.body)
// .padding(.bottom, 5)
Section(header: Text("FRIENDS").font(.headline)) {
ForEach(self.user.friendsArray, id: \.id) {friend in
NavigationLink(destination: UserDetailView(allUsers: allUsers, user: returnUser(friendID: friend.id ?? UUID()))) {
Text(friend.name ?? "Unknown friend name")
.padding(5)
}
}
}
}
}
.padding()
.navigationBarTitle(Text(self.user.name ?? "Unknown User name"))
}
func returnUser(friendID: UUID) -> User {
return allUsers.first(where: { $0.id == friendID }) ?? allUsers[1]
}
func returnAllTags(tagsArray: [String]) -> String {
var result = ""
for tag in tagsArray {
result += tag + ", "
}
result.removeLast(2)
return result
}
}
//struct UserDetailView_Previews: PreviewProvider {
// static var previews: some View {
// UserDetailView()
// }
//}
<file_sep>//
// ContentView.swift
// Moonshot
//
// Created by <NAME> on 12/10/2020.
//
import SwiftUI
struct ContentView: View {
let astronauts: [Astronaut] = Bundle.main.decode("astronauts.json")
let missions: [Mission] = Bundle.main.decode("missions.json")
@State private var crewOrLaunch = true
var body: some View {
NavigationView {
List(missions) { mission in
NavigationLink(
destination: MissionView(mission: mission, astronauts: self.astronauts, allMissions: missions)) {
Image(mission.image)
.resizable()
.scaledToFit()
.frame(width: 44, height: 44)
VStack(alignment: .leading){
Text(mission.displayName)
.font(.headline)
// Text(mission.formattedLaunchDate)
Text(crewOrLaunch ? mission.formattedLaunchDate : getAstronautsNames(mission: mission))
}
}
}
.navigationBarTitle("Moonshot")
.navigationBarItems(trailing: Button(crewOrLaunch ? "Show crew" : "Show date"){
self.crewOrLaunch.toggle()
})
}
}
func getAstronautsNames(mission: Mission) -> String {
var result = ""
for member in mission.crew {
result += member.name.capitalized + " | "
}
result.removeLast(3)
return result
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// Activity.swift
// TrackIt
//
// Created by <NAME> on 15/10/2020.
//
import Foundation
struct Activity: Codable, Identifiable, Equatable {
    var id = UUID() // var (not let) so a decoded id is kept rather than regenerated
    let activityName: String
    let activityDescription: String
    // assumed from ShowActivity, which reads activityCounter and compares activities with ==
    var activityCounter: Int = 0
}
<file_sep>//
// Friend+CoreDataClass.swift
// ChallengeDay60
//
// Created by <NAME> on 23/10/2020.
//
//
import Foundation
import CoreData
@objc(Friend)
public class Friend: NSManagedObject {
}
<file_sep>//
// Resort.swift
// SnowSeeker
//
// Created by <NAME> on 16/12/2020.
//
import Foundation
struct Resort: Codable, Identifiable {
let id: String
let name: String
let country: String
let description: String
let imageCredit: String
let price: Int
let size: Int
let snowDepth: Int
let elevation: Int
let runs: Int
let facilities: [String]
var facilityTypes: [Facility] {
facilities.map(Facility.init)
}
static let allResorts: [Resort] = Bundle.main.decode("resorts.json")
static let example = allResorts[0]
enum SortingOptions: String, CaseIterable {
case original = "default"
case alphabetical = "alphabetically"
case country = "by country"
}
}
<file_sep>//
// TrackItApp.swift
// TrackIt
//
// Created by <NAME> on 15/10/2020.
//
import SwiftUI
@main
struct TrackItApp: App {
var body: some Scene {
WindowGroup {
ContentView()
}
}
}
<file_sep>//
// TestView.swift
// ChallengeDay60
//
// Created by <NAME> on 23/10/2020.
//
import SwiftUI
struct TestView: View {
let databaseUsers: FetchedResults<User>
let user: User
var body: some View {
VStack{
Text(user.name ?? "Unknown name")
ForEach(user.friendsArray, id: \.id) { friend in
Text(friend.name ?? "Unknown friend")
}
}
}
}
//struct TestView_Previews: PreviewProvider {
// static var previews: some View {
// TestView()
// }
//}
<file_sep>//
// Friend.swift
// ChallengeDay60
//
// Created by <NAME> on 23/10/2020.
//
import Foundation
struct JFriend: Identifiable, Codable {
let id: UUID
let name: String
}
<file_sep>//
// ContentView.swift
// CoreDataProject
//
// Created by <NAME> on 21/10/2020.
//
import CoreData
import SwiftUI
//if all variables in struct conforms to Hashable whole struct conforms to Hashable
struct Student: Hashable {
let name: String // String conforms to hashable
}
struct ContentView: View {
@Environment(\.managedObjectContext) var moc
@FetchRequest(entity: Country.entity(), sortDescriptors: []) var countries: FetchedResults<Country>
@State var filterValue = ""
@FetchRequest(entity: Ship.entity(), sortDescriptors: [], predicate: NSPredicate(format: "universe == %@", "Star Wars")) var ships: FetchedResults<Ship>
@FetchRequest(entity: Wizard.entity(), sortDescriptors: []) var wizards: FetchedResults<Wizard>
@FetchRequest(entity: Movie.entity(), sortDescriptors: []) var movies: FetchedResults<Movie>
let sortDescriptors = [NSSortDescriptor(keyPath: \Singer.lastName, ascending: true), NSSortDescriptor(keyPath: \Singer.lastName, ascending: true)]
@State var filterKey = "lastName"
let availableFilterKeys = ["lastName", "firstName"]
let students = [Student(name: "<NAME>"), Student(name: "<NAME>")]
var body: some View {
// VStack {
// List {
// ForEach(countries, id: \.self) { country in
// Section(header: Text(country.wrappedFullName)) {
// ForEach(country.candyArray, id: \.self) {candy in
// Text(candy.wrappedName)
// }
// }
// }
// }
//
// Button("Add") {
// let candy1 = Candy(context: self.moc)
// candy1.name = "Mars"
// candy1.origin = Country(context: self.moc)
// candy1.origin?.shortName = "UK"
// candy1.origin?.fullName = "United Kingdom"
//
// let candy2 = Candy(context: self.moc)
// candy2.name = "KitKat"
// candy2.origin = Country(context: self.moc)
// candy2.origin?.shortName = "UK"
// candy2.origin?.fullName = "United Kingdom"
//
// let candy3 = Candy(context: self.moc)
// candy3.name = "Twix"
// candy3.origin = Country(context: self.moc)
// candy3.origin?.shortName = "UK"
// candy3.origin?.fullName = "United Kingdom"
//
// let candy4 = Candy(context: self.moc)
// candy4.name = "Toblerone"
// candy4.origin = Country(context: self.moc)
// candy4.origin?.shortName = "CH"
// candy4.origin?.fullName = "Switzerland"
//
// try? self.moc.save()
// }
// }
VStack {
HStack {
Image(systemName: "magnifyingglass")
.offset(x: 10)
TextField("Filter singers", text: $filterValue)
.padding()
}
Picker(selection: $filterKey, label: Text("Select filter")) {
ForEach(availableFilterKeys, id: \.self) { filter in
Text(filter)
}
}
.pickerStyle(SegmentedPickerStyle())
FilteredList(filterKey: filterKey, filterValue: filterValue, sortDescriptors: sortDescriptors) {(singer: Singer) in
Text("\(singer.wrappedFirstName) \(singer.wrappedLastName)")
}
Button("Add Examples") {
let taylor = Singer(context: self.moc)
taylor.firstName = "Taylor"
taylor.lastName = "Swift"
let ed = Singer(context: moc)
ed.firstName = "Ed"
ed.lastName = "Sheeran"
let adele = Singer(context: moc)
adele.firstName = "Adele"
adele.lastName = "Adkins"
try? self.moc.save()
}
Button("Show A") {
self.filterValue = "A"
}
Button("Show S") {
self.filterValue = "S"
}
}
// VStack {
// List(ships, id: \.self) { ship in
// Text(ship.name ?? "Unknown name")
// }
//
// Button("Add Examples") {
// let ship = Ship(context: self.moc)
// ship.name = "Enterprise"
// ship.universe = "Star Trek"
//
// let ship2 = Ship(context: self.moc)
// ship2.name = "Defiant"
// ship2.universe = "Star Trek"
//
// let ship3 = Ship(context: moc)
// ship3.name = "Millenium Falcon"
// ship3.universe = "Star Wars"
//
// let ship4 = Ship(context: moc)
// ship4.name = "Executor"
// ship4.universe = "Star Wars"
//
// try? self.moc.save()
// }
// }
// VStack {
// since Student conforms to hashable we can use \.self as id
//            note: two identical Students compare equal and produce the same hash (it is
//            synthesized from the stored properties), so \.self only behaves well for unique values
// List(students, id: \.self) { student in
// Text(student.name)
// }
// Section{
// List(wizards, id: \.self) {wizard in
// Text(wizard.name ?? "Unknown")
// }
// Button("Add Wizard") {
// let wizard = Wizard(context: self.moc)
// wizard.name = "<NAME>"
// }
// Button("Save Wizards") {
// do {
// try self.moc.save()
// } catch {
// print(error.localizedDescription)
// }
// }
// }
//
// Section {
// List {
// ForEach(movies, id: \.self) {movie in
// HStack {
// Text(movie.title ?? "Unknown")
// .font(.title)
// Spacer()
// VStack {
// Text(movie.director ?? "Unknown")
// Text("\(movie.year)")
// }
// }
// }
// .onDelete(perform: deleteMovie)
// }
//
// Button("Add movie") {
// let movie = Movie(context: self.moc)
// movie.title = "The Godfather"
// movie.director = "Director"
// movie.year = 1985
// }
//
// Button("Save Movies") {
// // it is good practice to check if there are any changes before saving to CoreData
// if self.moc.hasChanges {
// try? self.moc.save()
// print ("Saving changes")
// } else {
// print("Not saving...")
// }
// }
// }
// }
// }
//
// func deleteMovie(at offsets: IndexSet){
// for offset in offsets {
// let movie = movies[offset]
// moc.delete(movie)
// }
//
// try? moc.save()
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// AllContactsListView.swift
// ConferenceContacts
//
// Created by <NAME> on 10/11/2020.
//
import SwiftUI
struct AllContactsListView: View {
var body: some View {
        Text("Hello, World!")
}
}
struct AllContactsListView_Previews: PreviewProvider {
static var previews: some View {
AllContactsListView()
}
}
<file_sep>//
// Initialization.swift
// ChallengeDay60
//
// Created by <NAME> on 23/10/2020.
//
import Foundation
import CoreData
class Initialize: ObservableObject {
let context: NSManagedObjectContext
init(context: NSManagedObjectContext) {
self.context = context
let request: NSFetchRequest<User> = User.fetchRequest()
let count = (try? context.fetch(request).count) ?? 0
print("Core database user count: \(count)")
// if the user count in database is 0, load data to database from URL
if(count == 0) {
// download and create users from JSON file
let allUsers: [JUser] = Bundle.main.decode(stringUrl: "https://www.hackingwithswift.com/samples/friendface.json")
//transfer JUsers to database
for jUser in allUsers {
let user = User(context: context)
user.transferJUserToDatabase(jUser: jUser)
for jFriend in jUser.friends {
let friend = Friend(context: context)
friend.id = jFriend.id
friend.name = jFriend.name
user.addToFriends(friend)
}
try? self.context.save()
}
}
}
}
<file_sep>//
// AddContact.swift
// ConferenceContacts
//
// Created by <NAME> on 06/11/2020.
//
import SwiftUI
struct AddContact: View {
@State private var firstName: String = ""
@State private var surname: String = ""
var allContacts: AllContacts
@Environment(\.presentationMode) var presentationMode
var body: some View {
NavigationView {
VStack {
Circle()
.stroke(Color.blue, lineWidth: 2)
.frame(width: 300, height: 300)
.padding(.horizontal)
TextField("First name", text: $firstName)
TextField("Surname", text: $surname)
Spacer()
}
.padding(.horizontal)
.navigationBarTitle(Text("\(firstName) \(surname)"))
.navigationBarItems(leading: Button("Cancel") {
self.presentationMode.wrappedValue.dismiss()
},
trailing: Button("Save") {
saveContact()
saveData()
self.presentationMode.wrappedValue.dismiss()
})
}
}
func saveContact(){
        // location is not captured in this form yet, so store a placeholder coordinate
        let contact = Contact(id: UUID(), firstName: self.firstName, surname: self.surname, latitude: 0.0, longitude: 0.0)
allContacts.contacts.append(contact)
}
func getDocumnetDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0]
}
func saveData() {
do {
let filename = getDocumnetDirectory().appendingPathComponent("SavedContacts")
let data = try JSONEncoder().encode(self.allContacts.contacts)
try data.write(to: filename, options: [.atomicWrite, .completeFileProtection])
print("Save successful.")
} catch {
print("Unable to save data.")
}
}
}
struct AddContact_Previews: PreviewProvider {
static var previews: some View {
AddContact(allContacts: AllContacts.example)
}
}
<file_sep>//
// Contact.swift
// ConferenceContacts
//
// Created by <NAME> on 06/11/2020.
//
import Foundation
import SwiftUI
struct Contact: Codable, Comparable {
let id: UUID
let firstName: String
let surname: String
let latitude: Double
let longitude: Double
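    // Load this contact's photo, saved as raw image data in the documents directory under the
    // contact's UUID; fall back to a "plus" system image when no file exists yet.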
var image: Image {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
var image: Image = Image(systemName: "plus")
let filename = paths[0].appendingPathComponent("\(self.id)")
if let data = try? Data(contentsOf: filename) {
guard let inputImage = UIImage(data: data) else { return Image(systemName: "plus") }
image = Image(uiImage: inputImage)
}
return image
}
static func < (lhs: Contact, rhs: Contact) -> Bool {
lhs.surname < rhs.surname
}
}
class AllContacts: ObservableObject {
@Published var contacts: [Contact]
init() {
self.contacts = []
}
init(allContacts: [Contact]) {
self.contacts = allContacts
}
}
extension AllContacts {
static var example: AllContacts {
let allContacts = AllContacts()
let contact1 = Contact(id: UUID(), firstName: "Fero", surname: "Dajaky", latitude: 51.0, longitude: 10.0)
let contact2 = Contact(id: UUID(), firstName: "Jozef", surname: "Onaky", latitude: 51.0, longitude: 0.0)
allContacts.contacts.append(contact1)
allContacts.contacts.append(contact2)
return allContacts
}
}
<file_sep>//
// ContentView.swift
// SnowSeeker
//
// Created by <NAME> on 16/12/2020.
//
import SwiftUI
struct ContentView: View {
@State private var filterOption = Resort.SortingOptions.original
@State private var sizeFilter = 0
@State private var priceFilter = 0
@State private var countryFilter = ""
@State private var isShowingSettingsSheet = false
@ObservedObject var favorites = Favorites()
let resorts: [Resort] = Bundle.main.decode("resorts.json")
var countries: Set<String> {
return Set(resorts.map { $0.country } )
}
var filteredResorts: [Resort] {
return self.resorts.filter {
            (sizeFilter == 0 || $0.size == self.sizeFilter) &&
            (priceFilter == 0 || $0.price == self.priceFilter) &&
            (countryFilter == "" || $0.country == self.countryFilter)
}
}
var sortedResorts: [Resort] {
switch filterOption {
case .alphabetical:
return self.filteredResorts.sorted { $0.name < $1.name }
case .country:
return self.filteredResorts.sorted { $0.country < $1.country }
default:
return self.filteredResorts
}
}
var body: some View {
NavigationView {
List(sortedResorts) { resort in
NavigationLink(destination: ResortView(resort: resort)) {
Image(resort.country)
.resizable()
.scaledToFill()
.frame(width: 40, height: 25)
.clipShape(RoundedRectangle(cornerRadius: 5))
.overlay(
RoundedRectangle(cornerRadius: 5)
.stroke(Color.black, lineWidth: 1)
)
VStack(alignment: .leading) {
Text(resort.name)
.font(.headline)
Text("\(resort.runs) runs")
.foregroundColor(.secondary)
}
.layoutPriority(1)
if self.favorites.contains(resort) {
Spacer()
Image(systemName: "heart.fill")
.accessibility(label: Text("This is a favorite resort."))
.foregroundColor(.red)
}
}
}
.navigationBarTitle("Resorts")
.navigationBarItems(trailing: Button(action: {
self.isShowingSettingsSheet = true
}, label: {
Image(systemName: "gear")
.font(.title2)
}))
// this view will be shown after the user runs the app before he select the resort from the list
WelcomeView()
}
.sheet(isPresented: $isShowingSettingsSheet, content: {
SettingsView(filterOption: self.$filterOption, sizeFilter: self.$sizeFilter, priceFilter: self.$priceFilter, countryFilter: self.$countryFilter, countries: self.countries)
})
.environmentObject(favorites)
// // StackNavigationViewStyle will be used for phones
// .phoneOnlyStackNavigationView()
}
}
extension View {
func phoneOnlyStackNavigationView() -> some View {
if UIDevice.current.userInterfaceIdiom == .phone {
return AnyView(self.navigationViewStyle(StackNavigationViewStyle()))
} else {
return AnyView(self)
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// ContentView.swift
// Project11_Playground
//
// Created by <NAME> on 21/10/2020.
//
import SwiftUI
struct PushButton: View {
let title: String
// to be able to access isOn property from the different view, I have to change from @State to @Binding
@Binding var isOn: Bool
var onColors = [Color.red, Color.yellow]
var offColors = [Color(white: 0.6), Color(white: 0.4)]
var body: some View {
Button(title) {
self.isOn.toggle()
}
.padding()
.background(LinearGradient(gradient: Gradient(colors: isOn ? onColors : offColors), startPoint: .top, endPoint: .bottom))
.foregroundColor(.white)
.clipShape(Capsule())
.shadow(radius: isOn ? 0 : 5)
}
}
struct ContentView: View {
@State private var rememberMe = false
@Environment(\.horizontalSizeClass) var sizeClass
var body: some View {
// VStack {
// PushButton(title: "Remember Me", isOn: $rememberMe)
// Text(rememberMe ? "On" : "Off")
// }
if sizeClass == .compact {
return AnyView(VStack {
Text("Active size class:")
Text("COMPACT")
}
.font(.largeTitle))
} else {
return AnyView(HStack {
Text("Active size class:")
Text("REGULAR")
}
.font(.largeTitle))
}
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// User.swift
// ChallengeDay60
//
// Created by <NAME> on 23/10/2020.
//
import Foundation
struct JUser: Identifiable, Codable {
let id: UUID
let isActive: Bool
let name: String
let age: Int
let company: String
let email: String
let address: String
let about: String
let registered: Date
let tags: [String]
let friends: [JFriend]
var formattedDate: String {
let formatter = DateFormatter()
formatter.dateStyle = .long
return formatter.string(from: registered)
}
}
<file_sep>//
// DetailContactView.swift
// ConferenceContacts
//
// Created by <NAME> on 10/11/2020.
//
import SwiftUI
import MapKit
struct DetailContactView: View {
let contact: Contact
@State private var anotation = MKPointAnnotation()
@State private var centerCoordinate = CLLocationCoordinate2D()
var body: some View {
VStack {
contact.image
.resizable()
.frame(width: 300, height: 300)
.clipShape(Circle())
.shadow(radius: 2)
.padding(.horizontal)
Text(contact.firstName)
.font(.title)
Text(contact.surname)
.font(.title)
MapView(centerCoordinate: $centerCoordinate, annotation: anotation)
Spacer()
}
.padding()
.navigationBarTitle(Text("Contact details"))
.onAppear(perform: loadAnotation)
}
func loadAnotation() {
self.anotation.coordinate.latitude = contact.latitude
self.anotation.coordinate.longitude = contact.longitude
self.centerCoordinate = CLLocationCoordinate2D(latitude: contact.latitude, longitude: contact.longitude)
print(anotation.coordinate.latitude)
print(anotation.coordinate.longitude)
}
}
struct DetailContactView_Previews: PreviewProvider {
static var previews: some View {
DetailContactView(contact: Contact(id: UUID(), firstName: "Fero", surname: "Dajaky", latitude: 51.0, longitude: 10.0))
}
}
<file_sep>//
// UnitConversionApp.swift
// UnitConversion
//
// Created by <NAME> on 01/10/2020.
//
import SwiftUI
@main
struct UnitConversionApp: App {
var body: some Scene {
WindowGroup {
ContentView()
}
}
}
<file_sep>//
// Project11_PlaygroundApp.swift
// Project11_Playground
//
// Created by <NAME> on 21/10/2020.
//
import SwiftUI
@main
struct Project11_PlaygroundApp: App {
var body: some Scene {
WindowGroup {
ContentView()
}
}
}
<file_sep>//
// SettingsView.swift
// SnowSeeker
//
// Created by <NAME> on 16/12/2020.
//
import SwiftUI
struct SettingsView: View {
@Environment(\.presentationMode) var presentationMode
@Binding var filterOption: Resort.SortingOptions
@Binding var sizeFilter: Int
@Binding var priceFilter: Int
@Binding var countryFilter: String
@State var countries: Set<String>
var body: some View {
NavigationView {
Form {
Section(header: Text("Sort")) {
Picker("", selection: $filterOption) {
ForEach(Resort.SortingOptions.allCases, id:\.self) { option in
Text(option.rawValue)
.tag(option)
}
}
.pickerStyle(SegmentedPickerStyle())
}
Section(header: Text("Filter")) {
Text("Filter by size:")
Picker("", selection: $sizeFilter) {
Text("All").tag(0)
Text("Small").tag(1)
Text("Average").tag(2)
Text("Large").tag(3)
}
.pickerStyle(SegmentedPickerStyle())
Text("Filter by price:")
Picker("", selection: $priceFilter) {
Text("All").tag(0)
Text("$").tag(1)
Text("$$").tag(2)
Text("$$$").tag(3)
}
.pickerStyle(SegmentedPickerStyle())
Picker("Filter by country:", selection: $countryFilter) {
Text("All").tag("")
ForEach(Array(countries), id:\.self) { country in
Text(country)
}
}
}
}
.navigationBarTitle("Settings")
.navigationBarItems(trailing:
Button(action: {
self.presentationMode.wrappedValue.dismiss()
}, label: {
Text("Done")
}))
}
}
}
//struct SettingsView_Previews: PreviewProvider {
// static var previews: some View {
// SettingsView(filterOption: "default")
// }
//}
<file_sep>//
// ContentView.swift
// TrackIt
//
// Created by <NAME> on 15/10/2020.
//
import SwiftUI
struct Activity: Codable, Identifiable, Equatable {
    // var rather than let so a decoded activity keeps its stored id instead of generating a new one
    var id = UUID()
let activityName: String
let activityDescription: String
var activityCounter: Int = 0
static func == (lhs: Activity, rhs: Activity) -> Bool {
return lhs.id == rhs.id
}
}
class AllActivities: ObservableObject {
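    // Persist the activity list to UserDefaults as JSON whenever it changes,
    // and restore it in init() so activities survive app relaunches.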
@Published var activities = [Activity]() {
didSet {
let encoder = JSONEncoder()
if let encoded = try? encoder.encode(activities) {
UserDefaults.standard.set(encoded, forKey: "Activities")
}
}
}
init() {
if let activities = UserDefaults.standard.data(forKey: "Activities") {
let decoder = JSONDecoder()
if let decoded = try? decoder.decode([Activity].self, from: activities){
self.activities = decoded
return
}
}
self.activities = []
}
}
struct ContentView: View {
@ObservedObject var allActivities = AllActivities()
@State var showingAddActivity = false
var body: some View {
NavigationView {
List {
ForEach(allActivities.activities){ activity in
NavigationLink(destination: ShowActivity(allActivities: allActivities, activity: activity)){
HStack{
Text(activity.activityName)
.font(.headline)
Spacer()
Text("\(activity.activityCounter)")
}
}
}
.onDelete(perform: removeActivity)
}
.navigationBarTitle("Track It")
.navigationBarItems(
leading: EditButton(),
trailing: Button(action: {
self.showingAddActivity = true
}) {
Image(systemName: "plus")
}
)
.sheet(isPresented: $showingAddActivity) {
AddActivity(allActivities: allActivities)
}
}
}
func removeActivity(at offsets: IndexSet){
allActivities.activities.remove(atOffsets: offsets)
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
|
2a2da2e5630964de04d660c3f33a996552223a96
|
[
"Swift",
"Markdown"
] | 37
|
Swift
|
M4t2h3w/100DaysWithSwiftUI
|
3d08e7a52a8e12a8de43687a5a45a02825ef8963
|
bca4103d69c9269c1b0430cfb7afa2c8ccdef108
|
refs/heads/master
|
<repo_name>saketkc/NGS-Stuff<file_sep>/bwa_pssm_gatk_old_without_I.sh
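# Alignment and variant-calling pipeline (BWA-PSSM / bowtie2 / SOAP -> Picard -> samtools -> GATK).
# Usage, as inferred from the positional parameters below:
#   bash bwa_pssm_gatk_old_without_I.sh <aligner: bwa|bowtie2|soap> <read1.fastq> <read2.fastq> <sample_prefix>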
fasta="/data1/reference_build37/human_g1k_v37.fasta"
aln="$4_aln.sam"
if [ "$1" == "bowtie2" ]
then
fasta="$bowtie_fasta"
echo "####################################BOWTIE2-ALN-START################################################"
echo "Starting alignment of $2 at `date`"
starttime=`date +%s`
echo `bowtie2 -x /data2/Amit_Dutt_Lab/human_g1k_v37_bowtie2/human_g1k_v37 -1 $2 -2 $3 -S $aln`
endtime=`date +%s`
echo "Ended alignment of $2 at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################BOWTIE2-ALN-END################################################"
fi
if [ "$1" == "bwa" ]
then
extension="${2##*.}"
filename="${2%.*}"
aln_file1="$filename.sai"
echo "####################################BWA-ALN-START################################################"
echo "Starting alignment of $aln_file1 at `date`"
starttime=`date +%s`
echo `bwa-pssm pssm $fasta $2 > $aln_file1`
endtime=`date +%s`
echo "Ended alignment of $aln_file1 at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################BWA-ALN-END################################################"
extension="${3##*.}"
filename="${3%.*}"
aln_file2="$filename.sai"
echo "####################################BWA-ALN-START################################################"
echo "Starting alignment of $3 at `date`"
starttime=`date +%s`
echo `bwa-pssm pssm $fasta $3 > $aln_file2`
endtime=`date +%s`
echo "Ended alignment of $3 at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################BWA-ALN-END################################################"
echo "####################################SAMFILE-GENERATATION-START################################################"
echo "Starting alignment of $3 at `date`"
starttime=`date +%s`
# bwa sampe -r "@rg\tid:@hwusi-eas100r:6:73:941:1973\tpl:illumina\tlb:lib-rdt\tsm:unknown\tpi:200" /data1/amit_dutt_lab/human_g1k_v37.fasta $aln_file1 $aln_file2 $2 $3 > aln.sam
bwa-pssm sampe -r "@RG\tID:@HWUSI-EAS100R:6:73:941:1973\tPL:ILLUMINA\tLB:LIB-RDT\tSM:UNKNOWN\tPI:200" /data1/reference_build37/human_g1k_v37.fasta $aln_file1 $aln_file2 $2 $3 > $aln
endtime=`date +%s`
echo "Ended alignment of $3 at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAMFILE-GENERATATION-END################################################"
fi
if [ "$1" == "soap" ]
then
fasta="$soap_fasta"
echo "####################################SOAP-ALN-START################################################"
echo "Starting alignment of $2 and $3 at `date`"
starttime=`date +%s`
echo `soap -a $2 -b $3 -D $fasta -o soap_aln_paired.soap -2 soap_aln_single.soap `
endtime=`date +%s`
echo "Ended alignment of $2 and $3 at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SOAP-ALN-END################################################"
echo "####################################SAMFILE-GENERATATION-START################################################"
echo "Converting SOAP to SAM at `date`"
starttime=`date +%s`
echo `perl /usr/bin/soap2sam.pl soap_aln_paired.soap > aln.sam`
endtime=`date +%s`
echo "Ended SOAP to SAM conversion at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAMFILE-GENERATATION-END################################################"
fi
#echo "####################################ADD-READGROUPS-START################################################"
#echo "Adding READ Groups data at `date`"
#starttime=`date +%s`
#echo `java -jar /home/saket/softwares/picard-tools-1.84/AddOrReplaceReadGroups.jar INPUT=aln.sam RGLB=PAIRED_END RGPL=ILLUMINA RGPU=1 RGSM=ACTREC4 OUTPUT=aln.with.readgroups.sam`
#endtime=`date +%s`
#echo "Ended adding READ Groups at `date`"
#((diff_sec=endtime-starttime))
#echo "###TIME-TAKEN###"
#echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
#echo "###h:m:s###"
#echo "####################################ADD-READGROUPS-END################################################"
echo "####################################FixMateInformation-START################################################"
echo "Fixing Mate Information at `date`"
starttime=`date +%s`
aln_fixed_mates="$4_aln.with.readgroups.fixed.sam"
#echo `java -jar -Xmx20G /usr/bin/picard-tools/AddOrReplaceReadGroups.jar INPUT=aln.sam RGLB=PAIRED_END RGPL=ILLUMINA RGPU=1 RGSM=B2 OUTPUT=aln.with.readgroups.sam`
echo `java -Xmx20G -jar /home/saket/softwares/picard-tools-1.84/FixMateInformation.jar VALIDATION_STRINGENCY=SILENT INPUT=$aln OUTPUT=$aln_fixed_mates`
endtime=`date +%s`
echo "Ending Fix Mate Information at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################FixMateInformation-END################################################"
echo "####################################SAMTOOLS-VALIDATION-START################################################"
echo "Adding READ Groups data at `date`"
starttime=`date +%s`
echo `java -jar /home/saket/softwares/picard-tools-1.84/ValidateSamFile.jar I=$aln_fixed_mates O=validate_sam_file_results.txt`
endtime=`date +%s`
echo "Ended adding READ Groups at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAMTOOLS-VALIDATION-END################################################"
echo "####################################SAM-TO-BAM-START################################################"
echo "Converting SAM TO BAM at `date`"
starttime=`date +%s`
aln_fixed_mates_bam="$4_aln.with.readgroups.fixed.bam"
#echo `samtools view -bS aln.with.readgroups.fixed.sam > aln.with.readgroups.bam`
echo `java -Xmx20G -jar /home/saket/softwares/picard-tools-1.84/SamFormatConverter.jar I=$aln_fixed_mates O=$aln_fixed_mates_bam VALIDATION_STRINGENCY=SILENT `
endtime=`date +%s`
echo "Ended SAM TO BAM conversion at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAM-TO-BAM-END################################################"
echo "####################################RMDUP-START################################################"
echo "Starting PCR removal at `date`"
starttime=`date +%s`
aln_rmdup_bam="$4_aln.with.readgroups.fixed.rmdup.bam"
echo `samtools rmdup $aln_fixed_mates_bam $aln_rmdup_bam`
endtime=`date +%s`
echo "Ended PCR removal at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################RMDUP-END################################################"
echo "####################################SAM-SORT-START################################################"
echo "Starting PCR removal at `date`"
starttime=`date +%s`
aln_sorted_bam="$4_aln.with.readgroups.fixed.rmdup.sorted"
echo `samtools sort $aln_rmdup_bam $aln_sorted_bam`
endtime=`date +%s`
echo "Ended PCR removal at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAM-SORT-END################################################"
echo "####################################SAM-INDEX-START################################################"
echo "Starting indexing at `date`"
starttime=`date +%s`
aln_sorted_bam="$4_aln.with.readgroups.fixed.rmdup.sorted.bam"
echo `samtools index $aln_sorted_bam`
endtime=`date +%s`
echo "Ended indexing at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAM-INDEX-END################################################"
echo "####################################SAM-MPILEUP-START################################################"
echo "Starting mpileup at `date`"
starttime=`date +%s`
bcf="$4_samtools_mpileup.raw.bcf"
echo `samtools mpileup -uf $fasta $aln_sorted_bam | bcftools view -bvcg - > $bcf`
endtime=`date +%s`
echo "Ended mpileup at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAM-MPILEUP-END################################################"
echo "####################################GATK-INDEL-START################################################"
echo "Starting indelaligner at `date`"
starttime=`date +%s`
vcf="$4_gatk_unified_genotyper.vcf"
log="$4_gatk_unified_genotyper.log"
java -Xmx20G -jar ~/GenomeAnalysisTK-1.6-9-g47df7bb/GenomeAnalysisTK.jar -T UnifiedGenotyper -R /data1/reference_build37/human_g1k_v37.fasta -I $aln_sorted_bam -o $vcf --genotype_likelihoods_model BOTH --annotateNDA -l INFO -log $log
#echo `java -Xmx20G -jar /home/saket/softwares/GenomeAnalysisTK-2.3-9/GenomeAnalysisTK.jar -I aln.with.readgroups.baq.rmdup.sorted.bam -R $fasta -T RealignerTargetCreator -o forIndelRealigner.intervals -et NO_ET -K /home/saket/softwares/saket.kumar_iitb.ac.in.key`
endtime=`date +%s`
echo "Ended IndelRealigner at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################GATK-INDEL-END################################################"
<file_sep>/find_common_reads_from_bam.py
import pysam
def calculate_intersection(file1,file2,file3,file4):
bwa = pysam.Samfile(file1,"rb")
pssm = pysam.Samfile(file2,"rb")
unmapped = pysam.Samfile(file3,"rb")
unmapped_pssm = pysam.Samfile(file4,"rb")
list1 = [x.qname for x in bwa]
list2 = [x.qname for x in pssm]
list3 = [x.qname for x in unmapped]
list4 = [x.qname for x in unmapped_pssm ]
set1 = set(list1)
set2 = set(list2)
set3 = set(list3)
set4 = set(list4)
unmapped_intersect_pssm = set3.intersection(set2)
unmapped_pssm_intersect_bwa = set4.intersection(set1)
unmapped_intersect_unmapped_pssm = set3.intersection(set4)
return str(len(list1))+"\t"+str(len(list2))+"\t"+str(len(list3))+"\t"+str(len(list4))+"\t"+str(len(unmapped_intersect_pssm))+"\t"+str(len(unmapped_pssm_intersect_bwa))+"\t"+str(len(unmapped_intersect_unmapped_pssm))
t755_bwa = "/data2/Cervical_fastq/T755/T755_bwa_sngr_aln.with.readgroups.fixed.rmdup.sorted.bam"
t755_pssm = "/data2/Cervical_fastq/T755/T755_bwapssm_sngr_aln.with.readgroups.fixed.rmdup.sorted.bam"
t755_unmapped = "/data2/Cervical_fastq/unmapped_bams/T755_bwa_sngr_unmapped.bam"
t755_unmapped_pssm = "/data2/Cervical_fastq/T755/T755_bwapssm_sngr_unmapped.bam"
t783_bwa = "/data2/Cervical_fastq/T785/T785_bwa_sngr_aln.with.readgroups.fixed.rmdup.bam"
t783_pssm = "/data2/Cervical_fastq/T785/T785_bwappsm_aln.with.readgroups.fixed.bam"
t783_unmapped ="/data2/Cervical_fastq/unmapped_bams/T785_bwa_sngr_unmapped.bam"
t783_unmapped_pssm = "/data2/Cervical_fastq/T785/T785_bwapssm_sngr_unmapped.bam"
t837_bwa = "/data2/Cervical_fastq/T837/T837_bwa_sngr_aln.with.readgroups.fixed.rmdup.sorted.bam"
t837_pssm = "/data2/Cervical_fastq/T837/T837_bwapssm_sngr_aln.with.readgroups.fixed.rmdup.sorted.bam"
t837_unmapped = "/data2/Cervical_fastq/unmapped_bams/T837_bwa_sngr_unmapped.bam"
t837_unmapped_pssm = "/data2/Cervical_fastq/T837/T837_bwapssm_sngr_unmapped.bam"
t887_bwa = "/data2/Cervical_fastq/T887/T887_bwapssm_sngr_aln.with.readgroups.fixed.rmdup.sorted.bam"
t887_pssm = "/data2/Cervical_fastq/T887/T887_bwapssm_sngr_aln.with.readgroups.fixed.rmdup.sorted.bam"
t887_unmapped = "/data2/Cervical_fastq/unmapped_bams/T887_bwa_sngr_unmapped.bam"
t887_unmapped_pssm = "/data2/Cervical_fastq/T887/T887_bwapssm_sngr_unmapped.bam"
#t937_bwa = "/data2/Cervical_fastq/T937/T937_bwa_aln.with.readgroups.fixed.rmdup.sorted.bam"
#t937_pssm = "/data2/Cervical_fastq/T937/T937_bwa"
bams = [
{"bwa":t755_bwa,"pssm":t755_pssm,"unmapped":t755_unmapped, "unmapped_pssm": t755_unmapped_pssm, "name":"T755"},
{"bwa":t783_bwa,"pssm":t783_pssm,"unmapped":t783_unmapped, "unmapped_pssm": t783_unmapped_pssm, "name": "T783"},
{"bwa":t837_bwa,"pssm":t837_pssm,"unmapped":t837_unmapped, "unmapped_pssm": t837_unmapped_pssm, "name": "T837"},
{"bwa":t887_bwa,"pssm":t887_pssm,"unmapped":t887_unmapped, "unmapped_pssm": t887_unmapped_pssm, "name": "T887"},
]
print "Sample\tBWA\tPSSM\tUnmapped-BWA\tUnmapped-PSSM\tUnmapped-BWA-i-PSSM\tUnmapped-PSSM-i-BWA\tUnmapped-BWA-i-Unmapped-PSSM"
for dicts in bams:
print dicts["name"]+ "\t", calculate_intersection(dicts["bwa"],dicts["pssm"],dicts["unmapped"], dicts["unmapped_pssm"])
<file_sep>/convert_fastq.py
#!/usr/bin/env python
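# Convert a FASTQ file from Illumina 1.3+ (phred+64) quality encoding to standard
# Sanger (phred+33) encoding; the output file gets a "_sngr" suffix.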
import sys
filename = sys.argv[1]
filetowrite = filename.split(".")[0]+"_sngr"+".fastq"
from Bio import SeqIO
SeqIO.convert(filename,"fastq-illumina",filetowrite,"fastq")
<file_sep>/new_analysis/nucleotide_content.py
#!/usr/bin/env python
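# Count overlapping dinucleotide occurrences (pairs of A/T/G/C/N) over every record in a
# FASTA file and print them as a row-normalised 5x5 frequency matrix.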
import re,sys
from Bio import SeqIO
input_fasta = sys.argv[1]
nucleotides=['A','T','G','C','N']
motifs_count = {i+j:0 for i in nucleotides for j in nucleotides}
for rec in SeqIO.parse(input_fasta,"fasta"):
for motif in motifs_count.keys():
position = 0
regexp = re.compile(motif)
while True:
matches = regexp.search(rec.seq.tostring(), position)
if matches is None:
break
position= matches.start() + 1
motifs_count[motif]+=1
#print motifs_count
all_matrix = [i+j for i in nucleotides for j in nucleotides]
row1 ="A\t"
row2 ="T\t"
row3="G\t"
row4 = "C\t"
row5 = "N\t"
row0 = "\tA\tT\tG\tC\tN"
row_A = [motifs_count['A'+str(i)] for i in nucleotides]
#print row_A
sum_A =sum(row_A)*1.0
row_T = [motifs_count['T'+str(i)] for i in nucleotides]
sum_T =sum(row_T)*1.0
row_G = [motifs_count['G'+str(i)] for i in nucleotides]
sum_G =sum(row_G)*1.0
row_C = [motifs_count['C'+str(i)] for i in nucleotides]
sum_C =sum(row_C)*1.0
row_N = [motifs_count['N'+str(i)] for i in nucleotides]
sum_N =sum(row_N)*1.0
try:
row1 += ("\t").join("%0.3f" % (r/sum_A) for r in row_A)
except ZeroDivisionError:
row1 += ("\t").join("0" for r in row_A)
try:
row2 += ("\t").join("%0.3f" % (r/sum_T) for r in row_T)
except ZeroDivisionError:
row2 += ("\t").join("0" for r in row_T)
try:
row3 += ("\t").join("%0.3f" % (r/sum_G) for r in row_G)
except ZeroDivisionError:
row3 += ("\t").join("0" for r in row_G)
try:
row4 += ("\t").join("%0.3f" % (r/sum_C) for r in row_C)
except ZeroDivisionError:
row4 += ("\t").join("0" for r in row_C)
try:
row5 += ("\t").join("%0.3f" % (r/sum_N) for r in row_N)
except ZeroDivisionError:
row5 += ("\t").join("0" for r in row_N)
print row0
print row1
print row2
print row3
print row4
print row5
<file_sep>/separate_variant_csvs.py
import sys, csv
filepath = sys.argv[1]
reader = csv.reader(open(filepath,"r"),delimiter=",")
# assumption: write matches next to the input file, mirroring the naming used in novelvaraints.py
writer_synonymous = csv.writer(open(filepath.split(".")[0] + "_synonymous_variants.csv", "w"))
for row in reader:
    # assumption: the variant effect annotation lives in the last column of each row
    if row and "synonymous" in row[-1].lower():
        writer_synonymous.writerow(row)
<file_sep>/unmapped.py
import subprocess,sys
prefix=sys.argv[2]
filename=sys.argv[1]
command = "bwa aln -b Virus_fasta_reference/Virus_Fasta.fa " + filename +" > " + prefix+".sai"
command1 = "bwa samse Virus_fasta_reference/Virus_Fasta.fa "+prefix+".sai " + filename +" | samtools view - -Sb -o " + prefix+"_all.bam"
command2 = "samtools view "+ prefix+"_all.bam"+ " -b -f 0x04 -o " + prefix+"_unmapped.bam"  # -f 0x04 keeps only reads flagged as unmapped
command3 = "samtools view "+ prefix+"_all.bam"+ " -b -F 0x04 -o " + prefix+"_mapped.bam"
c1=subprocess.Popen(command,shell=True)
out,err=c1.communicate()
c1=subprocess.Popen(command1,shell=True)
out,err=c1.communicate()
c1=subprocess.Popen(command2,shell=True)
out,err=c1.communicate()
c1=subprocess.Popen(command3,shell=True)
out,err=c1.communicate()
<file_sep>/guess_fastq_platform.py
#!/usr/bin/env python
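# Guess whether a FASTQ file uses Sanger, Solexa or Illumina 1.3+ quality encoding
# by scanning the quality scores observed in a sample of reads.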
from Bio import SeqIO
import sys
from itertools import islice
def get_format(seqio):
nnuc = 50000
start = 0
skip = 4
max_quality = 40
min_seen = 128
max_seen = 0
nuc_count = 0
seq_count = 0
possible_encodings = set(('sanger', 'solexa', 'illumina'))
possible_encodings = set(possible_encodings)
sanger_min = 33
solexa_min = 59
illumina_min = 64
solexa_threshold = solexa_min - sanger_min
illumina_threshold = illumina_min - sanger_min
seqio_slice = islice(seqio, start, None, skip + 1)
for record in seqio_slice:
seq_count += 1
qualities = record.letter_annotations["phred_quality"]
min_seen = min(min_seen, min(qualities))
max_seen = max(max_seen, max(qualities))
# Eliminate possibilities
if 'sanger' in possible_encodings and max_seen > max_quality:
possible_encodings.remove('sanger')
if 'solexa' in possible_encodings and min_seen < solexa_threshold:
return 'sanger'
if 'illumina' in possible_encodings and min_seen < illumina_threshold:
possible_encodings.remove('illumina')
# Check if we finished early
if len(possible_encodings) == 1:
return possible_encodings.pop()
elif len(possible_encodings) == 0:
raise ValueError("Could not identify FASTQ file %s: eliminated all possible encodings." % (filename,))
if nnuc:
nuc_count += len(record)
if nuc_count >nnuc:
break
# If no Illumina-encoded quality less than zero has been seen,
# then eliminate solexa and return illumina.
if min_seen >= illumina_threshold:
return 'illumina'
else:
return 'solexa'
possible_encodings = set(("solexa", "sanger", "illumina"))
filename = sys.argv[1]
seqio = SeqIO.parse(filename, "fastq-sanger")
print get_format(seqio)
<file_sep>/new_analysis/gzip_everything.sh
## A script to gzip
## files in a given folder on maximum possible cores with maximum compression
## Run as :
## user@laptop$ bash gzip_everything.sh foldername
# find -print0 / xargs -0 protects you from whitespace in filenames
# xargs -n 1 means one gzip process per file
# xargs -P specifies the number of jobs
# gzip -9 means maximum compression
#source : http://stackoverflow.com/questions/4341442/gzip-with-all-cores
CORES=$(grep -c '^processor' /proc/cpuinfo)
find $1 -type f -print0 | xargs -0 -n 1 -P $CORES gzip -9
<file_sep>/README.md
NGS-Stuff
=========
My Experiments with python[BioPython] for NGS Analysis
<file_sep>/convert.sh
folders=`ls -d */`
for folder in $folders; do
path="/data2/Mulherkar_Lab_Data/$folder"
cd $path
fastqs=`ls T*`
for fastq in $fastqs; do
python /usr/bin/convert_fastq.py $fastq
done
done
<file_sep>/new_analysis/combine-fasta.py
import os
import glob
dirs=[x[0] for x in os.walk("/data1/Virus_NCBI/")]
all_files=[]
for directory in dirs:
os.chdir(directory)
for files in glob.glob("*.fna"):
all_files.append(directory+'/'+files)
with open("/data1/Virus_NCBI/master_virus.fa","w") as outfile:
for fname in all_files:
with open(fname) as infile:
outfile.write(infile.read())
#print all_files
<file_sep>/unmapped.sh
unmapped="$2.unmapped.bam"
sai="$2.virus.sai"
all_virus="$2.virus.bam"
unmapped_virus="$2.virus.unmappped.bam"
mapped_virus="$2.virus.mapped.bam"
samtools view -f 0x04 -h -b $1 -o $unmapped
bwa aln -b $3 $unmapped > $sai
bwa samse -r "@RG\tID:$2\tPL:ILLUMINA" $3 $sai $unmapped | samtools view - -Sb -o $all_virus; samtools view $all_virus -b -F 0x04 -o $mapped_virus; samtools view $all_virus -b -f 0x04 -o $unmapped_virus;
echo "MAPPED READS"
samtools view -c $mapped_virus
samtools view -c $unmapped_virus
<file_sep>/sam_to_fastq_or_fasta.py
import os
import sys
import pysam
from Bio import SeqIO, Seq, SeqRecord
def convert_to_fasta(in_file):
out_file = "%s.fa" % os.path.splitext(in_file)[0]
with open(out_file, "w") as out_handle:
SeqIO.write(bam_to_fasta(in_file), out_handle, "fasta")
def bam_to_fasta(in_file):
bam_file = pysam.Samfile(in_file, "rb")
for read in bam_file:
seq = Seq.Seq(read.seq)
if read.is_reverse:
seq = seq.reverse_complement()
rec = SeqRecord.SeqRecord(seq, read.qname, "", "")
yield rec
if __name__ == "__main__":
convert_to_fasta(*sys.argv[1:])
<file_sep>/new_analysis/get_virus_fasta_from_ebi.py
#!/usr/bin/env python
import urllib2
import re
txt_ftp_location = "ftp://ftp.ncbi.nlm.nih.gov/genomes/GENOME_REPORTS/viruses.txt"
handle = open("papillomavirus.txt","r")
response = handle.read()
handle.close()
project_ids = [line for line in response.splitlines()]
outputpath="/home/saket/my-softwares/NGS-Stuff/new_analysis/fastas/"
#project_ids=["X74475"]
for id in project_ids:
try:
handle = urllib2.urlopen("http://www.ebi.ac.uk/ena/data/view/"+str(id)+"&display=fasta")
# print handle.geturl()
record = handle.read()
# print type(record)
# handle.close()
# print record
if record=="":
print record
print "Something not ok with : "+str(id)
else:
fasta=open(outputpath+str(id)+".fa","w")
fasta.write(record)
fasta.close()
except:
print "SOMETHING not ok with id : " + str(id)
<file_sep>/comparefiles.py
import sys,csv
# read both files fully so they can be counted and then compared
rows1 = list(csv.reader(open(sys.argv[1]), delimiter=","))
rows2 = list(csv.reader(open(sys.argv[2]), delimiter=","))
row_count1 = len(rows1)
row_count2 = len(rows2)
# assumption: the goal is to report rows that appear in both files
for row1 in rows1:
    for row2 in rows2:
        if row1 == row2:
            print row1
<file_sep>/new_analysis/all_pssms.sh
files=`find fastas/ -type f -name "*.fa"`
path="/home/saket/my-softwares/NGS-Stuff/new_analysis"
for file in $files ; do
input="$path/$file"
output="$path/$file.pssm"
python nucleotide_content.py $input > $output
done
<file_sep>/ngs_pipeline.py
import argparse
from Bio.Sequencing.Applications import BwaCommandline
parser = argparse.ArgumentParser()
parser.add_argument("-r1", "--read1", help="read1.fastq path", required=True)
parser.add_argument("--read2", help="read2.fastq path", required=True)
parser.add_argument("--reference",help="Absolute path to reference genome")
parser.parse_args()
<file_sep>/cleanup.sh
cd $1
rm -rf *.bcf
rm -rf *.bam
rm -rf *.bai
rm -rf *.txt
rm -rf *.sai
rm -rf *.sam
rm -rf *.log
rm -rf *.sam
<file_sep>/get_bacteria_fasta_from_ncbi.py
import urllib2
from Bio import Entrez,SeqIO
import csv
txt_ftp_location = "ftp://ftp.ncbi.nlm.nih.gov/genomes/GENOME_REPORTS/prok_reference_genomes.txt"
#txt_ftp_location = "ftp://ftp.ncbi.nlm.nih.gov/genomes/GENOME_REPORTS/viruses.txt"
handle = urllib2.urlopen(txt_ftp_location)
response = handle.read()
reader = csv.reader(response.splitlines(), delimiter='\t')
project_ids =[]
outputpath="/data2/Bacteria_Fasta/"
for row in reader:
project_ids.append(row[3].strip())
Entrez.email = "<EMAIL>"
for id in project_ids:
handle = Entrez.elink(dbfrom="bioproject", id=id, linkname="bioproject_nuccore")
record = Entrez.read(handle)
handle.close()
print record
"""if record[0]["LinkSetDb"]:
id_list = record[0]["LinkSetDb"][0]["Link"]
nuccore_ids = []
for link in id_list:
nuccore_ids.append(link['Id'])
handle = Entrez.efetch(db="nuccore", id=nuccore_ids, rettype="fasta", retmode="text")
records = list(SeqIO.parse(handle, "fasta"))
handle.close()
# Save them all in one fasta file
all_ids = (".").join(nuccore_ids)
SeqIO.write(records, outputpath+"bioprojectId:"+str(id) +"-len:"+ str(len(records)) + "-nuccore-ids:" +all_ids +"-sequences.fasta", "fasta")
"""
<file_sep>/novelvaraints.py
import csv,sys
filepath = sys.argv[1]
novelvariants_filepath = filepath.split(".")[0]+"_novel_variants.csv"
reader= csv.reader(open(filepath,"r"),delimiter=",")
writer = csv.writer(open(novelvariants_filepath,"w"))
for row in reader:
if row[8]=="":
writer.writerow(row)
#writer.close()
<file_sep>/new_analysis/cogent-analysis.py
#!/usr/bin/env python
from cogent import LoadSeqs, DNA
from cogent.core.usage import DinucUsage
import sys
input_fasta = "human.fasta"#sys.argv[1]
nucleotides = ['A','G','C','U']
fasta = LoadSeqs(input_fasta, moltype=DNA,aligned=False,format='fasta')
print fasta[0]
#for rec in fasta:
# print rec
#du = DinucUsage(y_pseudo_seq, Overlapping=True)
<file_sep>/bwa_old.sh
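# Plain BWA paired-end pipeline: align, fix mate information, remove PCR duplicates, sort and
# index, call variants with samtools mpileup, then extract unmapped reads and re-align them
# against the virus reference.
# Usage, as inferred from the positional parameters: bash bwa_old.sh <read1.fastq> <read2.fastq> <sample_prefix>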
fasta="/data1/reference_build37/human_g1k_v37.fasta"
aln="$3_aln.sam"
extension="${1##*.}"
filename="${1%.*}"
aln_file1="$filename.sai"
echo "####################################BWA-ALN-START################################################"
echo "Starting alignment of $aln_file1 at `date`"
starttime=`date +%s`
echo `bwa aln -t 16 $fasta $1 > $aln_file1`
endtime=`date +%s`
echo "Ended alignment of $aln_file1 at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################BWA-ALN-END################################################"
extension="${2##*.}"
filename="${2%.*}"
aln_file2="$filename.sai"
echo "####################################BWA-ALN-START################################################"
echo "Starting alignment of $2 at `date`"
starttime=`date +%s`
echo `bwa aln -t 16 $fasta $2 > $aln_file2`
endtime=`date +%s`
echo "Ended alignment of $2 at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################BWA-ALN-END################################################"
echo "####################################SAMFILE-GENERATATION-START################################################"
echo "Starting alignment of $3 at `date`"
starttime=`date +%s`
# bwa sampe -r "@rg\tid:@hwusi-eas100r:6:73:941:1973\tpl:illumina\tlb:lib-rdt\tsm:unknown\tpi:200" /data1/amit_dutt_lab/human_g1k_v37.fasta $aln_file1 $aln_file2 $2 $3 > aln.sam
bwa sampe -r "@RG\tID:@HWUSI-EAS100R:6:73:941:1973\tPL:ILLUMINA\tLB:LIB-RDT\tSM:$3\tPI:200" /data1/reference_build37/human_g1k_v37.fasta $aln_file1 $aln_file2 $1 $2 > $aln
endtime=`date +%s`
echo "Ended alignment of $3 at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAMFILE-GENERATATION-END################################################"
echo "####################################FixMateInformation-START################################################"
echo "Fixing Mate Information at `date`"
starttime=`date +%s`
aln_fixed_mates="$3_aln.with.readgroups.fixed.sam"
#echo `java -jar -Xmx20G /usr/bin/picard-tools/AddOrReplaceReadGroups.jar INPUT=aln.sam RGLB=PAIRED_END RGPL=ILLUMINA RGPU=1 RGSM=B2 OUTPUT=aln.with.readgroups.sam`
echo `java -Xmx20G -jar /home/saket/softwares/picard-tools-1.84/FixMateInformation.jar VALIDATION_STRINGENCY=SILENT INPUT=$aln OUTPUT=$aln_fixed_mates`
endtime=`date +%s`
echo "Ending Fix Mate Information at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################FixMateInformation-END################################################"
echo "####################################SAMTOOLS-VALIDATION-START################################################"
echo "Adding READ Groups data at `date`"
starttime=`date +%s`
echo `java -jar /home/saket/softwares/picard-tools-1.84/ValidateSamFile.jar I=$aln_fixed_mates O=validate_sam_file_results.txt`
endtime=`date +%s`
echo "Ended adding READ Groups at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAMTOOLS-VALIDATION-END################################################"
echo "####################################SAM-TO-BAM-START################################################"
echo "Converting SAM TO BAM at `date`"
starttime=`date +%s`
aln_fixed_mates_bam="$3_aln.with.readgroups.fixed.bam"
#echo `samtools view -bS aln.with.readgroups.fixed.sam > aln.with.readgroups.bam`
echo `java -Xmx20G -jar /home/saket/softwares/picard-tools-1.84/SamFormatConverter.jar I=$aln_fixed_mates O=$aln_fixed_mates_bam VALIDATION_STRINGENCY=SILENT `
endtime=`date +%s`
echo "Ended SAM TO BAM conversion at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAM-TO-BAM-END################################################"
echo "####################################RMDUP-START################################################"
echo "Starting PCR removal at `date`"
starttime=`date +%s`
aln_rmdup_bam="$3_aln.with.readgroups.fixed.rmdup.bam"
echo `samtools rmdup $aln_fixed_mates_bam $aln_rmdup_bam`
endtime=`date +%s`
echo "Ended PCR removal at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################RMDUP-END################################################"
echo "####################################SAM-SORT-START################################################"
echo "Starting PCR removal at `date`"
starttime=`date +%s`
aln_sorted_bam="$3_aln.with.readgroups.fixed.rmdup.sorted"
echo `samtools sort -@ 16 -m 800M $aln_rmdup_bam $aln_sorted_bam`
endtime=`date +%s`
echo "Ended PCR removal at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAM-SORT-END################################################"
echo "####################################SAM-INDEX-START################################################"
echo "Starting indexing at `date`"
starttime=`date +%s`
aln_sorted_bam="$3_aln.with.readgroups.fixed.rmdup.sorted.bam"
echo `samtools index $aln_sorted_bam`
endtime=`date +%s`
echo "Ended indexing at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAM-INDEX-END################################################"
echo "####################################SAM-MPILEUP-START################################################"
echo "Starting mpileup at `date`"
starttime=`date +%s`
bcf="$3_samtools_mpileup.raw.bcf"
echo `samtools mpileup -uf $fasta $aln_sorted_bam | bcftools view -bvcg - > $bcf`
endtime=`date +%s`
echo "Ended mpileup at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################SAM-MPILEUP-END################################################"
echo "####################################UNMAPPED_READS-EXTRACT-START################################################"
echo "Starting extraction at `date`"
starttime=`date +%s`
unmapped="$3_unmapped_bam.bam"
echo `samtools view -f 0x04 -h -b $aln_sorted_bam -o $unmapped`
endtime=`date +%s`
echo "Ended extraction at `date`"
((diff_sec=endtime-starttime))
echo "###TIME-TAKEN###"
echo - | awk '{printf "%d:%d:%d","'"$diff_sec"'"/(60*60),"'"$diff_sec"'"%(60*60)/60,"'"$diff_sec"'"%60}'
echo "###h:m:s###"
echo "####################################UNMAPPED_READS-EXTRACT-END################################################"
virus_sai="$1.virus.sai";
virus_unmapped="$1.virus.unmapped.bam"
virus_mapped="$1.virus.mapped.bam"
bwa aln -b /data2/Virus_Fasta/Virus_Fasta.fa $unmapped > $virus_sai;
bwa samse /data2/Virus_Fasta/Virus_Fasta.fa $virus_sai $unmapped | samtools view - -Sb -f 0x04 -o $virus_unmapped | samtools view - -Sb -F 0x04 -o $virus_mapped;
<file_sep>/simulate_reads.py
import os,shutil
import sys
import subprocess
fastq1 = sys.argv[1]
fastq2 = sys.argv[2]
name = sys.argv[3]
simulated1 = fastq1+".simulated"
simulated2 = fastq2+".simulated"
def simulate_reads(substitutionrate=0.001,insertsize=250,insertsd=60,simulate_minindellen=0,simulate_maxindellen=0,simulate_duplications=0,simulate_numsubstitutions=0):  # defaults of 0 are assumptions added so the signature is valid Python
command = "stampy -g /data1/reference_index/human_g1k_v37 -h /data1/reference_index/human_g1k_v37 -S --inde "+ fastq1+","+fastq2
stdout=open("stampystdout.log","wb")
stderr=open("stampystderr.log","wb")
proc = subprocess.Popen( args=command, shell=True, stderr=stderr.fileno(),stdout=stdout.fileno() )
returncode = proc.wait()
stdout.close()
stderr.close()
<file_sep>/new_analysis/create_PSSM.py
#!/usr/bin/env python
import sys, re
from Bio import SeqIO
import matplotlib.pyplot as plt
class GetPSSM:
def __init__( self, filelocation, fastq_format ):
self.filelocation = filelocation
self.fastq_format = fastq_format
self.is_casava = False
self.nucleotides=['A','G','T','C','N']
self.per_base_count = {}
self.read_wise_motif_count={}
self.read_wise_motif_count = {}
self.records = None
self.casava_regexp = re.compile('@.* [^:]*:N:[^:]*:')
self.fails_casava = re.compile('@.* [^:]*:Y:[^:]*:')
assert fastq_format in ['fastq', 'fastq-sanger', 'fastq-illumina', 'fastq-solexa']
def run_index( self ):
self.fastq_index = SeqIO.index(self.filelocation, self.fastq_format)
#self.set_casava()
#self.filter_casava()
self.records = [self.fastq_index[index] for index in self.fastq_index.keys()]
print len(self.records)
return self.records
def guess_read_length(self):
try:
self.read_lengths = [len(rec.seq) for rec in self.records]
self.read_length = max(self.read_lengths)
self.total_bases = sum(self.read_lengths)
return self.read_length
except:
raise "10 reads should be present"
def read_in_memory( self ):
self.records = [rec for rec in SeqIO.parse(self.filelocation, self.fastq_format)]
return self.records
def create_motif_regex(self, motif):
return re.compile(motif)
def get_read_wise_motif_content( self, per_base_content=True):
self.per_position_base_content = {i:{n:0 for n in self.nucleotides} for i in range(0,self.read_length)}
for i,rec in enumerate(self.records):
print i
for pos,nuc in enumerate(rec.seq.tostring()):
self.per_position_base_content[pos][nuc]+=1
if per_base_content:
base_count = {n:rec.seq.tostring().count(n) for n in self.nucleotides}
self.per_base_count[i] = base_count
motifs_count = {i+j:0 for i in self.nucleotides for j in self.nucleotides }
for motif in motifs_count.keys():
position=0
regexp = self.create_motif_regex(motif)
while True:
matches = regexp.search(rec.seq.tostring(), position)
if matches is None:
break
position= matches.start() + 1
motifs_count[motif]+=1
self.read_wise_motif_count[i]= motifs_count
return self.read_wise_motif_count
def create_cpg_matrix(self ):
motifs_count = {i+j:0 for i in self.nucleotides for j in self.nucleotides }
for readnumber in self.read_wise_motif_count.keys():
for motif in motifs_count.keys():
motifs_count[motif]+=self.read_wise_motif_count[readnumber][motif]
all_matrix = [i+j for i in self.nucleotides for j in self.nucleotides]
row1 ="A\t"
row2 ="T\t"
row3="G\t"
row4 = "C\t"
row5 = "N\t"
row0 = "\tA\tT\tG\tC\tN"
row_A = [motifs_count['A'+str(i)] for i in self.nucleotides]
sum_A =sum(row_A)*1.0
row_T = [motifs_count['T'+str(i)] for i in self.nucleotides]
sum_T =sum(row_T)*1.0
row_G = [motifs_count['G'+str(i)] for i in self.nucleotides]
sum_G =sum(row_G)*1.0
row_C = [motifs_count['C'+str(i)] for i in self.nucleotides]
sum_C =sum(row_C)*1.0
row_N = [motifs_count['N'+str(i)] for i in self.nucleotides]
sum_N =sum(row_N)*1.0
row1 += ("\t").join("%0.3f" % (r/sum_A) for r in row_A)
row2 += ("\t").join("%0.3f" % (r/sum_T) for r in row_T)
row3 += ("\t").join("%0.3f" % (r/sum_G) for r in row_G)
row4 += ("\t").join("%0.3f" % (r/sum_C) for r in row_C)
        row5 += ("\t").join(("%0.3f" % (r/sum_N)) if sum_N else "0" for r in row_N)
print row0
print row1
print row2
print row3
print row4
print row5
return True
def set_casava( self) :
key = self.fastq_index.keys()[1]
#print key
#print self.fastq_index[key]
if self.casava_regexp.match(key):
print "CASAVA"
self.is_casava=True
return True
def filter_casava( self ):
if self.is_casava:
for key in self.fastq_index:
print key
if self.fails_casava.match(key):
print key
return True
def get_sequences( self ):
for key in self.fastq_index:
print self.fastq_index[key]
if __name__=="__main__":
get_pssm = GetPSSM(sys.argv[1], "fastq")
#get_pssm.read_in_memory()
get_pssm.run_index()
get_pssm.guess_read_length()
    print "READ length done"
get_pssm.get_read_wise_motif_content()
read_wise_motif_count = get_pssm.read_wise_motif_count
get_pssm.create_cpg_matrix()
"""
filter motif in get
print get_pssm.per_base_count
base_content = get_pssm.per_position_base_content
for i in base_content.keys():
for j in base_content[i].keys():
if base_content[i][j] !=0:
print i,j,base_content[i][j]
#get_pssm.run_index()
# for keys in get_pssm.run_index():
# print keys
"""
|
b7f9edc41059092c45d6478aff895be8b6b4391c
|
[
"Markdown",
"Python",
"Shell"
] | 24
|
Shell
|
saketkc/NGS-Stuff
|
90c7c67fbac5a5dc534d4c1f40ab2374f0fd271e
|
376927426ca23fae17a42edd290b733ce6d85b7c
|
refs/heads/master
|
<file_sep><?php
function start_session($session_name) {
if (session_status() == PHP_SESSION_NONE) {
session_name(sha1($session_name));
session_start();
}
}
function getCategories(){
$sql = "SELECT * FROM navigation";
$result = mysql_query($sql);
$categories = "";
if ($result && mysql_num_rows($result) > 0) {
while ($cate = mysql_fetch_assoc($result)) {
$categories .= "<h3><a href='?category=" . $cate['navid'] . "'>" . "<div class='f_category'>" . $cate['category'] . "</div></a></h3>";
$categories .= "<p>" . $cate['desc'] . "</p>";
$categories .= "<span class='post_count'>" . $cate['post_count'] . " Posts" . "</span><br><Br>";
}
} return $categories;
}
function printPosts($category){
$sql = "SELECT * FROM forum_posts WHERE cat_id = $category";
$result = mysql_query($sql);
$content = "";
if ($result && mysql_num_rows($result) > 0) {
while ($post = mysql_fetch_assoc($result)) {
$authorid = $post['authorid'];
$getusername = mysql_query("SELECT * FROM users WHERE id = $authorid");
while ($author = mysql_fetch_assoc($getusername)) {
$user_name = $author['fname'] . " " . $author['lname'];
}
$content .= "<h3><div class='post_title'>" . $post['title'] . "</div></h3>";
$content .= "<p><div class='post_details'>" . "By: {$user_name} ON-" . date("Y/m/d : H-i-s",$post['unix']) . "</div></p>";
}
}
return $content;
}
function setForumPost($category,$uid,$post_content,$title){
$unix = time();
$sql = "INSERT INTO forum_posts VALUES('','$category',$uid,'$unix','$post_content','$title')";
$result = mysql_query($sql);
if ($result && mysql_affected_rows() > 0) {
header("location: ?category={$category}");
}
}
function setCat($name,$title,$category,$desc){
$sql = "INSERT INTO navigation VALUES('','$title','$name','$category','$desc','0')";
$result = mysql_query($sql);
if ($result && mysql_affected_rows() > 0) {
return "Category saved!";
}
}
function printNav(){
$sql = "SELECT * FROM navigation";
$result = mysql_query($sql);
$navitem = "<ul>";
if ($result && mysql_num_rows($result) > 0) {
while ($nav = mysql_fetch_assoc($result)) {
$navitem .= "<li><a href='pages/page.php?page=" . $nav['name'] . "'>" . $nav['name'] . "</a></li>";
}
}
return $navitem .= "</ul>";
}
function checkifAdmin($id) {
$sql = "SELECT role FROM users WHERE id = $id";
$result = mysql_query($sql);
$check = "";
if ($result) {
while ($row = mysql_fetch_assoc($result)) {
$check .= $row['role'];
}
}
if ($check == 5) {
return true;
} elseif ($check == 1) {
return false;
}
}
function authenticate($uid) {
if (!empty($_SESSION['user_agent']) && $_SESSION['user_agent'] == $_SERVER['HTTP_USER_AGENT']) {
return !empty($_SESSION[$uid]) ? true : false;
}
return false;
}
function sendStatus($id, $fname, $lname, $status) {
$sql = "INSERT INTO status_updates VALUES('$id','$fname','$lname','$status','')";
$result = mysql_query($sql);
if (mysql_affected_rows() > 0) {
return true;
}
return false;
}
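// Return the number of comments attached to the given blog post.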
function getBlogStats($blogid){
$sql = "SELECT * FROM comments WHERE blogid = $blogid";
$result = mysql_query($sql);
if ($result && mysql_num_rows($result) > 0) {
return mysql_num_rows($result);
}
}
/*
function printStatus($id){
$checkfriends = "SELECT friendwith FROM friendlist WHERE uid = $id";
$result = mysql_query($checkfriends);
//$row = mysql_fetch_row($checkfriends);
$friend = array();
$sql = "";
$friendw = 'friendwith';
while ($data = mysql_fetch_assoc($result)) {
$friend[] = $data;
//$sql .= "SELECT * FROM status_updates WHERE uid = $id ORDER BY status_id DESC";
$sql .= "SELECT * FROM status_updates WHERE uid in($id) ORDER BY status_id DESC";
echo 123;
}
echo $friend[1]['friendwith'];
echo "<pre>";
print_r($friend);
echo "</pre>";
// $sql = "SELECT * FROM status_updates WHERE uid in ('$id', '$checkfriends') ORDER BY status_id DESC";
$resultfrineds = mysql_query($sql);
$result3 = mysql_fetch_assoc($resultfrineds);
$status = '';
if (mysql_num_rows($result) > 0) {
while ($data = $result3) {
$status .= '<h3>' . $data['fname'] . " " . $data['lname'] . "</h3>";
$status .= "<p>" . $data['status'] . "</p>";
$status .= "<br>";
}
return $status;
} else {
return 1;
}
}
*/
function printStatus($id, $user_profile) {
$content = "";
$sql = "SELECT * FROM status_updates WHERE uid = $user_profile ORDER BY status_id DESC";
$result = mysql_query($sql);
if ($result) {
while ($data = mysql_fetch_assoc($result)) {
$content .= "<div class='status'>";
$content .= '<img class="myImage" width="50px" height="50px" src="../../assets/images/user_images/' . getUserImage($data['uid'], 'MAI') . '"/>';
$content .= "<h3 class='status_name'>" . $data['fname'] . " " . $data['lname'] . "</h3>";
$content .= "<p class='update'>" . nl2br($data['status']) . "</p>";
$content .= "</div>";
//$content .= mysql_num_rows($result);
}
return $content;
} else {
}
}
function valueOfInput($field) {
if (!empty($_POST[$field])) {
return $_POST[$field];
    } elseif (!empty($_GET[$field])) {
        return $_GET[$field];
}
return false;
}
function submitBPost($title, $heading, $content) {
$sql = "INSERT INTO pages VALUES('','$title','$heading','$content')";
$result = mysql_query($sql);
if ($result && mysql_affected_rows() > 0) {
return "Post submited";
} else {
return "Error submiting post / 'pages' table was not found";
}
}
function getRegisteredUser() {
$sql = "SELECT * FROM users";
$result = mysql_query($sql);
$pruser = "";
$pruser .= "<table class='cptable'>";
$pruser .= "<tr>
<td>ID</td>
<td>FIRST NAME</td>
<td>LAST NAME</td>
<td>EMAIL</td>
<td>BIRTH DATE</td>
<td>ROLE</td>
<td colspan='2'>OPTIONS</td>
</tr>
";
if ($result) {
if (mysql_num_rows($result) > 0) {
while ($user = mysql_fetch_assoc($result)) {
$pruser .= "<tr><td>{$user['id']}</td>
<td>{$user['fname']}</td>
<td>{$user['lname']}</td>
<td>{$user['email']}</td>
<td>{$user['birth']}</td>
<td>{$user['role']}</td>
<td><a href='?cp=cpusers&delete=" . "{$user['id']}" . "'>DELETE</a></td>
<td><a href='?cp=cpusers&edit=" . "{$user['id']}" . "'>EDIT</a></td>
<tr>";
}
}
}
$pruser .= "</table>";
return $pruser;
}
function cpDeleteUser($userid){
$sql = "DELETE FROM users WHERE id = $userid";
$result = mysql_query($sql);
if (mysql_affected_rows() > 0) {
echo "User has been removed";
}
}
function editPost($id,$title,$heading,$content){
$sql = mysql_query("UPDATE pages SET title = '$title', heading = '$heading', content = '$content' WHERE id = $id");
echo "Post id: -" . $id . "- updated";
}
function editUser($id,$fname,$lname,$email,$birth,$role){
$sql = mysql_query("UPDATE users SET fname = '$fname', lname = '$lname', email = '$email', birth = '$birth', role='$role' WHERE id = $id");
echo "User id: -" . $id . "- updated";
}
function debug() {
echo "<pre>";
print_r($_SESSION);
print_r($_POST);
echo "</pre>";
}
function printBlog(){
$sql = "SELECT * FROM pages ORDER BY id DESC";
$result = mysql_query($sql);
$blog_link = "";
if ($result && mysql_num_rows($result) > 0) {
while($data = mysql_fetch_assoc($result)){
//
$sqlcomment = mysql_query("SELECT * FROM comments WHERE blogid = {$data['id']}");
$countcomments = mysql_num_rows($sqlcomment);
//
$blog_link .= "<div class='index_blog_post'>";
$blog_link .= "<h3 class='blog_heading'><a href='pages/blog.php?id=" . $data['id'] . "'>" . $data['heading'] . "</a></h3><br>";
$blog_link .= "<p class='blog_desc'><i>" . substr($data['content'], 0,250) . "..." . "</i></p><br>";
$blog_link .= "<a href='pages/blog.php?id=" . $data['id'] . "#comments'><img class='commentcount' src='assets/images/commentcount.png'> " . "<div class='comment_count'>" . $countcomments . " Comments</a></div>";
$blog_link .= "</div>";
}
}
return $blog_link;
}
function submitComment($uid,$fname,$lname,$content,$inter,$blogid){
$sql = "INSERT INTO comments VALUES('','$uid','$fname','$lname','$content','$inter','$blogid')";
$result = mysql_query($sql);
if ($result && mysql_affected_rows() > 0) {
return "Comment submitted";
}
}
function printComments($blogid){
$sql = "SELECT * FROM comments WHERE blogid = $blogid";
$result = mysql_query($sql);
$comment = "";
if ($result && mysql_num_rows($result) > 0) {
while ($data = mysql_fetch_assoc($result)) {
$comment .= "<div class='user_comment'>";
$comment .= "<b>" . $data['fname'] . " " . $data['lname'] . " Says..<Br></b>";
$comment .= "<p class='comment_content'>" . nl2br($data['comment']) . "</p>";
if ($data['inter'] == 'yes') {
$comment .= "<div class='inter'><img class='inter_img' src='../assets/images/thumbsup.png'> | Interesting</div>";
} else {
$comment .= "<div class='inter'><img class='inter_img' src='../assets/images/thumbsdown.png'> | Not interesting</div>";
}
$comment .= "</div>";
}
}else {
$comment = "Be the first to comment on this blog post";
}
return $comment;
}
function blogPage($blogid){
$sql = "SELECT * FROM pages WHERE id = $blogid";
$result = mysql_query($sql);
$blog_link = "";
if ($result && mysql_num_rows($result) > 0) {
while($data = mysql_fetch_assoc($result)){
$blog_link .= "<h1 class='blog_header'>" . $data['heading'] . "</h1>";
if (getBlogStats($blogid) > 0) {
$blog_link .= "<p class='comment_stats'><a href='#comments'>" . getBlogStats($blogid) . " Comments" . "</a></p>";
} else {
$blog_link .= "<p class='comment_stats'><a href='#comments'>0 Comments</a></p>";
}
$blog_link .= "<p class='blog_content'>" . nl2br(str_replace(' ', "<br><br>", $data['content'])) . "</p><br>";
}
}
return $blog_link;
}
function getToken() {
$token = sha1(uniqid(rand(), TRUE));
$_SESSION['token'] = $token;
//echo "Token Generated";
return $token;
}
function clean_input($method, $input, $option) {
$clean_input = false;
$input = filter_input($method, $input, $option);
$input = trim($input);
if ($input) {
$clean_input = mysql_real_escape_string($input);
}
return $clean_input;
}
function getCpContent($index) {
include ('../cp/' . $index . '.php');
}
function cpGetPage() {
}
function cpDeletePage($pageid) {
$sql = "DELETE FROM pages WHERE id = $pageid";
$result = mysql_query($sql);
if (mysql_affected_rows() > 0) {
echo "Page has been removed";
}
}
function clean_input_var($input, $method, $option) {
$clean_input = false;
$input = filter_var($input, $method, $option);
$input = trim($input);
if ($input) {
$clean_input = mysql_real_escape_string($input);
}
return $clean_input;
}
function emailMatch($email_input) {
$emailsql = "SELECT * FROM users WHERE email = '$email_input'";
$result = mysql_query($emailsql);
if ($result && mysql_num_rows($result) > 0) {
return true;
}
return false;
}
function setUserLogin($cred) {
$_SESSION['user_agent'] = $_SERVER['HTTP_USER_AGENT'];
foreach ($cred as $key => $value) {
$_SESSION[$key] = $value;
}
}
function sendMSG($id, $fname, $lname, $user_profile, $msg, $date) {
$sql = "INSERT INTO private_msg VALUES($id,'$fname','$lname',$user_profile,'$msg','$date')";
$result = mysql_query($sql);
if (mysql_affected_rows() > 0) {
//echo 123;
} else {
echo "error sending msg";
}
}
/*
function bumpBlock($id, $button) {
$bump = "";
$sql = mysql_query("SELECT lastbump FROM placement WHERE uid = $id");
//$num = 0;
if ($sql) {
while ($int = mysql_fetch_assoc($sql)) {
$num = $int['lastbump'];
}
}
$findunix = mysql_query("SELECT lastbump FROM placement WHERE uid = $id");
if ($findunix) {
while ($row = mysql_fetch_assoc($findunix)) {
$row['lastbump'];
}
}
//echo $checkifpassed = $row['lastbump'] + (60*60*3);
//die;
if ($num == 0) {
$unixtimenow = time();
$sql = "UPDATE placement SET lastbump = $unixtimenow WHERE uid = $id";
$result = mysql_query($sql);
if (mysql_affected_rows() > 0) {
echo 123;
return "You have been bumped!";
}
}
//echo time();
//echo "<br>";
$timepasscheck = $num + (5);
$unixtimenow = time();
if ($unixtimenow > $timepasscheck) {
define('ALLOW_BUMP', 'You can now bump!');
}
if (isset($button)) {
if ($unixtimenow > $timepasscheck) {
$sql = "UPDATE placement SET lastbump = $unixtimenow WHERE uid = $id";
$result = mysql_query($sql);
if (mysql_affected_rows() > 0) {
return "You have been bumped!<br>You can bump again in 3 HOURS.";
}
} else {
define('TIME_LEFT', 'Its been ' . str_replace("", "", date('h', time() - $num) - 1) . " hours and " . date('i', time() - $num) . ' minutes since you last bumped.<br>You can bump every 3 HOURS<br>');
//echo date("h-i-s" ,$num) . "<br>";
return "Not enough time has passed!";
}
}
}*/
function submitRev($id, $title, $content, $stars, $revid, $date) {
$checkifexists = mysql_query("SELECT * FROM reviews WHERE uid = $id AND revid = $revid");
if (mysql_num_rows($checkifexists) > 0) {
return "You already reviewed this tutor";
} else {
$sql = "INSERT INTO reviews VALUES('$id','$title','$content','$stars','$revid','$date','','0')";
$result = mysql_query($sql);
}
}
function getReviews($user_profile) {
$revnum = mysql_query("SELECT * FROM reviews WHERE revid = $user_profile");
$num = mysql_num_rows($revnum);
//echo $num;
$sql = "SELECT AVG(stars) FROM reviews WHERE revid = $user_profile";
$result = mysql_query($sql);
if ($result) {
while ($avg = mysql_fetch_assoc($result)) {
return $rev_avg = round($avg['AVG(stars)'], 1);
}
}
}
function printReviews($userprofile, $fname, $lname, $id) {
$sql = "SELECT * FROM reviews WHERE revid = $userprofile";
$result = mysql_query($sql);
$review = '';
if (mysql_num_rows($result) == 0) {
return "<div class='rev_b_first'>Be the first to review this tutor</div>";
}
$submitnum = '';
if ($result) {
while ($rev = mysql_fetch_assoc($result)) {
$submitnum++;
$review .= "<div class='review_block'>";
$review .= '<img class="myImage" width="50px" height="50px" src="../../assets/images/user_images/' . getUserImage($rev['uid'], 'MAI') . '"/>';
$review .= "<b><div class='review_title'>" . $rev['title'] . " | " . $rev['stars'] . " Stars" . '</div></b><br>';
$review .= '<div class="update">' . nl2br($rev['content']) . "</div>" . '<br><br>';
$review .= "<form method='post' action='profile.php?profileid=$userprofile' name='like_button'>
<input type='submit' name='like_review$submitnum' value='Like'/>
</form>";
$reviewid = $rev['reviewid'];
$sql_get_likes = mysql_query("SELECT * FROM rev_likes WHERE reviewid = $reviewid");
$usernameLiked = array();
while ($user_liked = mysql_fetch_array($sql_get_likes)) {
$usernameLiked[] = $user_liked['uid'];
}
if (!in_array($id, $usernameLiked)) {
if (isset($_POST['like_review' . $submitnum])) {
$query = mysql_query("INSERT INTO rev_likes VALUES('$id','$fname','$lname','$reviewid')");
//echo 123;
} else {
//echo 345;
}
} else {
//echo "already liked";
}
if ($sql_get_likes && mysql_num_rows($sql_get_likes) > 0) {
$review .= mysql_num_rows($sql_get_likes) . " Thumbs up";
}
$review .= "</div>";
}
//echo $_POST['like_review' . $submitnum];
}
return $review;
}
function getPMsg($id) {
$sql = "SELECT * FROM private_msg WHERE ruid = $id";
$result = mysql_query($sql);
$data = mysql_fetch_assoc($result);
if (mysql_num_rows($result) > 0) {
return mysql_num_rows($result);
}
//echo 123;
return false;
}
function printPMsg($id) {
$msg = '';
$sql = "SELECT uid,fname,lname FROM private_msg WHERE ruid = $id";
$result = mysql_query($sql);
$rows = mysql_fetch_assoc($result);
echo $rows['uid'];
$sql2 = "SELECT image FROM user_image WHERE uid = {$rows['uid']}";
$resultimg = mysql_query($sql2);
$valueimg = mysql_fetch_assoc($resultimg);
/* $values = mysql_fetch_assoc($result);
$data = array(
'image' => $valueimg['image'],
'uid' => $values['uid'],
'fname' => $values['fname'],
'lname' => $values['lname'],
'accept' => $values['accept'],
'fuid' => $values['fuid']
);*/
if ($result) {
echo 123;
while ($rows = mysql_fetch_assoc($result)) {
echo 123;
$msg .= '<h3>' . $rows['fname'] . '</h3>';
$msg .= '<p>' . $rows['lname'] . '</p>';
}
}
return $msg;
}
function getUserImage($uid, $status) {
$img = "SELECT image FROM user_image WHERE uid = '$uid' AND status = '$status'";
$result = mysql_query($img);
if ($result && mysql_num_rows($result) > 0) {
$result3 = mysql_fetch_assoc($result);
return $result3['image'];
}
return false;
}
function validateImage($file) {
$extentions = array("jpg", "png", "gif", "jpeg");
$size = 1024 * 1024 * 10;
//10MB
if (is_uploaded_file($file['tmp_name'])) {
if ($file['size'] <= $size && $file['size'] != 0) {
$file_path = pathinfo($file['name']);
if (in_array(strtolower($file_path['extension']), $extentions)) {
return true;
}
} else {
$error .= "File size cannot exceed 2mb";
}
}
    return false;
}
function saveImage($file, $uid, $status) {
require ("../../root.php");
move_uploaded_file($file['tmp_name'], SITE_ROOT . "/assets/images/user_images/" . $file['name']);
$result = "SELECT image FROM user_image WHERE uid = '$uid' AND status = '$status'";
$sql = mysql_query($result);
if ($sql && mysql_num_rows($sql) > 0) {
$assoc = mysql_fetch_assoc($sql);
echo "Image exists";
mysql_query("DELETE FROM user_image WHERE uid = '$uid' AND status = '$status'");
unlink("../assets/images/user_images/" . $assoc['image']);
}
$imagerand = uniqid(rand(1, 10000000000000000000));
//i wanna see you bump into that again
mysql_query("INSERT INTO user_image VALUES('$uid','{$file['name']}','$status')");
//ysql_query("UPDATE user_image SET image = '$imagerand' WHERE uid = $uid");
$getname = mysql_query("SELECT image FROM user_image WHERE uid = $uid");
$nameget = mysql_fetch_assoc($getname);
//rename(SITE_ROOT . "/assets/images/user_images/" . $file['name'], $nameget['image']);
}
function validateUser($email, $password) {
$password = sha1(md5("%^#" . $password . "<PASSWORD>"));
$sql = "SELECT u.id,u.fname,u.lname,u.email FROM users u";
$sql .= " WHERE u.email = '$email' AND u.password = '$password' LIMIT 1";
$result = mysql_query($sql);
$_SESSION['sql'] = 'yes';
if ($result && mysql_num_rows($result) > 0) {
$row = mysql_fetch_assoc($result);
$data = array('uid' => $row['id'], 'email' => $row['email'], 'fname' => $row['fname'], 'lname' => $row['lname'], 'password' => $row['password']);
setUserLogin($data);
return true;
}
}
function friendUser($id, $fname, $lname, $fuid) {
$sql = "INSERT INTO friendlog VALUES($id,'$fname','$lname','REQ','$fuid')";
$result = htmlspecialchars($sql);
$result = stripslashes($result);
$result = mysql_query($result);
if (mysql_affected_rows() > 0) {
return true;
}
return false;
}
function unfriend($id, $fuid) {
$sql = "DELETE FROM friendlist WHERE uid = $id AND friendwith = $fuid;";
$sql2 = mysql_query("DELETE FROM friendlist WHERE uid = $fuid AND friendwith = $id");
$result = mysql_query($sql);
}
function acceptFriend($id, $fname, $lname, $fuid, $ffname, $flname) {
$sql = "INSERT INTO friendlist VALUES($id,'$fname','$lname',$fuid);";
$sql2 = mysql_query("INSERT INTO friendlist VALUES($fuid,'$ffname','$flname',$id)");
$result = mysql_query($sql);
$remove = mysql_query("DELETE FROM friendlog WHERE uid = $fuid");
}
function getMsg($fuid) {
$sql = "SELECT * FROM friendlog WHERE fuid = $fuid";
$result = mysql_query($sql);
$msg = mysql_num_rows($result);
return $msg;
}
function ignoreFriend($id) {
$sql = "DELETE FROM friendlog WHERE uid = $id";
$result = mysql_query($sql);
}
function ifFriend($id, $user_profile) {
$sql = "SELECT uid FROM friendlist WHERE uid = $id AND friendwith = $user_profile";
$result = mysql_query($sql);
if (mysql_num_rows($result) > 0) {
return false;
}
return true;
}
function checkFriendLog($user) {
$sql = "SELECT uid FROM friendlog WHERE uid = $user";
$result = mysql_query($sql);
$data = mysql_fetch_assoc($result);
if (mysql_num_rows($result) > 0) {
return true;
}
return false;
}
function inFriendList($id, $user_profile) {
$sql = "SELECT uid FROM friendlist WHERE friendwith = $id AND uid = $user_profile";
$result = mysql_query($sql);
$row = mysql_fetch_assoc($result);
    if (mysql_num_rows($result) > 0) {
//echo 123;
return true;
}
}
function notFriend($user_profile, $id) {
$sql = "SELECT uid FROM friendlist WHERE friendwith = $user_profile AND uid = $id";
$result = mysql_query($sql);
if (mysql_num_rows($result) > 0) {
return false;
}
return true;
}
function requestSent($id, $user_profile) {
$sql = "SELECT fuid,uid FROM friendlog WHERE uid = $id";
$result = mysql_query($sql);
$data = mysql_fetch_assoc($result);
if ($data['fuid'] == $user_profile) {
return true;
}
return false;
}
function getFRequests($id) {
$content = '';
$sql = "SELECT fname,lname FROM friendlog WHERE fuid = $id";
$sql2 = "SELECT image FROM user_image WHERE uid = $id";
$result = mysql_query($sql);
$resultimg = mysql_query($sql2);
$valueimg = mysql_fetch_assoc($resultimg);
$values = mysql_fetch_assoc($result);
$data = array('image' => $valueimg['image'], 'uid' => $values['uid'], 'fname' => $values['fname'], 'lname' => $values['lname'], 'accept' => $values['accept'], 'fuid' => $values['fuid']);
if ($result) {
while ($row = mysql_fetch_assoc($result)) {
$content .= '<h3>' . $row['fname'] . '</h3>';
$content .= '<p>' . $row['lname'] . '</p>';
}
}
return $content;
}
/*while ($row = $values) {
foreach ($row as $key) {
$userImage = getUserImage($id, 'MAI');
echo '<img class="request_image" width="50px" height="50px" src="../../assets/images/user_images/' . $userImage . '" />';
echo $data['fname'] . " " . $data['lname'];
echo '<br>';
}
}*/
function followUser($uid, $button, $fuid, $fname, $lname, $date) {
if (isset($button)) {
$check = mysql_query("SELECT uid FROM followers WHERE following = $fuid AND uid = $uid");
$count = mysql_num_rows($check);
// echo 123;
if ($count > 0) {
//echo 123;
echo "You are already following this user you prick";
} else {
$sql = "INSERT INTO followers (uid, fname, lname, date, following) VALUES($uid,'$fname','$lname','$date',$fuid)";
$result = mysql_query($sql);
// echo "Follow request was successful";
return true;
}
}
}
function followCount($uid) {
$sql = "SELECT following FROM followers WHERE following = '$uid'";
$result = mysql_query($sql);
$getcount = mysql_num_rows($result);
return $getcount;
}
function checkFollower($id, $fuid) {
$sql = "SELECT following FROM followers WHERE uid = '$id' AND following = '$fuid'";
$result = mysql_query($sql);
$rows = mysql_num_rows($result);
if ($rows > 0) {
return false;
} else {
return true;
}
}
function getUserProfile($user_profile, $user_fname, $user_lname, $user_about) {
}
function getAbout($id) {
$sql = "SELECT about FROM users WHERE id = $id";
$result = mysql_query($sql);
if ($result && mysql_affected_rows() > 0) {
$about = mysql_fetch_assoc($result);
foreach ($about as $key => $value) {
echo $value;
}
}
}
function unfollowUser($id, $fuid) {
$sql = "DELETE FROM followers WHERE uid = '{$id}' AND following = '{$fuid}'";
$result = mysql_query($sql);
if (mysql_affected_rows() > 0) {
return true;
} else {
return false;
}
}
function userRegister($data) {
if (!empty($data['fname']) && !empty($data['lname']) && !empty($data['email']) && !empty($data['password']) && !empty($data['birth'])) {
//echo "Passed if";
$fname = $data['fname'];
$lname = $data['lname'];
$emailaddress = $data['email'];
$pwd = sha1(md5("%^#" . $data['password'] . "<PASSWORD>"));
$role = 1;
//Salt in the function, use for login form aswell
$birthdate = $data['birth'];
echo 123;
//echo "Passed 2";
$reg_sql = "INSERT INTO users VALUES('','{$fname}','{$lname}','{$emailaddress}','{$birthdate}',1,'{$pwd}')";
$reg_query = mysql_query($reg_sql) or die ;
$lastid = mysql_insert_id();
echo 123;
if ($reg_query) {
$id = mysql_insert_id();
$unix = time();
echo 123;
$tutor_id = mysql_query("INSERT INTO placement VALUES($lastid, '', $unix)");
$setdata = array('uid' => $id, 'fname' => $fname, 'lname' => $lname, 'email' => $emailaddress, 'password' => <PASSWORD>);
setUserLogin($setdata);
echo 123;
return true;
}
}
}
?><file_sep><?php
?>
<!DOCTYPE html>
<html>
<head>
<title></title>
</head>
<body>
<header>
<div id="head_wrap">
<div id="inhead_wrap">
<a href="index.php"><div id="logo">JOHN-CMS</div></a>
<div class="head_entry"></div>
<div class="head_entry"></div>
</div>
</div>
</header>
</body>
</html><file_sep><?php
?>
<html>
<head>
<title>PHP SANDBOX || TEST AREA</title>
<link rel="stylesheet" type="text/css" href="style.css">
<link href='http://fonts.googleapis.com/css?family=Lato:400,700,900' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Oxygen:400,700,300' rel='stylesheet' type='text/css'>
</head>
<body>
<header>
<div id="logo_area">
<div id="logo"><a href="#">PHP SANDBOX</a></div>
<div id="sub">I WAS BORED WHEN I MADE THIS!</div>
<ul>
<li><a href="#">ABOUT</a></li>
<li><a href="#">FAQ</a></li>
<li><a href="#">CONTACT</a></li>
</ul>
</div>
</header>
<div id="wrapper">
<ul id="code_nav">
<li class="code_tab"><a href="index.php">RESET</a></li>
<li class="code_tab"><a href="index.php?t=1">TAB 1</a></li>
<li class="code_tab"><a href="index.php?t=2">TAB 2</a></li>
</ul>
<section class="code"><!--WHERE THE PHP CODE BEGINS! -->
<?php
if(!empty($_GET['t']) && $_GET['t'] == 1) {
echo '<iframe src="sandcode/sandcode.php" frameborder="0px"></iframe>';
}
?>
</section><!--WHERE THE PHP CODE ENDS!-->
<section class="code"><!--WHERE THE PHP CODE BEGINS! -->
<?php if(!empty($_GET['t']) && $_GET['t'] == 2): ?>
<iframe src="sandcode/sandcode2.php" frameborder="0px"></iframe>
<?php endif; ?>
</section><!--WHERE THE PHP CODE ENDS!-->
<div id="home_content">
<p>
</p>
</div>
</div>
<footer>
<div id="footer_text">This is the work of my hands, even i cant use it without permission ™</div>
</footer>
</body>
</html><file_sep><html>
<head>
<title>SANDBOX</title>
<link href='http://fonts.googleapis.com/css?family=Lato:400,700,900' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Oxygen:400,700,300' rel='stylesheet' type='text/css'>
<style>
#code {
font-size: 18px;
                font-family: 'Lato', sans-serif;
font-weight: 400;
color: #707070;
padding-left: 0px;
-webkit-transition: all 0.2s ease-in-out;
-moz-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out;
}
#code:hover {
padding-left: 5px;
-webkit-transition: all 0.2s ease-in-out;
-moz-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out;
}
#form_name {
width: 100%;
height: 23px;
padding: 3px;
border-radius: 5px;
background: rgba(142, 213, 250, 0.5);
}
.form_error {
width: 100%;
color: rgb(79, 74, 74);
height: 23px;
padding: 3px;
border-radius: 5px;
background: rgba(215, 44, 44, 0.5);
}
</style>
</head>
<body>
<section id="code">
<?php
$x = "";
function math(&$x){
if (isset($_GET['submit'])) {
if (isset($_GET['number1']) && isset($_GET['number2'])) {
if (is_numeric($_GET['number1']) && is_numeric($_GET['number2'])) {
$x = $_GET['number1'] * $_GET['number2'];
return $x;
}
}
}
}
?>
<form name="form_name" id="form_get" action="sandcode.php" method="get">
<input type="text" class="number" name="number1" />
<br>
<input type="text" class="number" name="number2" />
<br><br>
<input type="submit" name="submit" />
</form>
<?php
echo math($x);
?>
<br><br><br>
<?php
$y = "";
function devide(&$y){
if (isset($_GET['submit2'])) {
if (isset($_GET['number3']) && isset($_GET['number4'])) {
if (is_numeric($_GET['number3']) && is_numeric($_GET['number4'])) {
if($_GET['number4'] != 0){
$y = $_GET['number3'] / $_GET['number4'];
return $y;
} else {
$y = "You cannot devide a number by 0, Please try again.";
return $y;
}
}
}
}
}
?>
<form name="form_name" id="form_get" action="sandcode.php" method="get">
<input type="text" class="number" name="number3" />
<br>
<input type="text" class="number" name="number4" />
<br><br>
<input type="submit" name="submit2" />
</form>
<?php
echo devide($y);
?>
<br><br>
<?php
$url_array = array('sec1' => 'This is article one', 'sec2' => 'This is article two');
$article = '';
if (!empty($_GET['x'])) {
$x = $_GET['x'];
if(array_key_exists($x, $url_array)) {
$article = $url_array[$x];
}
}
?>
<ul>
<li><a href="sandcode.php?x=sec1">Tab 1</a></li>
<li><a href="sandcode.php?x=sec2">Tab 2</a></li>
</ul>
<?= $article; ?>
</section>
</body>
</html><file_sep><?php
?>
Anime page<file_sep><?php
?>
<!DOCTYPE html>
<html>
<head>
<title></title>
</head>
<body>
<div id="content">
<div class="side_bar">
jfnefhibefgbfjogbfjgbfeognefoun
</div>
<section class="welcome">
<div id="top_image">
<div id="top_img_l"></div>
<div id="top_img_r"></div>
</div>
<p class="content_top" align="justify">
"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam, quis nostrud exercitation ullamco
laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure
dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat
nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in
culpa qui officia deserunt mollit anim id est laborum."
</p>
</section>
</div>
</body>
</html><file_sep><?php
$feedback = array();
if (isset($_POST['submit'])) {
if (!empty($_POST['fname']) && !is_numeric($_POST['fname']) && !empty($_POST['pass']) && !empty($_POST['email'])
&& !empty($_POST['tel']) && is_numeric($_POST['tel'])) {
$feedback[] = "Your name is {$_POST['fname']} and your password is {$_POST['pass']} <br>";
$feedback[] = "Your email address is {$_POST['email']} and Phone number is {$_POST['tel']} <br>";
} if (empty($_POST['fname'])) {
$feedback[] = "You didn't enter a first name<br>";
} if (is_numeric($_POST['fname'])) {
$feedback[] = "You cant use numbers as a first name<br>";
} if (empty($_POST['pass'])) {
$feedback[] = "You didn't enter a password<br>";
} if (empty($_POST['email'])) {
$feedback[] = "You didn't enter an email address<br>";
} if (empty($_POST['tel'])) {
$feedback[] = "You didn't enter a phone number<br>";
} if (!is_numeric($_POST['tel'])) {
$feedback[] = "A phone number cannot contain anything but numbers<br>";
}
}
?>
<!DOCTYPE html>
<html>
<head>
<title>
</title>
</head>
<body>
<form name="form" class="form" accept-charset="UTF-8" action="10_4.php" method="post" enctype="multipart/form-data">
<label>First Name:</label><br>
<input type="text" name="fname" /><br>
<label>Password:</label><br>
<input type="<PASSWORD>" name="pass" /><br>
<label>Email:</label><br>
<input type="text" name="email" /><br>
<label>Phone Number:</label><br>
<input type="tel" name="tel" /><br>
<input type="submit" name="submit" value="Submit"/>
</form>
<br>
<pre>
<?php
print_r($feedback);
?>
</pre>
<?php
foreach ($feedback as $key) {
echo $key;
}
?>
</body>
</html><file_sep><?php
?>
tv shows<file_sep><?php
require_once('../config/functions.php');
require_once('../config/db.php');
$id = isset($_SESSION['uid']) ? $_SESSION['uid'] : null;
$admin = checkifAdmin($id);
if ($admin == false) {
session_destroy();
//A little punishment
echo "You have been logged out";
header("location: ../index.php");
}
//Check if function returns true
if (authenticate('uid')) {
if ($id != null) {
if ($admin == TRUE) {
echo "<div id='cp_innerbar_bar'>";
echo "<div class='cp_text_pad'>Welcome {$_SESSION['fname']}! Heres whats up with the website</div>";
echo "</div>";
}
}
//echo $admin;
if ($admin == false) {
session_destroy();
//A little punishment
echo "You have been logged out";
header("location: ../index.php");
}
} else {
header("location: ../index.php");
}
?>
<!DOCTYPE h<file_sep><?php
?>
Upcoming news<file_sep><?php
?>
text for news<file_sep><?php
?>
<!DOCTYPE html>
<html>
<head>
<title></title>
<style>a {text-decoration: none; color: white;}</style>
</head>
<body>
<div id="top_nav">
<ul>
<li><div class="nav_block">Browse</div>
<ul>
<li class="browse_link">
<a href="?page=news">News</a>
</li>
<li class="browse_link">
<a href="?page=games">Games</a>
</li>
<li class="browse_link">
<a href="?page=movies">Movies</a>
</li>
<li class="browse_link">
<a href="?page=tvshows">TV Shows</a>
</li>
<li class="browse_link">
<a href="?page=bullshit">Bullshit</a>
</li>
<li class="browse_link">
<a href="?page=anime">Animes</a>
</li>
<li class="browse_link">
<a href="?page=comics">Comics</a>
</li>
<li class="browse_link">
<a href="?page=u_news">Upcoming Shows</a>
</li>
<li class="browse_link">
<a href="?page=blog">Blog</a>
</li>
<li class="browse_link">
<a href="?page=users">Users</a>
</li>
</ul>
</li>
</ul>
</div>
<ul class="navigation_top">
<li class="nav_link2">EVOLVE: NEW GAMEPLAY</li>
<li class="nav_link2">GTA V: PS4 GAMEPLAY</li>
<li class="nav_link2">THE SIMPSONS: 2015 TRAILER</li>
<li class="nav_link2">BIOSHOCK ULTRON TRAILER</li>
</ul>
</body>
</html><file_sep><?php
?>
Movies page<file_sep><?php
echo "<pre>";
print_r($_COOKIE);
echo "</pre>";
$total = !empty($_COOKIE['cart']) ? $_COOKIE['cart'] : '';
$price = key($_POST);
if (!empty($price)) {
$total = $total + $price;
setcookie('cart', $total, time() + 60 * 60 * 24 * 30);
}
echo $total;
?>
<html>
<head>
<title>SANDBOX</title>
<link href='http://fonts.googleapis.com/css?family=Lato:400,700,900' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Oxygen:400,700,300' rel='stylesheet' type='text/css'>
<style>
#code {
font-size: 18px;
                font-family: 'Lato', sans-serif;
font-weight: 400;
color: #707070;
padding-left: 0px;
-webkit-transition: all 0.2s ease-in-out;
-moz-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out;
}
#code:hover {
padding-left: 5px;
-webkit-transition: all 0.2s ease-in-out;
-moz-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out;
}
#form_name {
width: 100%;
height: 23px;
padding: 3px;
border-radius: 5px;
background: rgba(142, 213, 250, 0.5);
}
.form_error {
width: 100%;
color: rgb(79, 74, 74);
height: 23px;
padding: 3px;
border-radius: 5px;
background: rgba(215, 44, 44, 0.5);
}
</style>
</head>
<body>
<section id="code">
<form name="form" action="sandcode2.php" method="post" accept-charset="utf-8">
                <label>
                    Product 1:
                </label>
                <input type="submit" name="20" value="add to cart" />
                <br>
                <label>
                    Product 2:
                </label>
                <input type="submit" name="40" value="add to cart" />
                <br>
                <label>
                    Product 3:
                </label>
<input type="submit" name="60" value="add to cart" />
<br>
</form>
</section>
</body>
</html><file_sep><?php
?>
comics page<file_sep><?php
session_start();
echo "<pre>";
print_r($_SESSION);
echo "</pre>";
echo "<br>";
$user = array('uid' => 1, 'user' => 'admin', 'pass' => '<PASSWORD>');
$uid = !empty($_SESSION['uid']) ? $_SESSION['uid'] : '';
if (isset($_POST['submit'])) {
if (!empty($_POST['username']) && !empty($_POST['password'])) {
if ($_POST['username'] == $user['user'] && $_POST['password'] == $user['pass']) {
$_SESSION['uid'] = $user['uid'];
header('location: sandcode.php');
} else {
echo "Wrong information";
}
}
}
if (!empty($_GET['q']) && $_GET['q'] == 'logout') {
unset($_SESSION['uid']);
header('location: sandcode.php');
}
?>
<html>
<head>
<title>SANDBOX</title>
<link href='http://fonts.googleapis.com/css?family=Lato:400,700,900' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Oxygen:400,700,300' rel='stylesheet' type='text/css'>
<style>
#code {
font-size: 18px;
                font-family: 'Lato', sans-serif;
font-weight: 400;
color: #707070;
padding-left: 0px;
-webkit-transition: all 0.2s ease-in-out;
-moz-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out;
}
#code:hover {
padding-left: 5px;
-webkit-transition: all 0.2s ease-in-out;
-moz-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out;
}
#form_name {
width: 100%;
height: 23px;
padding: 3px;
border-radius: 5px;
background: rgba(142, 213, 250, 0.5);
}
.form_error {
width: 100%;
color: rgb(79, 74, 74);
height: 23px;
padding: 3px;
border-radius: 5px;
background: rgba(215, 44, 44, 0.5);
}
</style>
</head>
<body>
<section id="code">
<?php if(empty($uid)): ?>
<form name="login" action="sandcode.php" enctype="multipart/form-data" method="post" accept-charset="utf-8">
<label>Username:</label><br>
<input name="username" type="text" /><br>
<label>Password:</label><br>
<input name="<PASSWORD>" type="<PASSWORD>" /><br><br>
<input type="submit" name="submit" value="SIGN IN" />
</form>
<?php else: ?>
<p><a href="sandcode.php?q=logout">LOGOUT</a></p>
<?php endif; ?>
</section>
</body>
</html><file_sep><!DOCTYPE html>
<html>
<head>
<title>
</title>
</head>
<body>
<?php
if (isset($_GET['submit'])) {
            if (!empty($_GET['fName']) && !empty($_GET['lName'])) {
echo "Your name is:" . $_GET['fName'] . " " . $_GET['lName'];
}
}
?>
<form name="nform" action="lesson1.php" method="get" accept-charset="UTF-8" id="form">
First Name:
<input type="text" name="fName" class="fname" />
<br><br>
Last Name:
<input type="text" name="lName" class="lname" />
<br><br>
<input type="submit" name="submit" value="submit"/>
</form>
</body>
</html><file_sep><?php
?>
<!DOCTYPE html>
<html>
<head>
<title></title>
</head>
<body>
<footer>
<div class="foot_text">
All rights reserved and all the other fucks you dont give. ™
</div>
</footer>
</body>
</html><file_sep><?php
require '../config/db.php';
require '../config/functions.php';
start_session('blog_project');
$id = isset($_SESSION['uid']) ? $_SESSION['uid'] : null;
$feedback = "";
if (authenticate('uid')) {
checkifAdmin($id);
}
?>
<!DOCTYPE html>
<html>
<head>
<title>Forum</title>
<link rel="stylesheet" type="text/css" href="../assets/style/stylesheet.css">
<?php
include_once ('../assets/style/fonts.html');
?>
</head>
<body>
<?php
include_once ('../assets/templates/cms_nav.php');
if (isset($_GET['category'])) {
if (!in_array(null, $_GET)) {
$not_clean_cate = clean_input(INPUT_GET,'category',FILTER_SANITIZE_NUMBER_INT);
$category = stripcslashes($not_clean_cate);
}
}
if (isset($_GET['newpost'])) {
if (!in_array(null, $_GET)) {
$not_clean_post = clean_input(INPUT_GET,'newpost',FILTER_SANITIZE_STRING);
$newpost = stripcslashes($not_clean_post);
}
}
if (isset($_POST['submit_post'])) {
$feedback .= 123;
if (!in_array(null, $_POST)) {
$post_content_ = clean_input(INPUT_POST, 'form_content', FILTER_SANITIZE_STRING);
$post_content = stripcslashes($post_content_);
$title_ = clean_input(INPUT_POST, 'title', FILTER_SANITIZE_STRING);
$title = stripcslashes($title_);
setForumPost($category, $id, $post_content, $title);
$feedback .= 123;
}
}
?>
<div id='forum_wrap'>
<section id='forum_sec'>
<?php if(isset($category)): ?>
<a href="?category=<?=$category;?>&newpost=new">+ New post</a>
<?php if(isset($newpost)):?>
<form name="forumPost" action="forum.php?category=<?= $category; ?>&newpost=<?= $newpost;?>" method="post" enctype="multipart/form-data" accept-charset="utf-8">
<label for="title">Post title:</label>
<Br>
<input type="text" name="title" placeholder="Post title" class="f_input" />
<br>
<br>
<label for="title">Post content:</label>
<br>
<textarea name="form_content" class="f_input" cols="150" rows="20"></textarea>
<br>
<input type="submit" name="submit_post" value="Post" />
</form>
<?php endif; ?>
<?php endif; ?>
</section>
<?php
if (!isset($category)) {
echo getCategories();
} else {
echo printPosts($category);
}
?>
<?= $feedback; ?>
</div>
</body>
</html><file_sep><?php
$article = '';
$array['art1'] = "This is article 1";
$array['art2'] = "This is article 2";
$array['art3'] = "This is article 3";
if (!empty($_GET['q'])) {
$q = $_GET['q'];
if (array_key_exists($q, $array)) {
$article = $array[$q];
}
}
?>
<!DOCTYPE html>
<html>
<head>
<title></title>
</head>
<body>
<ul>
<li><a href="10_3.php?q=art1">Article 1</a></li>
<li><a href="10_3.php?q=art2">Article 2</a></li>
<li><a href="10_3.php?q=art3">Article 3</a></li>
</ul>
<?= $article; ?>
</body>
</html><file_sep><?php
?>
list of users<file_sep><?php
echo "Create and manage pages<br>";
require_once ('../config/functions.php');
require_once ('../config/db.php');
$id = isset($_SESSION['uid']) ? $_SESSION['uid'] : null;
$admin = checkifAdmin($id);
if ($admin == false) {
session_destroy();
//A little punishment
echo "You have been logged out";
header("location: ../index.php");
}
if (isset($_POST['submit_cat'])) {
if (!in_array(null, $_POST)) {
if (isset($_GET['create']) == 'true') {
$cleanstring = clean_input(INPUT_GET, 'create', FILTER_SANITIZE_STRING);
$create_cat = stripcslashes($cleanstring);
if (!in_array(null, $_GET)) {
$name = "";
$category = isset($_POST['cat']) ? clean_input(INPUT_POST, 'cat', FILTER_SANITIZE_STRING) : null;
$title = isset($_POST['title']) ? clean_input(INPUT_POST, 'title', FILTER_SANITIZE_STRING) : null;
$name = isset($_POST['name']) ? clean_input(INPUT_POST, 'name', FILTER_SANITIZE_STRING) : null;
$desc = isset($_POST['desc']) ? clean_input(INPUT_POST, 'desc', FILTER_SANITIZE_STRING) : null;
setCat($name, $title, $category, $desc);
echo "Category saved!";
}
}
} else {
Echo "All fields are required";
}
}
?>
<!DOCTYPE html>
<html>
<head>
<title> Control Panel : Pages </title>
</head>
<body>
<br>
<Br>
<div id="pages_wrap">
<form name="pages_form" method="post" action="cp.php?cp=cpcreatepage&create=true" accept-charset="utf-8">
<label for="cat">Category:</label>
<br>
<input type="text" name="cat" placeholder="Category Name"/>
<br>
<label for="title">Browser tab title:</label>
<br>
<input type="text" name="title" placeholder="Page Title"/>
<br>
<br>
<label for="heading">Page header:</label>
<br>
<input type="text" name="name" placeholder="Heading"/>
<br>
<br>
<label for="desc">Category description:</label>
<br>
<textarea name="desc" rows="25" cols="140"></textarea>
<br>
<br>
<input type="submit" name="submit_cat" value="Save"/>
</form>
</div>
</body>
</html><file_sep><div id="top_bar_wrap">
<div class="center_nav">
<div id="usernav">
<?php $base = "http://" . $_SERVER['SERVER_NAME'] . "/blog_project/"; ?>
<?php if (checkifAdmin($id) == TRUE):
?>
<div id="isadmin">
<ul>
<li class="admin_li">
You are an admin, You can access the admin panel and backend of the cms!
</li>
<li class="admin_li">
<a class="cp_link" href='<?=$base;?>cp/cp.php'> Control Panel</a>
</li>
</ul>
</div>
</div>
            <?php else: ?>
<?php if(isset($id)): ?>
<div id="isuser">
<ul>
<li class="user_li">
Welcome <?=$fname . " " . $lname . "!";?>
</li>
<li class="user_li">
<!-- Placeholder for another user tab -->
</li>
</ul>
</div>
                <?php endif; ?>
            <?php endif; ?>
</div>
<div id="navigation">
<div class="center_nav">
<ul>
<li>
<a href='<?=$base;?>index.php'>Home</a>
</li>
<?php if(isset($_SESSION['uid'])): ?>
<li>
<a href='?logout=yes'>Logout</a>
</li>
<?php endif; ?>
<?php if(!isset($_SESSION['uid'])) :?>
<li>
<a href='<?=$base;?>login/login.php'>Sign in</a>
</li>
<?php endif; ?>
</ul>
</div>
</div>
</div>
</div>
<div id="page_width_bar">
<div id="header">
<h3 class="logo">[The Garage]</h3>
<ul>
<a href='<?=$base;?>pages/forum.php'><li class='main_nav'>FORUM</li></a>
</ul>
</div>
</div><file_sep><?php
?>
<html>
<head>
<title>Shop - PHP Project</title>
<link rel="stylesheet" type="text/css" href="style.css">
<link href='http://fonts.googleapis.com/css?family=Lato:400,700,900' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Oxygen:400,700,300' rel='stylesheet' type='text/css'>
</head>
<body>
<!--This is where the header begins -->
<?php include 'assets/template/header.php'; ?>
<!--Header's end -->
<!--This is where the body of the site begins -->
<div id="wrapper">
<?php include 'assets/template/navigation.php'; ?>
<div id="query_content">
<?php
//Query String to update content by page
//The content updates by a query string
            $page = isset($_GET['page']) ? $_GET['page'] : '';
switch ($page) {
case 'news':
include_once "pages/{$_GET['page']}.php";
break;
case 'games':
include_once "pages/{$_GET['page']}.php";
break;
case 'movies':
include_once "pages/{$_GET['page']}.php";
break;
case 'tvshows':
include_once "pages/{$_GET['page']}.php";
break;
case 'bullshit':
include_once "pages/{$_GET['page']}.php";
break;
case 'anime':
include_once "pages/{$_GET['page']}.php";
break;
case 'comics':
include_once "pages/{$_GET['page']}.php";
break;
case 'u_news':
include_once "pages/{$_GET['page']}.php";
break;
case 'blog':
include_once "pages/{$_GET['page']}.php";
break;
case 'users':
include_once "pages/{$_GET['page']}.php";
break;
default:
include_once 'pages/home.php';
break;
}
?>
</div>
<!--<?php include 'assets/template/home_content.php'; ?> -->
</div>
<!--Wrapper end -->
<?php include 'assets/template/footer.php'; ?>
</body>
</html>
|
ff65720657f22aec4bc999d4e8ae3c7a2e69c01f
|
[
"PHP"
] | 24
|
PHP
|
theclocker/HackerU_PHP
|
4de2b1c594bebccb7d482dfedffd00abf97ace8c
|
98feade863aa5a01c85e49545f9704eab696c9f9
|
refs/heads/master
|
<file_sep>import networkx as nx
import numpy as np
def bipartite_region_tracking(partition, optical_flow, reliability,
matching_th=0.1, reliability_th=0.2):
"""
Parameters
----------
partition: numpy array
A 3D label array where each label represents a region
optical_flow: numpy array
        A 4D array of shape (frames, rows, cols, 2) with the optical flow for each frame
reliability: numpy array
A 3D array representing the flow reliability
matching_th: float, optional
matching threshold for the bipartite matching
reliability_th: float, optional
reliability threshold to stop tracking
Returns
-------
    A relabelled partition array of the same shape as `partition`, where
    regions matched across frames share the same label
"""
dimensions = len(partition.shape)
if dimensions != 3: # pragma: no cover
raise ValueError("Dimensions must be 3")
# link regions across frames
# perform a weighted bipartite matchings
frames = partition.shape[0]
width = partition.shape[1]
height = partition.shape[2]
new_partition = np.zeros_like(partition)
#the first frame is the same
new_partition[0,...] = partition[0,...]
current_label = np.max(np.unique(partition[0,...]))+1
for frame in range(frames-1):
labels = np.unique(new_partition[frame, ...])
labels_next = np.unique(partition[frame+1, ...])
# create a graph matching contours
bipartite = nx.Graph()
bipartite.add_nodes_from([l for l in labels])
bipartite.add_nodes_from([l for l in labels_next])
# find the correspondence of each label to the next frame
for label in labels:
px, py = np.where(new_partition[frame, ...] == label)
# find the mean reliability
rel = np.mean(reliability[frame, px, py])
if rel < reliability_th: # pragma: no cover
continue
# find where the regions projects to the next frame
npx = px + optical_flow[frame, px, py, 0]
npy = py + optical_flow[frame, px, py, 1]
#check for bounds
in_x = np.logical_and(0 <= npx, npx < width)
in_y = np.logical_and(0 <= npy, npy < height)
idx = np.logical_and(in_x, in_y)
npx = npx[idx]
npy = npy[idx]
count = np.bincount(partition[frame+1,
npx.astype(np.int),
npy.astype(np.int)].astype(np.int))
# get the count and eliminate weak correspondences
max_count = max(count)
nodes = np.nonzero(count > max_count*matching_th)[0]
weight = count[nodes]/max_count
for i, n in enumerate(nodes):
bipartite.add_edge(label, n, weight=weight[i])
# max weighted matching
matchings = nx.max_weight_matching(bipartite)
# assign propagated labels to the matchings
for a in matchings:
b = matchings[a]
#print("Match {0}-{1}".format(a,b))
if b not in labels_next:
continue
px, py = np.where(partition[frame+1, ...] == b)
new_partition[frame+1, px, py] = a
# assign new labels to non-matched regions
for n in bipartite.nodes():
if n not in labels_next:
continue
if n not in matchings:
px, py = np.where(partition[frame+1, ...] == n)
new_partition[frame+1, px, py] = current_label + 1
current_label += 1
return new_partition<file_sep>__author__ = 'guillem'
<file_sep>from . import features
from . import segmentation
from . import filter<file_sep>import numpy as np;
from skcv.image.segmentation.region_descriptors import region_mean_color
def mean_color_distance(img,r1,r2):
""" Returns the color mean of two regions
@param r1: Region 1
@param r2: Region 2
@return:euclidean distance between region color means
"""
if ("mean_color" not in r1):
r1["mean_color"] = region_mean_color(img,r1)
if ("mean_color" not in r2):
r2["mean_color"] = region_mean_color(img,r2)
return np.linalg.norm(r1["mean_color"]-r2["mean_color"])<file_sep>import numpy as np
def false_color(id_map):
""" Returns a numpy array of false color
Parameters
---------
id_map : 2D or 3D numpy array with id values
Returns
-------
    false_color: array with the same shape as the input, with 3 color values for each position
"""
ids = np.unique(id_map)
nids = len(ids)
# assign a random color to each id
colors = np.random.randint(0, 256, (nids, 3))
# check dimensions (should be 3 or 4)
if len(id_map.shape) == 2:
id_map = id_map[np.newaxis, ...]
# create a false color image of the original size and 3 channels
image_false_color = np.zeros((id_map.shape[0],
id_map.shape[1],
id_map.shape[2],
3))
dimensions = 3
for label, i in zip(ids, range(nids)):
(px, py, pz) = np.where(id_map == label)
image_false_color[px, py, pz, :] = colors[i, :]
#return the false color image
return np.squeeze(image_false_color)
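def _false_color_usage_sketch():
    """Hedged usage sketch added for illustration; not part of the original API.

    Maps a small 2D label image to a random RGB visualization and reports
    the resulting shape.
    """
    # a toy 4x4 label map with three regions
    labels = np.array([[0, 0, 1, 1],
                       [0, 0, 1, 1],
                       [2, 2, 1, 1],
                       [2, 2, 1, 1]])
    colored = false_color(labels)
    # for a 2D input the singleton frame axis is squeezed away: (4, 4, 3)
    return colored.shape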
<file_sep>import numpy as np
from numpy.linalg import svd
from math import log
from scipy.optimize import leastsq
from skcv.multiview.util import normalize_points
def fundamental_matrix_from_two_cameras(camera1, camera2):
""" Computes the fundamental matrix from two projection
matrices
Parameters
----------
camera1: numpy array
Projection matrix of first camera
camera2: numpy array
Projection matrix of second camera
Returns
-------
Fundamental matrix
"""
Pp = np.linalg.pinv(camera1)
# camera center
u, d, vh = svd(camera1)
center = vh[3, :]
# epipole on the second image
e = np.dot(camera2, center)
se = np.array(((0, -e[2], e[1]),
(e[2], 0, -e[0]),
(-e[1], e[0], 0)))
f_matrix = np.dot(se, np.dot(camera2, Pp))
return f_matrix
def eight_point_algorithm(x1, x2):
""" Computes the fundamental matrix from 8 (or more) projection
point pairs
Parameters
----------
x1: numpy array
projections of points in the first image, in homogeneous coordinates
x2: numpy array
projections of points in the second image, in homogeneous coordinates
Returns
-------
F, the fundamental matrix satisfying x2.T * F * x1 = 0
"""
n_points = x1.shape[1]
if x2.shape[1] != n_points: # pragma: no cover
raise ValueError("Shape must be the same")
# normalize points
x1n, t1 = normalize_points(x1, is_homogeneous=True)
x2n, t2 = normalize_points(x2, is_homogeneous=True)
# build the vector
a = np.vstack((x2n[0, :] * x1n,
x2n[1, :] * x1n,
x2n[2, :] * x1n))
# find F in the normalized coordinates and transform it
u, d, vh = svd(a.T, full_matrices=True)
f_matrix = np.reshape(vh[8, :], (3, 3))
# force the rank 2 constraint
u, d, vh = svd(f_matrix, full_matrices=True)
d[2] = 0
f_matrix = np.dot(u, np.dot(np.diag(d), vh))
# transform coordinates
f_matrix = np.dot(t2.T, np.dot(f_matrix, t1))
return f_matrix
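def _eight_point_usage_sketch():
    """Hedged usage sketch added for illustration; not part of the original API.

    Synthesizes two cameras, projects random 3D points and checks that the
    estimated fundamental matrix approximately satisfies the epipolar
    constraint x2^T F x1 = 0 for the generating correspondences.
    """
    n = 20
    # random 3D points in front of both cameras, in homogeneous coordinates
    points = np.vstack((np.random.uniform(-1, 1, (2, n)),
                        np.random.uniform(4, 8, (1, n)),
                        np.ones((1, n))))
    camera1 = np.eye(3, 4)
    # second camera: a small rotation about the y axis and a translation in x
    angle = 0.1
    rotation = np.array(((np.cos(angle), 0, np.sin(angle)),
                         (0, 1, 0),
                         (-np.sin(angle), 0, np.cos(angle))))
    camera2 = np.hstack((rotation, np.array(((-1.,), (0.,), (0.,)))))
    x1 = np.dot(camera1, points)
    x2 = np.dot(camera2, points)
    x1 /= x1[2, :]
    x2 /= x2[2, :]
    f_matrix = eight_point_algorithm(x1, x2)
    # residuals of the epipolar constraint; close to numerical precision
    residuals = np.abs(np.sum(x2 * np.dot(f_matrix, x1), axis=0))
    return residuals.max()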
def right_epipole(f_matrix):
"""
Computes the right epipole (first image) of fundamental matrix
the right epipole satisfies Fe = 0
**Parameters**
f_matrix: numpy array
Fundamental matrix
**Returns**
the right epipole
"""
u, d, vh = svd(f_matrix)
return vh[2, :]
def left_epipole(f_matrix):
"""
    Computes the left epipole (second image) of the fundamental matrix
    the left epipole satisfies e.T F = 0
**Parameters**
f_matrix: numpy array
Fundamental matrix
**Returns**
the left epipole
"""
u, d, vh = svd(f_matrix)
return u[:, 2]
def canonical_cameras_from_f(f_matrix):
"""
Retrieves the two canonical cameras given a fundamental matrix
**Parameters**
f_matrix: numpy array
Fundamental matrix
**Returns**
one pair of canonical cameras
"""
# the first camera is the identity
camera1 = np.eye(3, 4)
e = left_epipole(f_matrix)
se = np.array(((0, -e[2], e[1]),
(e[2], 0, -e[0]),
(-e[1], e[0], 0)))
camera2 = np.hstack((np.dot(se, f_matrix), e[:, np.newaxis]))
return camera1, camera2
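def _canonical_cameras_usage_sketch():
    """Hedged consistency check added for illustration; not part of the original API.

    Builds a fundamental matrix from two synthetic cameras, extracts a
    canonical camera pair from it and verifies that this pair induces a
    fundamental matrix proportional to the original one.
    """
    camera1 = np.eye(3, 4)
    # an arbitrary second camera: identity rotation, translated in x, y and z
    camera2 = np.hstack((np.eye(3), np.array(((-1.,), (0.5,), (0.2,)))))
    f_matrix = fundamental_matrix_from_two_cameras(camera1, camera2)
    p1, p2 = canonical_cameras_from_f(f_matrix)
    f_canonical = fundamental_matrix_from_two_cameras(p1, p2)
    # the two matrices should be parallel (equal up to a non-zero scale)
    a = f_matrix.ravel() / np.linalg.norm(f_matrix)
    b = f_canonical.ravel() / np.linalg.norm(f_canonical)
    return abs(np.dot(a, b))  # close to 1 when the matrices are parallel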
def sampson_error(x1, x2, f_matrix):
"""
Computes the sampson error for a set of point pairs
Parameters
----------
x1: numpy array
projections of points in the first image, in homogeneous coordinates
x2: numpy array
projections of points in the second image, in homogeneous coordinates
f_matrix: numpy_array
fundamental matrix
Returns
-------
sampson error of each point pair
"""
f_x1 = np.dot(f_matrix, x1)
f_x2 = np.dot(f_matrix.T, x2)
#get the denominator
den = np.sum(f_x1[:2, :] ** 2, axis=0) +\
np.sum(f_x2[:2, :] ** 2, axis=0)
#get the numerator
num = np.sum((x2 * f_x1), axis=0)**2
return num / den
def reprojection_error(x1, x2, f_matrix):
"""
    Computes the reprojection error for a set of point pairs
Parameters
----------
x1: numpy array
projections of points in the first image, in homogeneous coordinates
x2: numpy array
projections of points in the second image, in homogeneous coordinates
f_matrix: numpy_array
fundamental matrix
Returns
-------
reprojection error of each point pair
"""
def __sampson_residual(f, x1, x2):
"""
computes the residual of the sampson error
"""
f_matrix = np.reshape(f, (3, 3))
f_x1 = np.dot(f_matrix, x1)
f_x2 = np.dot(f_matrix.T, x2)
#get the denominator
den = np.sum(f_x1[:2, :] ** 2, axis=0) +\
np.sum(f_x2[:2, :] ** 2, axis=0)
#get the numerator
num = np.sum((x2 * f_x1), axis=0)
return num / np.sqrt(den)
def robust_f_estimation(x1, x2,
max_iter=1000,
distance='sampson',
n_samples=8,
prob = 0.99,
refine_result=True,
inlier_threshold=2):
""" Computes the fundamental matrix using the eight point algorithm
(Hartley 1997)
Parameters
----------
x1: numpy array
projections of points in the first image, in homogeneous coordinates
x2: numpy array
projections of points in the second image, in homogeneous coordinates
max_iter: int, optional
maximum number of iterations of the ransac algorithm
    distance: string, optional
        distance to use to find inliers/outliers
    n_samples: int, optional
        number of points to sample at each RANSAC iteration
    prob: float, optional
        probability of drawing an outlier-free sample
    refine_result: bool, optional
        whether a non-linear refinement is performed after RANSAC
inlier_threshold: float, optional
maximum distance to consider a point pair inlier
Returns
-------
F, the fundamental matrix satisfying x2.T * F * x1 = 0
"""
iteration = 0
n_points = x1.shape[1]
is_inlier = np.zeros(n_points, dtype=bool)
# variables to store the best result found
best_inliers = is_inlier
best_n_inliers = 0
while iteration < max_iter:
#select 8 points at random
idx = np.random.choice(n_points, n_samples, replace=False)
selected_x1 = x1[:, idx]
selected_x2 = x2[:, idx]
#get inliers
f_matrix = eight_point_algorithm(selected_x1,
selected_x2)
# find the error distance
if distance == 'sampson':
e = sampson_error(x1, x2, f_matrix)
else: # pragma : no cover
raise ValueError()
is_inlier = e < inlier_threshold
n_inliers = np.count_nonzero(is_inlier)
if n_inliers > best_n_inliers:
best_inliers = is_inlier
best_n_inliers = n_inliers
#update max_iterations if estimation is improved
# the epsilon (1e-10) is added in case of all inliers
eps = 1 - n_inliers / n_points
new_iter = log(1 - prob) / log(1e-10 + 1 - (1-eps)**n_samples)
if new_iter < max_iter:
max_iter = new_iter
iteration += 1
#refine the estimate using all inliers
best_x1 = x1[:, best_inliers]
best_x2 = x2[:, best_inliers]
f_matrix = eight_point_algorithm(best_x1, best_x2)
if refine_result:
if distance == 'sampson':
f = np.reshape(f_matrix, 9)
f_matrix, jac = leastsq(__sampson_residual, f, args=(best_x1, best_x2))
f_matrix = np.reshape(f_matrix, (3, 3))
return f_matrix<file_sep>from cmath import sqrt
import numpy as np
import networkx as nx
from math import sqrt
def undersegmentation_error(partition, groundtruth,
tolerance=0.05):
""" Computes the undersegmentation error defined as:
    ue(G_i) = (sum of Area(S_k) overlapping G_i - Area(G_i)) / Area(G_i)
    where G_i is a groundtruth region and
    S_k are the partition regions that overlap it
    The total error is the average across regions
Parameters
----------
partition: (N,M) array
array with obtained labels
groundtruth: (N,M) array or list
array(list with groundtruth labels
tolerance: float, optional
threshold to consider oversegmentation
Returns
-------
The undersegmentation error
"""
gt_list = [];
if type(groundtruth) != list:
gt_list.append(groundtruth)
else:
gt_list = gt_list + groundtruth
# partition labels
seg_labels = np.unique(partition)
areas = {}
for s_i in seg_labels:
area = np.count_nonzero(partition == s_i)
areas[s_i] = area
# evaluate each groundtruth segmentation
err = 0
for segmentation in gt_list:
gt_labels = np.unique(segmentation)
err_s = 0
# get error for each groundtruth region
for g_i in gt_labels:
# get groundtruth area
area = np.count_nonzero(segmentation == g_i)
# compute intersection
total_area = 0.
for s_i in seg_labels:
n = np.count_nonzero((g_i == segmentation) *
(partition == s_i))
if n > tolerance*area:
total_area += areas[s_i]
err_s += abs(total_area - area) / area
err += err_s/len(gt_labels)
return err / len(gt_list)
def segmentation_accuracy(partition, groundtruth):
""" Computes the segmentation accuracy defined as:
accu(G_i) = (sum_{Area(S_k) \in area(G_i)}) / area(G_i)
where G_i is the groundtruth and
S_k is the obtained partition where the majority of S_k is in G_i
    The total accuracy is the average across regions
Parameters
----------
partition: (N,M) array
array with obtained labels
groundtruth: (N,M) array or list
array(list with groundtruth labels
Returns
-------
The segmentation accuracy
"""
gt_list = [];
if type(groundtruth) != list:
gt_list.append(groundtruth)
else:
gt_list = gt_list + groundtruth
# partition labels
seg_labels = np.unique(partition)
# evaluate each groundtruth segmentation
accu = 0
for segmentation in gt_list:
gt_labels = np.unique(segmentation)
#find the area of each segment
area = np.bincount(segmentation.astype(np.int).flatten())
accu_s = 0
# match each pixel to a groundtruth segment
for s_k in seg_labels:
coords = np.where(partition == s_k)
#find the intersection
intersection = np.bincount(segmentation[coords].flatten().astype(np.int))
# get the maximum intersecting groundtruth segment
g_i = np.argmax(intersection)
accu_s += intersection[g_i] / area[g_i]
accu += accu_s/len(gt_labels)
return accu / len(gt_list)
def boundary_detection(partition, groundtruth, tolerance = 0.04):
""" Measures boundary detection
Parameters
----------
partition: (N,M) array
array with obtained labels
groundtruth: (N,M) array or list
array(list with groundtruth labels
tolerance: float, optional
maximum distance of considered boundaries relative
to the diagonal
Returns
-------
The precision recall boundaries
"""
# dictionary holding contours and their status (matched/not matched)
contours = {}
gt_contours = {}
# find horizontal contours for segmentation
seg_hx, seg_hy = np.where(partition[:-1, :] != partition[1:, :])
# find vertical contours for segmentation
seg_vx, seg_vy = np.where(partition[:, :-1] != partition[:, 1:])
# the third number reflects:
# 0/1: horizontal/vertical contour
# the forth number reflect
# 0/1: segmentation/groundtruth contour
for px,py in zip(seg_hx,seg_hy):
contours[(px, py, 0, 0)] = 0
for px,py in zip(seg_vx, seg_vy):
contours[(px, py, 1, 0)] = 0
# find horizontal contours for groundtruth
seg_hx, seg_hy = np.where(groundtruth[:-1, :] != groundtruth[1:, :])
# find vertical contours for groundtruth
seg_vx, seg_vy = np.where(groundtruth[:, :-1] != groundtruth[:, 1:])
# the third number reflects:
# 0/1: horizontal/vertical contour
# the forth number reflect
# 0/1: segmentation/groundtruth contour
for px,py in zip(seg_hx,seg_hy):
gt_contours[(px, py, 0, 1)] = 0
for px,py in zip(seg_vx, seg_vy):
gt_contours[(px, py, 1, 1)] = 0
# create a graph matching contours
bipartite = nx.Graph()
bipartite.add_nodes_from(contours)
bipartite.add_nodes_from(gt_contours)
diagonal = sqrt(partition.shape[0]**2 + partition.shape[1]**2)
# maximum distance to search for
D = int(tolerance * diagonal)
for contour in contours:
px = contour[0]
py = contour[1]
# find groundtruth contours around a neighborhood
for x in range(px - D, px + D + 1):
for y in range(py - D, py + D + 1):
hcontour = (x, y, 0, 1)
vcontour = (x, y, 1, 1)
# add an edge if a contour is found
if hcontour in bipartite:
bipartite.add_edge(contour, hcontour)
if vcontour in bipartite:
                    bipartite.add_edge(contour, vcontour)
# perform a matching
# matches contains twice the matchings
matches = nx.max_weight_matching(bipartite)
print("Contours {0} and {1} matches {2}".format(len(contours),
len(gt_contours), len(matches)))
# find precision/recall values
true_positives = len(matches)/2
false_positives = len(contours) - len(matches)/2
false_negatives = len(gt_contours) - len(matches)/2
precision = true_positives / (true_positives + false_positives)
recall = true_positives / (true_positives + false_negatives)
return precision, recall
def explained_variation(img, partition):
""" Computes the explained variation defined as:
sum over voxels (\mu_i - \mu) / (\voxel - \mu)
where \mu is the video mean and \mu_i is the region mean
"""
# partition labels
seg_labels = np.unique(partition)
dimensions = img.shape
#compute the color mean
mu = np.zeros(dimensions[-1])
#create an array to compute the mse error
mse = np.zeros(dimensions[:-1])
for i in range(dimensions[-1]):
mu[i] = np.mean(img[..., i])
mse += (img[..., i] - mu[i])**2
#sum the error
mse_error = np.sum(mse)
#find the mse error for each
mse_reg = 0
for segment in seg_labels:
coords = np.where(partition == segment)
mu_i = np.mean(img[coords], axis=0)
mse_reg += np.sum((img[coords] - mu_i)**2)
return mse_reg / mse_error<file_sep>import numpy as np
from numpy.testing import assert_equal
from skcv.image.segmentation.error_measures import undersegmentation_error
from skcv.image.segmentation.error_measures import boundary_detection
from skcv.image.segmentation.error_measures import segmentation_accuracy
from skcv.image.segmentation.error_measures import explained_variation
def test_undersegmentation_error():
N = 100
M = 100
part = np.zeros((N, M))
part[:N/2, :M/2] = 0
part[N/2:, :M/2] = 1
part[:N/2, M/2:] = 2
part[N/2:, M/2:] = 3
part_gt = part.copy()
part[N/2, M/4] = 0
part[N/4-1, M/2-1] = 2
ue = undersegmentation_error(part, part_gt)
assert_equal(ue, 0.0002)
# test with lists
ue = undersegmentation_error(part, [part_gt])
assert_equal(ue, 0.0002)
ue = undersegmentation_error(part, part_gt, tolerance=0)
assert_equal(ue, 0.5001)
def test_segmentation_accuracy():
N = 100
M = 100
part = np.zeros((N, M))
part[:N/2, :M/2] = 0
part[N/2:, :M/2] = 1
part[:N/2, M/2:] = 2
part[N/2:, M/2:] = 3
part_gt = part.copy()
accu = segmentation_accuracy(part, part_gt)
assert_equal(accu, 1)
part[N/2, M/4] = 0
part[N/4-1, M/2-1] = 2
accu = segmentation_accuracy(part, part_gt)
assert_equal(accu, 0.99980000000000002)
# test with lists
accu = segmentation_accuracy(part, [part_gt])
assert_equal(accu, 0.99980000000000002)
def test_boundary_detection():
n = 10
m = 10
part = np.zeros((n, m))
part[:n/2, :m/2] = 0
part[n/2:, :m/2] = 1
part[:n/2, m/2:] = 2
part[n/2:, m/2:] = 3
part_gt = part.copy()
part[n/2, m/4] = 0
part[n/4-1, m/2-1] = 2
precision, recall = boundary_detection(part, part_gt, 0)
assert_equal(precision, 0.75)
assert_equal(recall, 0.9)
def test_explained_variation():
n = 10
m = 10
part = np.zeros((n, m))
part[:n/2, :m/2] = 0
part[n/2:, :m/2] = 1
part[:n/2, m/2:] = 2
part[n/2:, m/2:] = 3
img = np.fromfunction(lambda i, j, k: i+j, (n, m, 3), dtype=int)
ev = explained_variation(img, part)
assert_equal(ev, 0.24242424242424243)
img[:, :, 0] = part
img[:, :, 1] = part
img[:, :, 2] = part
ev = explained_variation(img, part)
assert_equal(ev, 0)<file_sep>from .io import read_flow_file
from .io import write_flow_file
from .reliability import (flow_reliability,
occlusion_reliability,
structure_reliability,
variation_reliability)
from .visualization import flow_to_image
<file_sep>import numpy as np
from numpy.linalg import svd, norm, inv
from skcv.multiview.util import normalize_points
from skcv.multiview.two_views import eight_point_algorithm, right_epipole, left_epipole
def projective_factorization(x, max_iterations=1):
"""
    Computes the projective structure and camera matrices from point projections
    Parameters
    ----------
    x: list
        list of numpy arrays, representing the point projections in each view
    max_iterations: int, optional
        maximum number of iterations
    Returns
    -------
    cameras: list with the estimated camera matrices
    x_3d: (4, n_points) array with the reconstructed projective structure
"""
n_views = len(x)
n_points = x[0].shape[1]
iterations = 0
#lambda matrix, approximate depths
l = np.ones((n_views, n_points))
#normalization matrices
norm_matrices = []
# normalize coordinates
xn = np.zeros((3*n_views, n_points))
for i in range(n_views):
#find normalization matrix for projections i
x_norm, T = normalize_points(x[i], is_homogeneous=True)
xn[3*i:3*(i+1), :] = x_norm
norm_matrices.append(T)
while iterations < max_iterations:
# normalize the lambda matrix
lr_norm = norm(l, axis=1)
ln = l / lr_norm[:, np.newaxis]
lc_norm = norm(ln, axis=0)
ln /= lc_norm
# repeat the lambdas
ln = np.repeat(ln, 3, axis=0)
#build the factorization matrix
fact_matrix = ln*xn
u, d, vh = svd(fact_matrix)
print(d[3] / d[4])
d = d[:4]/d[0]
# from the svd decomposition we can find the projections and 3d points
p_matrices = u[:, :4]
x_3d = np.dot(np.diag(d), vh[:4, :])
iterations += 1
if iterations != max_iterations:
w_matrix = np.dot(p_matrices, x_3d)
for i in range(n_views):
l[i, :] = w_matrix[3*i+2, :]
cameras = []
for i in range(n_views):
# denormalize camera matrices
c_matrix = np.dot(inv(norm_matrices[i]), p_matrices[3*i:3*(i+1), :])
cameras.append(c_matrix)
return cameras, x_3d<file_sep>import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
from skcv.multiview.util.points_functions import *
def test_coordinate_transformation():
x = np.arange(10)
y = np.arange(10)
points = np.vstack((x, y))
points_h = euclidean_to_homogeneous(points)
# check the conversion
assert_equal(points_h[:2, :], points)
assert_equal(points_h[2, :], np.ones(10))
points_e = homogeneous_to_euclidean(points_h)
assert_almost_equal(points_e, points)
def test_normalize_points():
x = np.arange(10)
y = np.arange(10)
ones = np.ones(10)
points = np.vstack((x, y))
# result of the transformation
t_gt = np.array([[0.34815531, 0., -1.5666989],
[0., 0.34815531, -1.5666989],
[0., 0., 1.]])
# normalized points
x_gt = np.array([[-1.5666989, -1.21854359, -0.87038828, -0.52223297, -0.17407766,
0.17407766, 0.52223297, 0.87038828, 1.21854359, 1.5666989],
[-1.5666989, -1.21854359, -0.87038828, -0.52223297, -0.17407766,
0.17407766, 0.52223297, 0.87038828, 1.21854359, 1.5666989]])
x_n, t = normalize_points(points, is_homogeneous=False)
assert_almost_equal(x_n, x_gt)
assert_almost_equal(t, t_gt)
#do the normalization in homogeneous coordinates
points_h = np.vstack((x, y, ones))
x_n, t = normalize_points(points_h, is_homogeneous=True)
assert_almost_equal(x_n, np.vstack((x_gt, ones)))
assert_almost_equal(t, t_gt)
def test_hnormalize():
Xh2D = np.array((10, 4, 2))
Xh3D = np.array((3, 6, 9, 3))
xh2D = hnormalize(Xh2D)
xh3D = hnormalize(Xh3D)
assert_almost_equal(xh2D, (5, 2, 1))
assert_almost_equal(xh3D, (1, 2, 3, 1))<file_sep>from .bpt import BPT
from .error_measures import (boundary_detection,
explained_variation,
segmentation_accuracy,
undersegmentation_error)
from .region_descriptors import (region_dominant_colors,
region_color_histograms,
region_mean_color)
from .region_distances import mean_color_distance<file_sep>import numpy as np
from numpy.linalg import inv, svd
from skcv.multiview.two_views.fundamental_matrix import *
from ._triangulate_kanatani_cython import _triangulate_kanatani
def _triangulate_hartley(x1, x2, f_matrix, P1, P2):
"""
triangulates points according to
<NAME> and <NAME> (2003). \"Multiple View Geometry in computer vision.\"
"""
n_points = x1.shape[1]
#3D points
x_3d = np.zeros((4, n_points))
for i in range(n_points):
t = np.eye(3)
tp = np.eye(3)
# define transformation
t[0, 2] = -x1[0, i]
t[1, 2] = -x1[1, i]
tp[0, 2] = -x2[0, i]
tp[1, 2] = -x2[1, i]
# translate matrix F
f = np.dot(inv(tp).T, np.dot(f_matrix, inv(t)))
# find normalized epipoles
e = right_epipole(f)
ep = left_epipole(f)
e /= (e[0] ** 2 + e[1] ** 2)
ep /= (ep[0] ** 2 + ep[1] ** 2)
r = np.array(((e[0], e[1], 0), (-e[1], e[0], 0), (0, 0, 1)))
rp = np.array(((ep[0], ep[1], 0), (-ep[1], ep[0], 0), (0, 0, 1)))
f = np.dot(rp, np.dot(f, r.T))
f1 = e[2]
f2 = ep[2]
a = f[1, 1]
b = f[1, 2]
c = f[2, 1]
d = f[2, 2]
# build a degree 6 polynomial
coeffs = np.zeros(7)
coeffs[0] = -(2 * a ** 2 * c * d * f1 ** 4 - 2 * a * b * c ** 2 * f1 ** 4)
        coeffs[1] = -(-2 * a ** 4 - 4 * a ** 2 * c ** 2 * f2 ** 2 + 2 * a ** 2 * d ** 2 * f1 ** 4 -
2 * b ** 2 * c ** 2 * f1 ** 4 - 2 * c ** 4 * f2 ** 4)
coeffs[2] = - (-8 * a ** 3 * b + 4 * a ** 2 * c * d * f1 ** 2 -
8 * a ** 2 * c * d * f2 ** 2 - 4 * a * b * c ** 2 * f1 ** 2 -
8 * a * b * c ** 2 * f2 ** 2 + 2 * a * b * d ** 2 * f1 ** 4 -
2 * b ** 2 * c * d * f1 ** 4 - 8 * c ** 3 * d * f2 ** 4)
coeffs[3] = - (-12 * a ** 2 * b ** 2 + 4 * a ** 2 * d ** 2 * f1 ** 2 -
4 * a ** 2 * d ** 2 * f2 ** 2 - 16 * a * b * c * d * f2 ** 2 -
4 * b ** 2 * c ** 2 * f1 ** 2 - 4 * b ** 2 * c ** 2 * f2 ** 2 -
12 * c ** 2 * d ** 2 * f2 ** 4)
coeffs[4] = - (2 * a ** 2 * c * d - 8 * a * b ** 3 - 2 * a * b * c ** 2 +
4 * a * b * d ** 2 * f1 ** 2 - 8 * a * b * d ** 2 * f2 ** 2 -
4 * b ** 2 * c * d * f1 ** 2 - 8 * b ** 2 * c * d * f2 ** 2 -
8 * c * d ** 3 * f2 ** 4)
coeffs[5] = - (2 * a ** 2 * d ** 2 - 2 * b ** 4 - 2 * b ** 2 * c ** 2 -
4 * b ** 2 * d ** 2 * f2 ** 2 - 2 * d ** 4 * f2 ** 4)
coeffs[6] = -2 * a * b * d ** 2 + 2 * b ** 2 * c * d
roots = np.roots(coeffs)
        # evaluate the polynomial at the roots and at a large value approximating infinity
vals = np.hstack((roots, [1e20]))
min_s = 1e200
min_v = 0
# check all the polynomial roots
for k in range(len(vals)):
x = np.real(vals[k])
s_t = x ** 2 / (1 + f1 ** 2 * x ** 2) + (c * x + d) ** 2 / \
((a * x + b) ** 2 + f2 ** 2 * ((c * x + d) ** 2))
if s_t < min_s:
min_v = np.real(vals[k])
min_s = s_t
if min_v < 1e10:
l = np.array((min_v * f1, 1, -min_v))
lp = np.array((0, min_v, 1))
else: # pragma: no cover
l = np.array((f1, 0, -1))
lp = np.array((0, 1, 0))
lp = np.dot(f, lp)
# find the point closest to the lines
x = np.array((-l[0]*l[2], -l[1]*l[2], l[0]**2 + l[1]**2))
xp = np.array((-lp[0]*lp[2], -lp[1]*lp[2], lp[0]**2 + lp[1]**2))
x = np.dot(inv(t), np.dot(r.T, x))
xp = np.dot(inv(tp), np.dot(rp.T, xp))
# triangulate
x_3d[:, i] = triangulate(x, xp, P1, P2)
# return points
return x_3d / x_3d[3, :]
def triangulate(x1, x2, P1, P2):
"""
Triangulates the 3D position from two projections and two cameras
Parameters
----------
x1: numpy array
Projections on the first image
x2: numpy array
        Projections on the second image
    P1: numpy array
        Camera matrix of the first view
    P2: numpy array
        Camera matrix of the second view
Returns
-------
The 3D point in homogeneous coordinates
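    Examples
    --------
    A minimal sketch with two canonical translated cameras and the
    projections of the point (0, 0, 5, 1) (values are illustrative):

    >>> import numpy as np
    >>> p1 = np.eye(3, 4)
    >>> p2 = np.hstack((np.eye(3), np.array([[-1.], [0.], [0.]])))
    >>> x1 = np.array((0., 0., 1.))
    >>> x2 = np.array((-0.2, 0., 1.))
    >>> X = triangulate(x1, x2, p1, p2)
    >>> bool(np.allclose(X / X[3], (0., 0., 5., 1.)))
    True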
"""
a = np.zeros((4, 4))
a[0, :] = x1[0] * P1[2, :] - P1[0, :]
a[1, :] = x1[1] * P1[2, :] - P1[1, :]
a[2, :] = x2[0] * P2[2, :] - P2[0, :]
a[3, :] = x2[1] * P2[2, :] - P2[1, :]
u, d, v = svd(a)
# the point lies on the null space of matrix a
return v[3, :]
def optimal_triangulation(x1, x2, f_matrix, cameras=None, method='Hartley'):
"""
Triangulates point projections using an optimal solution
Parameters
----------
x1: numpy array
Projections on the first image
x2: numpy array
Projections on the second image
f_matrix: numpy array
Fundamental matrix
cameras: 2-tuple, optional
cameras from the two projections
        if none are provided, the two canonical cameras are obtained
    method: str, optional
        triangulation method, either 'Hartley' or 'Kanatani'
    Returns
    -------
    The triangulated 3D points in homogeneous coordinates
"""
#xn, t = normalize_points(x, is_homogeneous=True)
if cameras is None: # pragma: no cover
p1, p2 = canonical_cameras_from_f(f_matrix)
else:
p1, p2 = cameras
    if method == 'Hartley':
        x_3d = _triangulate_hartley(x1, x2, f_matrix, p1, p2)
    elif method == 'Kanatani':
        x_3d = _triangulate_kanatani(x1, x2, f_matrix, p1, p2)
    else:  # pragma: no cover
        raise ValueError("Unknown triangulation method: {0}".format(method))
    return x_3d
<file_sep>from . import triangulation
from . import fundamental_matrix
from .fundamental_matrix import (eight_point_algorithm,
fundamental_matrix_from_two_cameras,
canonical_cameras_from_f,
left_epipole,
right_epipole,
robust_f_estimation,
sampson_error)
from .triangulation import (optimal_triangulation,
triangulate)<file_sep>from .linear_autocalibration import linear_autocalibration<file_sep>import numpy as np
from numpy.testing import assert_equal
from skcv.video.optical_flow.reliability import flow_reliability
from skcv.video.segmentation.tbpt import TBPT
def test_tbpt():
N = 99
M = 99
n_frames = 2
fflow = np.zeros((n_frames, N, M, 2))
bflow = np.zeros((n_frames, N, M, 2))
fflow[0, N / 3:2 * N / 3, M / 3:2 * M / 3, 0] = 1
fflow[0, N / 3:2 * N / 3, M / 3:2 * M / 3, 1] = 1
bflow[1, 1 + N / 3:2 * N / 3, 1 + M / 3:2 * M / 3, 0] = -1
bflow[1, 1 + N / 3:2 * N / 3, 1 + M / 3:2 * M / 3, 1] = -1
video = np.zeros((2, N, M, 3))
fcoords = np.where(fflow[0, ..., 0] == 1)
bcoords = np.where(bflow[1, ..., 0] == -1)
video[0, fcoords[0], fcoords[1], :] = 200
video[1, bcoords[0], bcoords[1], :] = 200
rel = np.zeros((n_frames, N, M))
for frame in range(n_frames-1):
rel[frame, ...] = flow_reliability(video[frame, ...],
fflow[frame, ...],
bflow[frame + 1, ...],
use_structure=False)
part = (video[..., 1] != 0).astype(np.int)
#define a distance for the TBPT
#arguments: video, flow, region1, region2
distance = lambda v, fflow, r1, r2: 1
tbpt = TBPT(video, part, distance, optical_flow=fflow)
#check regions
assert_equal(tbpt.nodes[0]["parent"], 2)
assert_equal(tbpt.nodes[1]["parent"], 2)
assert_equal(tbpt.nodes[2]["childs"], [0, 1])<file_sep>import numpy as np
def partition_mean_color(img, id_map):
""" Returns a numpy array of false color
Parameters
---------
img : array
image or volume with color values
id_map : array
2D or 3D numpy array with id values
Returns
-------
    mean_color: array with the same shape as the input image, holding the region mean color at each position
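    Examples
    --------
    A tiny sketch: each of the two regions is painted with its mean color.

    >>> import numpy as np
    >>> img = np.zeros((2, 2, 3))
    >>> img[0, :, :] = [0., 1., 2.]
    >>> out = partition_mean_color(img, np.array([[0, 0], [1, 1]]))
    >>> bool(np.allclose(out[0], [0., 1., 2.])) and bool(np.allclose(out[1], 0.))
    True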
"""
ids = np.unique(id_map)
nids = len(ids)
# create a mean color image of the original size
image_mean_color = np.zeros_like(img)
for label, i in zip(ids, range(nids)):
coords = np.where(id_map == label)
coords = [c for c in coords] + [slice(img.shape[-1])]
mean_color = np.mean(img[coords], axis=0)
image_mean_color[coords] = mean_color
# return the false color image
return image_mean_color
<file_sep>import numpy as np
def random_sphere(N, radius, center=None):
"""
Generates N points randomly distributed on a sphere
Parameters
----------
N: int
Number of points to generate
radius: float
Radius of the sphere
    center: numpy array, optional
center of the sphere. (0,0,0) default
Returns
-------
Array (3, N) with the points
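    Examples
    --------
    A quick sketch checking that the generated points lie on the sphere:

    >>> import numpy as np
    >>> pts = random_sphere(5, radius=2.0)
    >>> bool(np.allclose(np.linalg.norm(pts, axis=0), 2.0))
    True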
"""
u = 2*np.random.random(N)-1
theta = 2*np.pi*np.random.random(N)
points = np.array((radius*np.sqrt(1-u**2)*np.cos(theta),
radius*np.sqrt(1-u**2)*np.sin(theta), radius*u))
if center is not None:
c = np.repeat(center, N)
c = np.reshape(c, (3, N))
points += c
return points
def random_ball(N, radius, center=None):
"""
Generates N points randomly distributed on a ball
    x^2 + y^2 + z^2 <= radius^2
Parameters
----------
N: int
Number of points to generate
radius: float
Radius of the sphere
Returns
-------
Array (3, N) with the points
"""
r = np.random.random(N)
x = np.random.normal(0, 1, (3, N))
norm = np.linalg.norm(x, axis=0)
points = radius * np.power(r, 1./3.) * x/norm
if center is not None:
c = np.repeat(center, N)
c = np.reshape(c, (3, N))
points += c
return points
def random_cube(N, size, center=None):
"""
Generates N points randomly distributed on cube
Parameters
----------
N: int
Number of points to generate
size: float
Size of the side of the cube
Returns
-------
Array (3, N) with the points
"""
x = size*np.random.random((3, N)) - 0.5*size
face = np.random.randint(0, 3, N)
side = 2*np.random.randint(0, 2, N)-1
x[face, np.arange(0, N)] = (0.5*size)*side
if center is not None:
c = np.repeat(center, N)
c = np.reshape(c, (3, N))
x += c
return x<file_sep>import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
def plot_point_cloud(x): # pragma: no cover
"""
Plots point cloud as a scattered 3D plot
Cannot be tested in a terminal-only framework
"""
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.plot(x[0, :], x[1, :], x[2, :], '*')
plt.show()<file_sep>import numpy as np
from skcv.graph.rag import rag
from numpy.testing import assert_equal
def test_rag():
#test rag for 2D
ids = np.linspace(0,1000,100, endpoint=True)
p_2d = np.reshape(ids, (10, 10)).astype(np.int)
graph, regions = rag(p_2d)
assert_equal(len(graph.nodes()), 100)
assert_equal(len(regions), 100)
assert_equal(len(graph.edges()), 180)
graph, regions = rag(p_2d, discard_axis=[0])
assert_equal(len(graph.edges()), 90)
#test rag for 3D
ids = np.linspace(0,1000,1000, endpoint=False)
p_3d = np.reshape(ids, (10, 10, 10)).astype(np.int)
graph, regions = rag(p_3d)
assert_equal(len(graph.nodes()), 1000)
assert_equal(len(regions), 1000)
assert_equal(len(graph.edges()), 2700)
pass<file_sep>import numpy as np
from scipy.cluster.vq import kmeans
def region_mean_color(img, region):
""" Region mean color
Parameters
----------
img: numpy array (N,M,D)
color/gray image
region: dict
dictionary containing the coordinates of the region
Returns
-------
avg: numpy 1D vector of D elements
color mean of the region
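    Examples
    --------
    A small sketch where the region covers the whole 2x2 image:

    >>> import numpy as np
    >>> img = np.arange(12, dtype=float).reshape(2, 2, 3)
    >>> region = {"coords": np.where(img[..., 0] >= 0)}
    >>> [float(c) for c in region_mean_color(img, region)]
    [4.5, 5.5, 6.5]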
"""
    # construct advanced indexing
coords = [c for c in region["coords"]] + [slice(img.shape[-1])]
avg = np.mean(img[coords], axis=0)
return avg
def region_color_histograms(img, region, bins = 10):
""" Region mean color
Parameters
----------
img: numpy array (N,M,D)
color/gray image with [0..1] range for each channel
region: dict
dictionary containing the coordinates of the region
bins: int, optional
number of bins for each channel
Returns
-------
hist: list
list of ndarrays representing histograms
edges: list
list of ndarrays representing the edge bins
"""
hist = []
edges = []
channels = img.shape[-1]
    # construct advanced indexing
coords = [c for c in region["coords"]]
for i in range(channels):
c = coords + [i]
values = img[c]
h,e = np.histogram(values, bins=bins, range=(0, 1))
hist.append(h)
edges.append(e)
return hist, edges
def region_dominant_colors(img, region, colors=8):
""" Region mean color
Parameters
----------
img: numpy array (N,M,D)
color/gray image
region: dict
dictionary containing the coordinates of the region
    colors: int, optional
        number of color clusters
Returns
-------
    cb: ndarray
        array with the centroid colors found by k-means
error: float
squared error of the clustering process
"""
    # construct advanced indexing
coords = [c for c in region["coords"]] + [slice(img.shape[-1])]
values = img[coords]
cb, error = kmeans(values, colors)
return cb, error<file_sep>from .projective_factorization import projective_factorization
from .projective_functions import swap_signs<file_sep>from .camera import (look_at_matrix,
calibration_matrix,
camera_center,
camera_parameters,
internal_parameters)
from .plots import plot_point_cloud
from .points_functions import (euclidean_to_homogeneous,
homogeneous_to_euclidean,
normalize_points)
from .synthetic_point_cloud import (random_cube,
random_sphere,
random_ball)<file_sep>import numpy as np
from numpy.linalg import svd, inv
from skcv.multiview.util import camera_parameters
def linear_autocalibration(cameras, internal_parameters, n_iterations=50):
"""
Computes the homography H to transform a projective reconstruction
into a metric such that:
Pm = P*H
Xm = H^-1*X
Warning: it only works for general motions
Parameters
----------
cameras: list
List of camera matrices
internal_parameters: numpy array
Approximate internal camera matrix
n_iterations: int, optional
number of iterations for varying the variance of the focal length
Returns
-------
Homography satisfying the above equations
"""
n_views = len(cameras)
k_pars = internal_parameters
ki = inv(k_pars)
ratio = k_pars[1, 1] / k_pars[0, 0]
norm_cameras = [np.dot(ki, cam) for cam in cameras]
    betas = 0.1*np.exp(0.3*np.linspace(0, n_iterations, n_iterations))
min_cost = 1e200
best_t = np.array(())
# transform a 16 vector of a symmetric matrix to a 10-vector
# it could be precomputed
idx = np.array((0, 1, 2, 3, 1, 4, 5, 6, 2, 5, 7, 8, 3, 6, 8, 9))
h = np.zeros((16, 10))
for i in range(10):
h[idx == i, i] = 1
    # assumptions about the deviation of the normalized internal parameters
skew_sigma = 0.01
center_sigma = 0.1
focal_sigma = 0.2
for beta in betas:
# build the least squares problem
chi = np.zeros((6*n_views, 10))
for i in range(n_views):
p1 = norm_cameras[i][0, :]
p2 = norm_cameras[i][1, :]
p3 = norm_cameras[i][2, :]
#linearize the absolute quadric constraints
chi[6*i, :] = (1./skew_sigma) * np.dot(np.kron(p1, p2), h)
chi[6*i+1, :] = (1./center_sigma) * np.dot(np.kron(p1, p3), h)
chi[6*i+2, :] = (1./center_sigma) * np.dot(np.kron(p2, p3), h)
chi[6*i+3, :] = (1./focal_sigma) * np.dot(np.kron(p1, p1) - np.kron(p2, p2), h)
chi[6*i+4, :] = (1./beta) * np.dot(np.kron(p1, p1) - np.kron(p3, p3), h)
chi[6*i+5, :] = (1./beta) * np.dot(np.kron(p2, p2) - np.kron(p3, p3), h)
# solve the system and build H from svd
u, d, vh = svd(chi)
# the quadric is the last eigenvector (null-space)
q = vh[9, :]
quadric = np.array(((q[0], q[1], q[2], q[3]),
(q[1], q[4], q[5], q[6]),
(q[2], q[5], q[7], q[8]),
(q[3], q[6], q[8], q[9])))
u, d, vh = svd(quadric)
d[3] = 1
t = np.dot(u, np.diag(np.sqrt(d)))
# compute the cost and keep the minimum
cost = 0
for i in range(n_views):
c = np.dot(cameras[i], t)
k, r, center = camera_parameters(c)
k /= k[2, 2]
cost += (k[0, 1]**2 + (k[1, 1] / k[0, 0] - ratio) +
k[0, 2]**2 + k[1, 2]**2) / k[0, 0]**2
if cost < min_cost:
best_t = t
min_cost = cost
return best_t<file_sep>import numpy as np
from scipy.interpolate import griddata
from skimage import filter
def variation_reliability(flow, gamma=1):
""" Calculates the flow variation reliability
Parameters
----------
flow: numpy array
flow values
gamma: float, optional
soft threshold
Returns
-------
variation reliability map (0 less reliable, 1 reliable)
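    Examples
    --------
    A rough sketch: a spatially constant flow field is fully reliable.

    >>> import numpy as np
    >>> rel = variation_reliability(np.ones((8, 8, 2)))
    >>> bool(np.allclose(rel, 1.0))
    True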
"""
#compute central differences
gradx = np.gradient(flow[:, :, 0])
grady = np.gradient(flow[:, :, 1])
norm_grad = (gradx[0] ** 2 + gradx[1] ** 2 +
grady[0] ** 2 + grady[1] ** 2) / (0.01 * np.sum(flow ** 2, axis=2) + 0.002)
norm_grad[norm_grad > 1e2] = 0
return np.exp(-norm_grad / gamma)
def occlusion_reliability(forward_flow, backward_flow, gamma=1):
""" Calculates the flow variation reliability
Parameters
----------
forward_flow: numpy array
forward flow values
backward_flow: numpy array
backward flow values
gamma: float, optional
soft threshold
Return
------
reliability map (0 less reliable, 1 reliable)
"""
#check dimensions
if forward_flow.shape != backward_flow.shape: #pragma: no cover
raise ValueError("Array sizes should be the same")
#compute warping flow
xcoords = np.arange(0, forward_flow.shape[0])
ycoords = np.arange(0, forward_flow.shape[1])
xx, yy = np.meshgrid(ycoords, xcoords)
coords = (xx.flatten(), yy.flatten())
#find the warped flow
warped_flow = np.zeros_like(forward_flow)
warped_flow[:, :, 0] = xx + forward_flow[:, :, 0]
warped_flow[:, :, 1] = yy + forward_flow[:, :, 1]
warped_coords = (warped_flow[:, :, 0].flatten(), warped_flow[:, :, 1].flatten())
#interpolate flow values
fx = griddata(coords, backward_flow[:, :, 0].flatten(), warped_coords, method='linear', fill_value=0)
fy = griddata(coords, backward_flow[:, :, 1].flatten(), warped_coords, method='linear', fill_value=0)
interpolated_flow = np.zeros_like(forward_flow)
interpolated_flow[:, :, 0] = fx.reshape(backward_flow.shape[:2])
interpolated_flow[:, :, 1] = fy.reshape(backward_flow.shape[:2])
#find the forward-backward consistency
result = np.sum((forward_flow + interpolated_flow) ** 2, axis=2) / \
(0.01 * (np.sum(forward_flow ** 2, axis=2) +
np.sum(interpolated_flow ** 2, axis=2)) + 0.5)
return np.exp(-result / gamma)
def structure_reliability(img, gamma=1):
""" Calculates the flow structure reliability
Parameters
----------
img: numpy array
Image to compute the structure
gamma: float, optional
Soft threshold
Return
------
reliability map (0 less reliable, 1 reliable)
"""
#compute gradient of the image in the three channels
#kernel for blurring
st = np.zeros((img.shape[0], img.shape[1]))
eps = 1e-6
for k in np.arange(img.shape[-1]):
grad = np.gradient(img[:, :, k])
#compute components of the structure tensor
wxx = filter.gaussian_filter(grad[0] ** 2, 1)
wxy = filter.gaussian_filter(grad[0] * grad[1], 1)
wyy = filter.gaussian_filter(grad[1] ** 2, 1)
#determinant and trace
wdet = wxx * wyy - wxy ** 2
wtr = wxx + wyy
st += wdet / (wtr + eps)
avg = st.mean()
return 1 - np.exp(-st / (0.7 * avg * gamma))
def flow_reliability(img, forward_flow, backward_flow, use_structure=True):
"""
Parameters
----------
img: numpy array
image frame
forward_flow: numpy array
flow from the current frame to the other
backward_flow: numpy array
flow from the next frame and the current
use_structure: bool, optional
use structure to compute the minimum
Returns
-------
the minimum of the different reliabilities
"""
#soft threshold
gamma = 1
if use_structure:
st = structure_reliability(img, gamma)
else:
st = np.ones((img.shape[0], img.shape[1]))
#compute the different reliabilities
var = variation_reliability(forward_flow, gamma)
occ = occlusion_reliability(forward_flow, backward_flow)
#return the minimum of the three
return np.minimum(st, np.minimum(var, occ))
<file_sep>from .rag import rag<file_sep>import numpy as np
from numpy.testing import assert_almost_equal, assert_equal
from skcv.image.segmentation.bpt import BPT
from skcv.image.segmentation.region_distances import mean_color_distance
def test_bpt():
N = 100
M = 100
part = np.zeros((N,M))
part[:N/2,:M/2] = 0
part[N/2:,:M/2] = 1
part[:N/2,M/2:] = 2
part[N/2:,M/2:] = 3
f = lambda r, c, d: (part[r.astype(np.int),c.astype(np.int)]/4 + d/12)
img = np.fromfunction(f,
(N, M, 3),
dtype=np.float64)
b = BPT(img,part,mean_color_distance,update_partition=True)
assert_equal(b.nodes[0]["childs"], [])
assert_equal(b.nodes[1]["childs"], [])
assert_equal(b.nodes[2]["childs"], [])
assert_equal(b.nodes[3]["childs"], [])
assert_equal(b.nodes[0]["parent"], 5)
assert_equal(b.nodes[1]["parent"], 5)
assert_equal(b.nodes[2]["parent"], 4)
assert_equal(b.nodes[3]["parent"], 4)
assert_equal(b.nodes[4]["childs"], [2, 3])
assert_equal(b.nodes[5]["childs"], [0, 1])
assert_equal(b.nodes[4]["parent"], 6)
assert_equal(b.nodes[5]["parent"], 6)
assert_equal(b.nodes[6]["childs"], [5, 4])<file_sep>import os
import numpy as np
from numpy.testing import assert_equal
from skcv import data_dir
from skimage.io import imread
from skcv.video.optical_flow.visualization import flow_to_image
def test_io():
N = 11
M = 11
x_flow = np.linspace(0, N, N, endpoint=False)
y_flow = np.linspace(0, M, M, endpoint=False)
xv, yv = np.meshgrid(x_flow, y_flow)
flow = np.zeros((N, M, 2))
flow[..., 0] = xv - N/2
flow[..., 1] = yv - M/2
flow_image = flow_to_image(flow)
test_file = os.path.join(data_dir, 'flow_test.png')
flow_img_read = imread(test_file)
assert_equal(flow_image, flow_img_read)
<file_sep>import numpy as np
from numpy.testing import assert_almost_equal, assert_array_less, assert_equal
from skcv.multiview.util.synthetic_point_cloud import *
def test_random_sphere():
n_points = 10
radius = 7
center = np.array((1, 2, 3))
points = random_sphere(n_points, radius=radius, center=center)
norm = np.linalg.norm(points - center[:, np.newaxis], axis=0)
assert_almost_equal(radius*np.ones(n_points), norm)
def test_random_ball():
n_points = 10
radius = 7
center = np.array((1, 2, 3))
points = random_ball(n_points, radius=radius, center=center)
norm = np.linalg.norm(points - center[:, np.newaxis], axis=0)
assert_array_less(norm, radius*np.ones(n_points))
def test_random_cube():
n_points = 10
size = 5
center = np.array((1, 2, 3))
points = random_cube(n_points, size, center)
points_c = points - center[:, np.newaxis]
    in_cube = np.ones(10, dtype=bool)
for i in range(3):
b = (abs(points_c[i, :]) - 0.5*size) < 1e-6
        in_cube = np.logical_and(in_cube, b)
assert_equal(np.all(in_cube), True)<file_sep>import numpy as np
from numpy.linalg import norm, svd, qr, det
def project(points, cameras):
""" Generates point projections from a set of cameras
Parameters
----------
points: numpy array
Array (4,N) of points representing a cloud in homogeneous coordinates
cameras: list
List of Q camera matrices to compute the projections
Returns
-------
list of Q projections for the points
"""
#list of projections
projections = []
if np.ndim(points) == 1:
points = points[:, np.newaxis]
for camera in cameras:
p = np.dot(camera, points)
projections.append(p / p[2, :])
return projections
def depth_of_points(camera, points):
    """
    Computes the depth of homogeneous points (4, N) w.r.t. a camera P = [M | p4];
    a minimal sketch of the usual sign(det M) * w / (T * ||m3||) formula, with P X = w (x, y, 1)^T
    """
    m = camera[:, :3]
    w = np.dot(camera, points)[2, :]
    return np.sign(det(m)) * w / (points[3, :] * norm(m[2, :]))
def calibration_matrix(focal, skew=0, center=(0, 0), focal_y=None):
""" Builds a calibration matrix from parameters
Parameters
----------
focal: float
focal length
skew: float, optional
skew of the pixels, normally 0
center: numpy array, optional
center of the projection
focal_y: float, optional
focal length of the y axis
"""
k = np.zeros((3, 3))
k[0, 0] = focal
if focal_y is not None: # pragma: no cover
k[1, 1] = focal_y
else: # pragma: no cover
k[1, 1] = focal
k[0, 1] = skew
k[0, 2] = center[0]
k[1, 2] = center[1]
k[2, 2] = 1
return k
def internal_parameters(k_matrix):
""" Extracts the intrinsic parameters from the matrix
Parameters
----------
k_matrix: numpy array
Returns
-------
list of parameters
"""
return k_matrix[0, 0], k_matrix[1, 1], \
(k_matrix[0, 2], k_matrix[1, 2]), k_matrix[0, 1]
def camera_center(camera):
""" Computes the camera center
Parameters
----------
camera: numpy array
camera matrix
Returns
-------
center of the camera
"""
# camera center
u, d, vh = svd(camera)
center = vh[3, :]
return center[:3] / center[3]
def camera_parameters(camera):
""" Computes the camera center
Parameters
----------
camera: numpy array
camera matrix
Returns
-------
parameters of the camera
"""
# get the center of the camera
center = camera_center(camera)
# get the left square matrix
m = camera[:3, :3]
#perform a RQ decomposition from a QR
q, r = qr(np.flipud(m).T)
r = np.flipud(r.T)
q = q.T
k = r[:, ::-1]
r = q[::-1, :]
#return the calibration matrix with positive focal lengths
t = np.diag(np.sign(np.diag(k)))
k = np.dot(k, t)
r = np.dot(t, r) #T is its own inverse
if det(r) < 0:
r *= -1
return k, r, center
def look_at_matrix(center, look_at, up_vector=np.array((0, 1, 0))):
""" Generates camera matrix using a center at a look at point
the camera is assumed to be looking initially at (0,0,1)
following the model of Zisserman, "Multiple View Geometry"
Parameters
----------
center: numpy array
Vector representing the camera center
look_at: numpy array
Vector representing the point to look at
    up_vector: numpy array, optional
The camera up vector
Returns
-------
External camera matrix
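    Examples
    --------
    A minimal sketch: a camera at the origin looking along +z gives the
    canonical [I | 0] external matrix.

    >>> import numpy as np
    >>> ext = look_at_matrix(np.zeros(3), np.array((0., 0., 10.)))
    >>> bool(np.allclose(ext, np.eye(3, 4)))
    True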
"""
# form the pointing vector. the camera looks at -w
w = look_at - center
nw = w / norm(w)
# form the up vector
u = np.cross(up_vector, nw)
nu = u / norm(u)
# form the last vector
v = np.cross(nw, nu)
nv = v / norm(v)
#build the camera matrix
external = np.vstack((nu, nv, nw))
rt = np.dot(external, -center)
external = np.hstack((external, rt[:, np.newaxis]))
return external
<file_sep>import numpy as np
from numpy.testing import assert_equal
from skcv.video.segmentation.region_tracking import *
def test_bipartite_region_tracking():
N = 10
M = 10
part = np.fromfunction(lambda i, j: i*N+j, (N, M))
gt_part = np.zeros((2, N, M))
gt_part[0, ...] = part
gt_part[1, ...] = part
gt_part[0, N-1, M-3:M] = 98
video_part = gt_part.copy()
video_part[1, ...] += N*M;
flow = np.zeros((2, N, M, 2))
rel = np.ones((2, N, M, 2))
tracked = bipartite_region_tracking(video_part, flow, rel)
gt_part[1, N-1, M-2] = 101
gt_part[1, N-1, M-3] = 100
gt_part[1, N-1, M-1] = 98
assert_equal(tracked, gt_part)<file_sep>from .false_color import false_color
from .partition_mean_color import partition_mean_color<file_sep>import numpy as np
from numpy.testing import assert_equal
from skcv.util.false_color import false_color
def test_false_color():
N = 100
M = 100
part = np.zeros((N,M))
part[:N/2, :M/2] = 0
part[N/2:, :M/2] = 1
part[:N/2, M/2:] = 2
part[N/2:, M/2:] = 3
img = false_color(part)
for i in range(4):
coords = np.where(part == i)
for ch in range(3):
assert_equal(len(np.unique(img[coords[0], coords[1], ch])), 1)
<file_sep>from .video_slic import video_slic
from .region_tracking import bipartite_region_tracking
from .tbpt import TBPT
<file_sep>import os
import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
from skcv import data_dir
from skcv.video.optical_flow.io import *
def test_io():
N = 11
M = 11
x_flow = np.linspace(0, N, N, endpoint=False)
y_flow = np.linspace(0, M, M, endpoint=False)
xv, yv = np.meshgrid(x_flow, y_flow)
flow = np.zeros((N, M, 2))
flow[..., 0] = xv - N/2
flow[..., 1] = yv - M/2
test_file = os.path.join(data_dir, 'flow_test.flo')
write_flow_file(test_file, flow)
flow_read = read_flow_file(test_file)
assert_equal(flow, flow_read)<file_sep>import numpy as np
def swap_signs(cameras, x3d):
"""
Swaps signs of the camera and 3D points
so the projective depths are positive
Parameters
----------
    cameras: list
Camera matrices
x3d: numpy array
array containing 3D points
Returns
-------
    cameras: cameras with the correct sign. empty if error
x3d: points with the correct sign. empty if error
"""
n_views = len(cameras)
n_points = x3d.shape[1]
signs = np.zeros((n_views, n_points))
for i in range(n_views):
signs[i, :] = np.sign(np.dot(cameras[i], x3d))[2, :]
signp = signs[:, 0]
signs *= signp
signx = signs[0, :]
signs *= signx
if np.any(signs < 0):
return [], []
x3d_signed = x3d * signx
cameras_signed = [cameras[i]*signp[i] for i in range(n_views)]
return cameras_signed, x3d_signed<file_sep>import os
import sys
pkg_dir = os.path.abspath(os.path.dirname(__file__))
data_dir = os.path.join(pkg_dir, 'data')
from . import graph
from . import image
from . import video
from . import multiview
from . import util
__all__ = ['graph',
'image',
'video',
'multiview',
'util']<file_sep>import numpy as np
from numpy.testing import assert_almost_equal
from skcv.image.segmentation.region_descriptors import region_mean_color
from skcv.image.segmentation.region_descriptors import region_color_histograms
from skcv.image.segmentation.region_descriptors import region_dominant_colors
def test_mean_color():
img = np.zeros((10,10,3))
img[5:10,5:10,:] = 2
region1 = {'coords' : np.where(img[:,:,1] == 2)}
region2 = {'coords' : np.where(img[:,:,1] == 0)}
all = {'coords' : np.hstack((region1['coords'],region2['coords']))}
avg1 = region_mean_color(img,region1)
avg2 = region_mean_color(img, region2)
avg_all = region_mean_color(img, all)
assert_almost_equal(avg1, [2, 2, 2])
assert_almost_equal(avg2, [0, 0, 0])
assert_almost_equal(avg_all, [0.5, 0.5, 0.5])
def test_histograms():
N = 100
M = 100
part = np.zeros((N,M))
part[:N/2,:M/2] = 0
part[N/2:,:M/2] = 1
part[:N/2,M/2:] = 2
part[N/2:,M/2:] = 3
f = lambda r, c, d: (part[r.astype(np.int),c.astype(np.int)]/4 + d/12)
img = np.fromfunction(f,
(N, M, 3),
dtype=np.float64)
k = 0
for i in range(4):
region = {"coords": np.where(part == i)}
h, _ = region_color_histograms(img, region, bins=12)
for hst in h:
hist_gt = np.zeros(12)
hist_gt[k] += 2500
assert_almost_equal(hst,hist_gt)
k += 1
def test_dominant_colors():
N = 100
M = 100
part = np.zeros((N,M))
part[:N/2,:M/2] = 0
part[N/2:,:M/2] = 1
part[:N/2,M/2:] = 2
part[N/2:,M/2:] = 3
f = lambda r, c, d: (part[r.astype(np.int),c.astype(np.int)]/4 + d/12)
img = np.fromfunction(f,
(N, M, 3),
dtype=np.float64)
k = 0
for i in range(4):
region = {"coords": np.where(part == i)}
colors, error = region_dominant_colors(img, region)
#it will only find one color
color = img[region['coords']][0,...]
assert_almost_equal(colors[0], color)
assert_almost_equal(error, 0)
# get a whole region covering the whole image
region = {"coords": np.where(part < 5)}
colors, error = region_dominant_colors(img, region)
gt_colors = np.zeros((4,3))
gt_colors[0, :] = img[np.where(part == 3)][0, ...]
gt_colors[1, :] = img[np.where(part == 1)][0, ...]
gt_colors[2, :] = img[np.where(part == 0)][0, ...]
gt_colors[3, :] = img[np.where(part == 2)][0, ...]
assert_almost_equal(error, 0)
pass<file_sep>import heapq
import numpy as np
from skcv.graph.rag import rag
class BPT:
"""
Class defining a hierarchical segmentation
The hierarchy is created from an initial partition
and merging the two most similar regions according
to a predefined distance until only one region is
left
"""
def __init__(self, image, partition, distance,
update_partition=False, verbose=0):
""" Creates the Binary Partition Tree
from the initial partition using a specified
distance
Parameters
----------
image: array (N,M,D)
array containing the image
partition: array (M,N)
array with labels used as the initial
partition
distance: function (img,region1,region2)
distance function between two regions
update_partition: bool, optional
whether the partition gets updated
verbose: int, optional
indicates the level of verbosity
"""
#initial rag
r, regions = rag(partition)
#structures to save the tree topology
self.nodes = {}
for reg in regions:
self.nodes[reg] = {}
# store the leaves partition
self.leaves_partition = np.copy(partition)
#compute initial distances
dists = []
max_label = 0
for e in r.edges_iter():
dists.append((distance(image, regions[e[0]], regions[e[1]]),
e[0],
e[1]))
#store the nodes to a structure
self.nodes[e[0]]["childs"] = []
self.nodes[e[1]]["childs"] = []
#get the maximum used label
max_label = max(max_label, e[0], e[1])
#make a heap (priority queue)
heapq.heapify(dists)
#contains the regions that are merged
merged = set()
#number of regions, N-1 merges
n_regions = len(regions)
max_label += 1
if verbose > 0: # pragma: no cover
print("Performing {0} merges".format(n_regions-1, max_label))
for n in range(n_regions-1):
to_merge = heapq.heappop(dists)
while (to_merge[1] in merged) or (to_merge[2] in merged):
to_merge = heapq.heappop(dists)
if verbose > 1: # pragma: no cover
print("Merging {0} and {1} to {2} with distance ".format(
to_merge[1],
to_merge[2],
max_label,
to_merge[0]))
coords1 = regions[to_merge[1]]["coords"]
coords2 = regions[to_merge[2]]["coords"]
#create the new region and add a node to the rag
dimensions = len(coords1)
coords_parent = [np.hstack((coords1[i], coords2[i]))
for i in range(dimensions)]
regions[max_label] = {"label": max_label, "coords": coords_parent}
r.add_node(max_label)
self.nodes[max_label] = {}
#update tree structures
self.nodes[to_merge[1]]["parent"] = max_label
self.nodes[to_merge[2]]["parent"] = max_label
self.nodes[max_label]["childs"] = (to_merge[1], to_merge[2])
#iterate through the neighbors of the childs and update links
edges = r.edges([to_merge[1], to_merge[2]])
for e in edges:
r.add_edge(max_label, e[1])
heapq.heappush(dists,
(distance(image,
regions[max_label],
regions[e[1]]),
max_label,
e[1]))
#remove the two nodes and edges
r.remove_edges_from(edges)
r.remove_node(to_merge[1])
r.remove_node(to_merge[2])
#add the two regions to the set of merged
merged.add(to_merge[1])
merged.add(to_merge[2])
#print(coords_parent,coords_parent.shape)
if update_partition:
coords = [c for c in coords_parent]
partition[coords] = max_label
max_label += 1
<file_sep>from . import optical_flow
from . import segmentation<file_sep>.PHONY: all clean test
all:
python setup.py build_ext --inplace
clean:
find . -name "*.so" -o -name "*.pyc" -o -name "*.pyx.md5" | xargs rm -f
test:
python -c "import skcv, sys, io; sys.exit(skcv.test_verbose())"
doctest:
python -c "import skcv, sys, io; sys.exit(skcv.doctest_verbose())"
coverage:
nosetests skcv --with-coverage --cover-package=skcv
<file_sep>#! /usr/bin/env python
descr = """Scikit Computer Vision
Computer Vision algorithms based on Scikit-Image and Scikit-learn
Includes: image and video segmentation, optical flow, n-view geometry
"""
DISTNAME = 'scikit-cv'
DESCRIPTION = 'Computer Vision library for Python'
LONG_DESCRIPTION = descr
MAINTAINER = '<NAME>'
MAINTAINER_EMAIL = '<EMAIL>'
URL = 'http://github.com/guillempalou/scikit-cv'
LICENSE = 'MIT'
DOWNLOAD_URL = 'http://github.com/guillempalou/scikit-cv'
VERSION = '0.1dev'
PYTHON_VERSION = (3, 3)
DEPENDENCIES = {
'numpy': (1, 6),
'Cython': (0, 17),
'six': (1, 3),
'skimage': (0, 9),
'sklearn': (0, 14),
'networkx': (1, 8)
#'numpydoc': (0, 4)
}
import os
import sys
import re
import glob
import setuptools # setuptools need to be imported before distutils
from distutils.core import setup, Extension
from Cython.Distutils import build_ext
# get the numpoy include directories
from numpy.distutils.misc_util import get_numpy_include_dirs
def configure_extensions():
if os.path.exists('MANIFEST'):
os.remove('MANIFEST')
# search for all cython files and build them as modules
# in the corresponding subpackage
packages = setuptools.find_packages('.')
exts = []
for package in packages:
working_path = os.path.join(*package.split('.'))
pyx_paths = glob.glob(os.path.join(working_path, '*.pyx'))
pyx_files = [path.split('/')[-1] for path in pyx_paths]
for pyx_file in pyx_files:
name = pyx_file[:-4]
full_path = os.path.join(working_path, pyx_file)
e = Extension(package + "." + name, [full_path],
include_dirs=get_numpy_include_dirs())
exts.append(e)
return exts
def write_version_py(filename='skcv/version.py'):
template = ("# THIS FILE IS GENERATED FROM THE SCIKIT-CV SETUP.PY\n"
"version='%s'\n"
)
vfile = open(os.path.join(os.path.dirname(__file__),
filename), 'w')
try:
vfile.write(template % VERSION)
finally:
vfile.close()
def get_package_version(package):
version = []
for version_attr in ('version', 'VERSION', '__version__'):
if hasattr(package, version_attr) \
and isinstance(getattr(package, version_attr), str):
version_info = getattr(package, version_attr, '')
for part in re.split('\D+', version_info):
try:
version.append(int(part))
except ValueError:
pass
return tuple(version)
def check_requirements():
if sys.version_info < PYTHON_VERSION:
raise SystemExit('You need Python version %d.%d or later.' \
% PYTHON_VERSION)
for package_name, min_version in DEPENDENCIES.items():
dep_error = False
try:
package = __import__(package_name)
except ImportError:
dep_error = True
else:
package_version = get_package_version(package)
if min_version > package_version:
dep_error = True
if dep_error:
raise ImportError('You need `%s` version %d.%d or later.' \
% ((package_name, ) + min_version))
if __name__ == "__main__":
check_requirements()
write_version_py()
extensions = configure_extensions()
data_dirs = {'skcv': ['data/*']}
setup(
name=DISTNAME,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author=MAINTAINER,
author_email=MAINTAINER_EMAIL,
url=URL,
license=LICENSE,
download_url=DOWNLOAD_URL,
version=VERSION,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: C++',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
],
packages=setuptools.find_packages(exclude=['doc']),
package_data=data_dirs,
ext_modules=extensions,
include_package_data=True,
cmdclass={'build_ext': build_ext},
zip_safe=False, # the package can run out of an .egg file
)
<file_sep>import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
from skcv.multiview.util.points_functions import *
from skcv.multiview.util.camera import *
def test_project():
camera = np.array(((100, 0, 0, 0),
(0, 100, 0, 0),
(0, 0, 1, 0)))
X1 = np.array((1, 2, 2, 1))
x1 = project(X1, [camera])
x1 = hnormalize(x1[0])
projection = np.array((50, 100, 1))
assert_almost_equal(x1, projection)
def test_camera_center():
center1 = np.array((10,10,10))
look_at = np.zeros(3)
camera1 = look_at_matrix(center1, look_at)
t = camera_center(camera1)
assert_almost_equal(t, center1)
def test_camera_parameters():
center1 = np.array((10, 10, 10))
look_at = np.zeros(3)
camera1 = look_at_matrix(center1, look_at)
K = np.array(((100, 1, 10), (0, 150, 15), (0, 0, 1)))
c1 = np.dot(K, camera1)
k, r, t = camera_parameters(c1)
assert_almost_equal(k, K)
assert_almost_equal(r, camera1[:,:3])
assert_almost_equal(center1, t)
def test_internal_parameters():
k_matrix = calibration_matrix(100, focal_y=150, skew=1, center=(10, 15))
K = np.array(((100, 1, 10), (0, 150, 15), (0, 0, 1)))
focal_x, focal_y, center, skew = internal_parameters(K)
assert_equal(focal_x, 100)
assert_equal(focal_y, 150)
assert_equal(skew, 1)
assert_equal(center, (10, 15))
assert_equal(K, k_matrix)
def test_look_at():
K = np.array(((100, 0, 0, 0),
(0, 100, 0, 0),
(0, 0, 1, 0)))
camera = np.array(((1, 0, 0, 0),
(0, 1, 0, 0),
(0, 0, 1, 0)))
center = np.array((0, 0, 0))
look_at = np.array((0, 0, 10))
camera1 = look_at_matrix(center, look_at)
assert_equal(camera, camera1)
<file_sep>import numpy as np
from numpy.testing import assert_equal
from skcv.video.segmentation.video_slic import *
def test_video_slic():
N = 100
M = 100
video = np.zeros((2, N, M, 3))
video[0, N / 3:2 * N / 3, M / 3:2 * M / 3, :] = 200
video[1, N / 3:2 * N / 3, M / 3:2 * M / 3, :] = 200
gt_part = (video[0, ..., 0] == 200).astype(np.int)
# test fails due segfault
#part = video_slic(video, 5)
#assert_equal(gt_part, part[0, ...])
#assert_equal(gt_part+2, part[1, ...])
#alternative test
part = video_slic(video, 2)
assert_equal(np.zeros_like(part[0, ...]), part[0, ...])
assert_equal(np.ones_like(part[1, ...]), part[1, ...])
<file_sep>__author__ = 'guillem'
import networkx as nx
import numpy as np
def rag(partition, discard_axis=[]):
"""
Parameters
----------
partition: numpy array
A 2D or 3D label array where each label represents a region
discard_axis: list, optional
Whether the rag discards adjacencies from given axis
Returns
-------
    A NetworkX graph object with adjacency relations, together with a
    dictionary of regions (label and coordinates) indexed by label
Raises
------
ValueError:
if partition has dimension 1
Notes
-----
The regions correspond to labels, not connected components
Examples
--------
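    A minimal sketch on a toy 2D label array (labels here are illustrative):

    >>> import numpy as np
    >>> labels = np.array([[0, 0, 1], [0, 2, 1]])
    >>> graph, regions = rag(labels)
    >>> len(graph.nodes()), len(graph.edges())
    (3, 3)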
"""
dimensions = len(partition.shape)
if dimensions == 2:
partition = partition[:, :, np.newaxis]
#create a RAG
rag = nx.Graph()
labels = np.unique(partition)
#create a regions hash table organized by label
regions = {}
for label in labels:
px, py, pz = np.where(partition == label)
if dimensions == 2:
coords = [px, py]
if dimensions == 3:
coords = [px, py, pz]
regions[label] = {"label": label, "coords": coords}
#create nodes for the RAG
rag.add_nodes_from(labels)
#get adjacencies in each dimension
endx = []
startx = []
#list containing all tuples
pairs = []
for d in range(3):
# ignore the adjacency on the given axis
if d in discard_axis:
continue
if d == 0:
idx = np.where(partition[:-1, :, :] != partition[1:, :, :])
elif d == 1:
idx = np.where(partition[:, :-1, :] != partition[:, 1:, :])
elif d == 2:
idx = np.where(partition[:, :, :-1] != partition[:, :, 1:])
incx = int(d == 0)
incy = int(d == 1)
incz = int(d == 2)
adj = (partition[idx[0], idx[1], idx[2]],
partition[idx[0] + incx, idx[1] + incy, idx[2] + incz])
pairs = pairs + \
[(min(adj[0][i], adj[1][i]),
max(adj[0][i], adj[1][i])) for i in range(len(adj[0]))]
#find unique region pairs
unique_pairs = set(pairs)
    # build edges between adjacent regions
edges = [(r[0], r[1]) for r in unique_pairs]
rag.add_edges_from(edges)
    #return the rag and the regions dictionary
return rag, regions
<file_sep>import numpy as np
def read_flow_file(path):
""" Reads flow file and returns 2D numpy array
Parameters
----------
path: string
file path to read
Returns
-------
numpy array containing the 2D flow vectors for each position (x,y)
"""
#open the file
f = open(path, "rb")
if (not f): # pragma: no cover
raise IOError("File cannot be opened")
#read the tag
tag = f.read(4)
if tag != b"PIEH": # pragma: no cover
raise TypeError("File type does not correspond to a flow file")
#read the width and height
width = np.fromfile(f, dtype=np.uint32, count=1)
height = np.fromfile(f, dtype=np.uint32, count=1)
if width < 1 or width > 99999 or height < 1 or height > 99999: # pragma: no cover
raise ValueError("Width and height file not correct")
#read flow data
flow = np.fromfile(f, dtype=np.float32, count=width[0] * height[0] * 2)
if flow.size != width[0] * height[0] * 2: # pragma: no cover
raise ValueError("Data flow too small %d != %d" % (flow.size, width[0] * height[0] * 2))
#reshape the flow so that its shape is (height,width,2)
flow_reshaped = np.reshape(flow, (height[0], width[0], 2), order='C')
#close the file
f.close()
return flow_reshaped
def write_flow_file(path, flow):
""" Writes flow file to file
Parameters
----------
path: string
file path to write
flow: numpy array
flow values
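    Examples
    --------
    A minimal round-trip sketch (the output path is purely illustrative):

    >>> import numpy as np
    >>> flow = np.zeros((4, 5, 2), dtype=np.float32)
    >>> write_flow_file('/tmp/flow_example.flo', flow)
    >>> read_flow_file('/tmp/flow_example.flo').shape
    (4, 5, 2)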
"""
#open the file for writing
f = open(path, "wb")
if not f: # pragma: no cover
raise IOError("File cannot be opened")
    #write the tag
tag = f.write(b"PIEH")
#write first the width and then the height
shape = np.array((2, 1), dtype=np.uint32)
shape[0] = flow.shape[1]
shape[1] = flow.shape[0]
shape.tofile(f)
#write the flow data
flow.astype(np.float32).tofile(f)<file_sep>import numpy as np
import pickle
import os
from numpy.testing import assert_array_almost_equal
from skcv import data_dir
from skcv.multiview.two_views.fundamental_matrix import *
from skcv.multiview.util.points_functions import *
from skcv.multiview.util.camera import *
from skcv.multiview.two_views import triangulation
def test_triangulation_hartley():
projections_file = os.path.join(data_dir, 'two_view_projections.dat')
(x1e, x2e) = pickle.load(open(projections_file, 'rb'))
#add gaussian noise to x1e and x2e
dev = 0.1
x1e += np.random.normal(0, dev, size=x1e.shape)
x2e += np.random.normal(0, dev, size=x2e.shape)
x1h = euclidean_to_homogeneous(x1e)
x2h = euclidean_to_homogeneous(x2e)
f_matrix = robust_f_estimation(x1h, x2h)
p1, p2 = canonical_cameras_from_f(f_matrix)
X = triangulation.optimal_triangulation(x1h, x2h, f_matrix, cameras=(p1,p2), method='Hartley')
x1p = np.dot(p1, X)
x2p = np.dot(p2, X)
ratio1 = x1p / x1h
ratio2 = x2p / x2h
def test_triangulation_kanatani():
projections_file = os.path.join(data_dir, 'two_view_projections.dat')
(x1e, x2e) = pickle.load(open(projections_file, 'rb'))
#add gaussian noise to x1e and x2e
dev = 0.1
x1e += np.random.normal(0, dev, size=x1e.shape)
x2e += np.random.normal(0, dev, size=x2e.shape)
x1h = euclidean_to_homogeneous(x1e)
x2h = euclidean_to_homogeneous(x2e)
f_matrix = robust_f_estimation(x1h, x2h)
p1, p2 = canonical_cameras_from_f(f_matrix)
X = triangulation.optimal_triangulation(x1h, x2h, f_matrix, cameras=(p1,p2), method='Kanatani')
x1p = np.dot(p1, X)
x2p = np.dot(p2, X)
ratio1 = x1p / x1h
ratio2 = x2p / x2h
<file_sep>import numpy as np
from numpy.testing import assert_allclose
import pickle
import os
import matplotlib.pyplot as plt
from skcv import data_dir
from skcv.multiview.autocalibration import linear_autocalibration
from skcv.multiview.n_views import projective_factorization
from skcv.multiview.util.points_functions import *
from skcv.multiview.util.camera import *
def test_linear_autocalibration():
dump_path = os.path.join(data_dir, "multiview_projections.dat")
dump_file = open(dump_path, "rb")
n_views = pickle.load(dump_file)
internals = []
rotations = []
centers = []
gt_cameras = []
depths = []
for i in range(n_views):
(k, r, c) = pickle.load(dump_file)
internals.append(k)
rotations.append(r)
centers.append(c)
cm = np.eye(3, 4)
cm[:, 3] = -c
cm = np.dot(k, np.dot(r, cm))
gt_cameras.append(cm)
(projections, x3d) = pickle.load(dump_file)
x3dh = euclidean_to_homogeneous(x3d)
#get the true depths
for i in range(n_views):
projs = np.dot(gt_cameras[i], x3dh)
depth_i = (projections[i] / projs)[1,:] / np.linalg.norm(gt_cameras[i][2,:])
depths.append(depth_i)
# get a random homography to generate a projective reconstruction
h = np.random.random((4, 4))
x_3d = np.dot(np.linalg.inv(h), x3dh)
cameras = []
for i in range(n_views):
cameras.append(np.dot(gt_cameras[i], h))
t = linear_autocalibration(cameras, internals[0])
for i in range(n_views):
p = np.dot(cameras[i], t)
k, r, c = camera_parameters(p)
k /= k[2, 2]
#check the calibration got correct internal parameters
assert_allclose(k, internals[i], rtol=1e-2, atol=1e-6)
dump_file.close()
<file_sep>from . import autocalibration
from . import n_views
from . import two_views
from . import util
<file_sep>import numpy as np
from numpy.testing import assert_equal
from skcv.util.partition_mean_color import partition_mean_color
def test_partition_mean_color():
N = 100
M = 100
part = np.zeros((N,M))
part[:N/2, :M/2] = 0
part[N/2:, :M/2] = 1
part[:N/2, M/2:] = 2
part[N/2:, M/2:] = 3
img = np.zeros((N,M,3))
img[..., 0] = np.fromfunction(lambda i, j: i+j+0, (N,M))
img[..., 1] = np.fromfunction(lambda i, j: i+j+1, (N,M))
img[..., 2] = np.fromfunction(lambda i, j: i+j+2, (N,M))
mean_color = partition_mean_color(img, part)
colors = np.zeros((3,4))
colors[:, 0] = np.array([49, 50, 51])
colors[:, 1] = np.array([99, 100, 101])
colors[:, 2] = np.array([99, 100, 101])
colors[:, 3] = np.array([149, 150, 151])
for i in range(4):
coords = np.where(part == i)
for ch in range(3):
assert_equal(len(np.unique(mean_color[coords[0], coords[1], ch])), 1)
assert_equal(mean_color[coords[0][0], coords[1][0], ch], colors[ch, i])
<file_sep>import math
import numpy as np
#private function for colorwheel
def _colorwheel():
""" Created the color wheel for flow color. Private, auxiliary function
Return
------
colorwheel: array of (Ncolors,3) with a set of colors.
"""
colors = np.array([15, 6, 4, 11, 13, 6])
ncolors = np.sum(colors)
#array to be returned containing the different colors
colorwheel = np.zeros((ncolors, 3))
actual_color = 0
#fill the wheel with colors
idx = np.arange(actual_color, actual_color + colors[0])
colorwheel[idx, 0] = 255
colorwheel[idx, 1] = np.floor(255 * np.linspace(0, 1, colors[0],
endpoint=False))
actual_color = actual_color + colors[0]
idx = np.arange(actual_color, actual_color + colors[1])
colorwheel[idx, 0] = 255 - np.floor(255 * np.linspace(0, 1, colors[1],
endpoint=False))
colorwheel[idx, 1] = 255
actual_color = actual_color + colors[1]
idx = np.arange(actual_color, actual_color + colors[2])
colorwheel[idx, 1] = 255
colorwheel[idx, 2] = np.floor(255 * np.linspace(0, 1, colors[2],
endpoint=False))
actual_color = actual_color + colors[2]
idx = np.arange(actual_color, actual_color + colors[3])
colorwheel[idx, 1] = 255 - np.floor(255 * np.linspace(0, 1, colors[3],
endpoint=False))
colorwheel[idx, 2] = 255
actual_color = actual_color + colors[3]
idx = np.arange(actual_color, actual_color + colors[4])
colorwheel[idx, 0] = np.floor(255 * np.linspace(0, 1, colors[4],
endpoint=False))
colorwheel[idx, 2] = 255
actual_color = actual_color + colors[4]
idx = np.arange(actual_color, actual_color + colors[5])
colorwheel[idx, 0] = 255
colorwheel[idx, 2] = 255 - np.floor(255 * np.linspace(0, 1, colors[5],
endpoint=False))
return colorwheel
def flow_to_image(flow):
""" Converts a flow array into a RGB image according to middlebury color scheme
Parameters
----------
flow: flow array (M,N,2) where M and N are the width and height respectively
"""
    if len(flow.shape) != 3:  # pragma: no cover
        raise ValueError("Flow must be of the form (M,N,2)")
    if flow.shape[2] != 2:  # pragma: no cover
        raise ValueError("Flow must be of the form (M,N,2)")
#copy data so we do not change values
u = np.copy(flow[:, :, 0])
v = np.copy(flow[:, :, 1])
#flow threshold for unknown values
flow_threshold = 1e9
#fix unknown values
idx_unknown = (np.abs(u) > flow_threshold) | (np.abs(v) > flow_threshold)
idx_nan = (np.isnan(u) | np.isnan(v))
u[idx_unknown] = 0
v[idx_unknown] = 0
#get flow extreme values
maxu = u.max()
maxv = v.max()
    minu = u.min()
minv = v.min()
    #get the maximum squared norm over all flow vectors
maxnorm = np.max(u * u + v * v)
print("Flow range u={hmin} .. {hmax}; v = {vmin} .. {vmax}".format(
hmin=minu, hmax=maxu, vmin=minv, vmax=maxv))
eps = 1e-10
u /= maxnorm + eps
v /= maxnorm + eps
norm = np.sqrt(u * u + v * v)
# get the color wheel
colorwheel = _colorwheel()
ncolors = colorwheel.shape[0]
# map each angle to a color
uv_angle = np.arctan2(-v, -u) / math.pi
fk = (uv_angle + 1) * 0.5 * (ncolors - 1)
k0 = np.floor(fk)
k0 = k0.astype(np.uint32)
k1 = k0 + 1
k1[k1 == ncolors] = 1
f = fk - k0
# compute color for each channel
flow_img = np.zeros((flow.shape[0], flow.shape[1], 3))
# get indexes with valid flow values
idx = (norm <= 1)
for i in [0, 1, 2]:
t = colorwheel[:, i]
col0 = t[k0] * 1.0 / 255
col1 = t[k1] * 1.0 / 255
col = (1 - f) * col0 + f * col1
col[idx] = 1 - np.exp(-norm[idx]) * (1 - col[idx]);
col[~idx] *= 0.75
flow_img[:, :, i] = np.floor(255 * col * (1 - idx_nan))
return flow_img<file_sep>import numpy as np
from numpy.testing import assert_almost_equal, assert_allclose
import pickle
import os
from skcv import data_dir
from skcv.multiview.two_views.fundamental_matrix import *
from skcv.multiview.util.points_functions import *
from skcv.multiview.util.camera import *
def test_fundamental_from_cameras():
center1 = np.array((0, 0, 0))
center2 = np.array((1, 0, 0))
look_at1 = np.array((0, 0, 10))
look_at2 = np.array((1, 0, 10))
camera1 = look_at_matrix(center1, look_at1)
camera2 = look_at_matrix(center2, look_at2)
f_matrix = fundamental_matrix_from_two_cameras(camera1, camera2)
def test_epipoles():
f_matrix = np.array(((0, 0, 0), (0, 0, -1), (0, 1, 0)))
re = right_epipole(f_matrix)
le = left_epipole(f_matrix)
assert_almost_equal(re, [1, 0, 0])
assert_almost_equal(le, [1, 0, 0])
def test_canonical_cameras():
camera1 = np.array([[1., 0., 0., 0.],
[0., 1., 0., 0.],
[0., 0., 1., 0.]])
camera2 = np.array([[0., 0., 0., 1.],
[0., -1., 0., 0.],
[0., 0., -1., 0.]])
f_matrix = np.array([[0, 0, 0],
[0, 0, -1],
[0, 1, 0]])
p1, p2 = canonical_cameras_from_f(f_matrix)
assert_almost_equal(p1, camera1)
assert_almost_equal(p2, camera2)
def test_sampson_error():
f_matrix = np.array([[0, 0, 0],
[0, 0, -1],
[0, 1, 0]])
x = np.linspace(0, 10, 11)
y = np.linspace(0, 10, 11)
ones = np.ones(11)
x1 = np.vstack((x, y, ones))
x2 = np.vstack((x + 1, y + 1, ones))
x3 = np.vstack((x + 1, y, ones))
error = sampson_error(x1, x2, f_matrix)
gt_err = np.array([0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5])
assert_almost_equal(gt_err, error)
error = sampson_error(x1, x3, f_matrix)
assert_almost_equal(np.zeros(11), error)
def test_eight_point_algorithm():
projections_file = os.path.join(data_dir, 'two_view_projections.dat')
(x1e, x2e) = pickle.load(open(projections_file, 'rb'))
x1h = euclidean_to_homogeneous(x1e)
x2h = euclidean_to_homogeneous(x2e)
f_matrix = eight_point_algorithm(x1h, x2h)
# fundamental matrix corresponding to an horizontal displacement
f_groundtruth = np.array(((0, 0, 0),
(0, 0, -1),
(0, 1, 0)))
f_matrix /= np.max(f_matrix)
assert_almost_equal(f_matrix, f_groundtruth)
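# Note on the ground truth: with F = [[0,0,0],[0,0,-1],[0,1,0]] the epipolar
# constraint x2^T F x1 = 0 reduces to y1 - y2 = 0, i.e. corresponding points lie
# on the same image row, which is exactly what a pure horizontal camera
# translation produces (F is only defined up to scale, hence the normalisation).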
def test_robust_f_estimation():
projections_file = os.path.join(data_dir, 'two_view_projections.dat')
(x1e, x2e) = pickle.load(open(projections_file, 'rb'))
#add gaussian noise to x1e and x2e
dev = 0.1
x1e += np.random.normal(0, dev, size=x1e.shape)
x2e += np.random.normal(0, dev, size=x2e.shape)
x1h = euclidean_to_homogeneous(x1e)
x2h = euclidean_to_homogeneous(x2e)
f_matrix = robust_f_estimation(x1h, x2h)
# fundamental matrix corresponding to an horizontal displacement
f_groundtruth = np.array(((0, 0, 0),
(0, 0, -1),
(0, 1, 0)))
#the sampson error should be equal to the noise variance
e_gt = sampson_error(x1h, x2h, f_groundtruth)
e = sampson_error(x1h, x2h, f_matrix)
assert_allclose(np.sqrt(np.mean(e_gt)), dev, rtol=1e-1)
assert_allclose(np.sqrt(np.mean(e)), dev, rtol=1e-1)
f_matrix /= np.max(f_matrix)
assert_allclose(np.abs(f_matrix), np.abs(f_groundtruth), atol=0.1)
<file_sep>import numpy as np
from skimage.segmentation import slic
def video_slic(video, n_segments, compactness=10):
"""
Oversegments a collection of frames using SLIC
Parameters
----------
video: numpy array
3 or 4 dimensional array representing the video, in CIE LAB
n_segments: int
Number of segments desired for each frame
compactness: float, optional
Compactness parameter for the SLIC algorithm
Returns
-------
partition: numpy array
Array representing the partition
"""
d = len(video.shape)
width = video.shape[1]
height = video.shape[2]
if d == 3: # pragma: no cover
video = video[..., np.newaxis]
elif d != 4: # pragma: no cover
raise ValueError('Video should have 3 or 4 dimensions')
n_frames = video.shape[0]
partition = np.zeros((n_frames, width, height))
current_label = 0
for n in range(n_frames):
frame = video[n, ...]
partition[n, ...] = current_label + slic(frame, n_segments, compactness,
convert2lab=False,
enforce_connectivity=True)
current_label = np.max(partition[n, ...]) + 1
return partition
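# A minimal usage sketch (assumes `video` is already converted to CIE LAB and has
# shape (n_frames, width, height, 3)):
#     labels = video_slic(video, n_segments=300)
#     # labels[t] holds the SLIC superpixel labels of frame t; label ids are
#     # offset per frame so they stay unique across the whole sequence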
<file_sep>import numpy as np
def euclidean_to_homogeneous(x):
"""
Transforms X to be in homogeneous coordinates
Parameters
----------
x: numpy array
each column of the array is a point
Returns
-------
xh: numpy array,
x in homogeneous coordinates
"""
xe = np.vstack((x, np.ones(x.shape[1])))
return xe
def homogeneous_to_euclidean(xh):
"""
Transforms X to be in euclidean coordinates
Parameters
----------
xh: numpy array (3,N) or (4,N)
each column of the array is a point
Returns
-------
xe: numpy array,
x in euclidean coordinates
"""
return xh[0:-1, :]/xh[-1, :]
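# Quick round-trip example with two 2D points stored as columns:
#     x = np.array([[1., 3.],
#                   [2., 4.]])
#     xh = euclidean_to_homogeneous(x)    # [[1, 3], [2, 4], [1, 1]]
#     homogeneous_to_euclidean(xh)        # back to [[1, 3], [2, 4]]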
def hnormalize(x):
"""
Normalizes the last coordinate to be 1
Parameters
----------
x: numpy array
each column of the array is a point
Returns
-------
an array with points normalized
"""
if len(x.shape) == 1:
x = x[:, np.newaxis]
return np.squeeze(x / x[-1, :])
def normalize_points(x, is_homogeneous=False):
"""
Normalizes points so that they have mean 0 and variance 1
across dimensions
Parameters
----------
x: numpy array
array (D, N) with the points. Each columns is a
D-dimensional point
Returns
-------
Xn: numpy array,
normalized points
"""
dimensions = x.shape[0]
mu_x = np.mean(x, axis=1)
std_x = np.std(x, axis=1)
# if x is in homogeneous coordinates
if is_homogeneous:
dimensions -= 1
mu_x = mu_x[:-1]
std_x = std_x[:-1]
size = dimensions + 1
# build the transformation matrix
t = np.eye(size)
t[:dimensions, -1] = -mu_x/std_x
diag = np.arange(0, dimensions)
t[diag, diag] = 1/std_x
# normalize the points
mu_x_mat = np.repeat(mu_x[:, np.newaxis], x.shape[1], axis=1)
x_normalized = (x[:dimensions,:] - mu_x_mat) / std_x[:, np.newaxis]
# put 1 to the last coordinate if we work in homogeneous coordinates
if is_homogeneous:
x_normalized = np.vstack((x_normalized, np.ones(x.shape[1])))
return x_normalized, t
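# Usage sketch: the returned points have (approximately) zero mean and unit
# standard deviation per dimension, and `t` is the matrix that applies the same
# transform in homogeneous coordinates (assuming the last coordinate is 1), e.g.
#     xn, t = normalize_points(xh, is_homogeneous=True)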
|
64b67b22aad202bd89333df729ea326216eb1ff9
|
[
"Python",
"Makefile"
] | 54
|
Python
|
guillempalou/scikit-cv
|
66b5455f0097a158f0498b5cade4e8e8a0094c08
|
25c8f2ceaf5c8343d0f865a3414d5bb3d6f313d8
|
refs/heads/master
|
<repo_name>darkautism/w32<file_sep>/gofunc.go
package w32
import (
"errors"
"image"
"unsafe"
)
//GoHBITMAP is a HBITMAP wrapper
type GoHBITMAP struct {
Hbmp HBITMAP
W int
H int
Pixels unsafe.Pointer
}
//Image Convert GoHBITMAP to Image type
func (ghbmp *GoHBITMAP) Image() *image.RGBA {
rect := image.Rect(0, 0, ghbmp.W, ghbmp.H)
i := 0
img := image.NewRGBA(rect)
pixels := (*[9999999]uint8)(ghbmp.Pixels)[:ghbmp.W*ghbmp.H*4]
for y := 0; y < ghbmp.H; y++ {
for x := 0; x < ghbmp.W; x++ {
v0 := pixels[i+0]
v1 := pixels[i+1]
v2 := pixels[i+2]
// BGRA => RGBA, and set A to 255
img.Pix[i], img.Pix[i+1], img.Pix[i+2], img.Pix[i+3] = v2, v1, v0, 255
i += 4
}
}
return img
}
//Delete this GoHBITMAP and release resource
func (ghbmp *GoHBITMAP) Delete() {
ghbmp.H = -1
ghbmp.W = -1
DeleteObject(HGDIOBJ(ghbmp.Hbmp))
}
//ScreenShot can take snapshot of screen
func ScreenShot(h HWND, x, y, width, height int) (*GoHBITMAP, error) {
hdc := GetDC(h)
if hdc == 0 {
return nil, errors.New("GetDC failed")
}
defer ReleaseDC(h, hdc)
hDCMem := CreateCompatibleDC(hdc)
if hDCMem == 0 {
return nil, errors.New("CreateCompatibleDC failed")
}
defer DeleteDC(hDCMem)
var bmi BITMAPINFO
bmi.BmiHeader.BiSize = uint32(unsafe.Sizeof(bmi.BmiHeader))
bmi.BmiHeader.BiPlanes = 1
bmi.BmiHeader.BiBitCount = 32
bmi.BmiHeader.BiWidth = int32(width)
bmi.BmiHeader.BiHeight = int32(-height)
bmi.BmiHeader.BiCompression = BI_RGB
bmi.BmiHeader.BiSizeImage = 0
var p unsafe.Pointer
bitmap := CreateDIBSection(hdc, &bmi, DIB_RGB_COLORS, &p, HANDLE(0), 0)
old := SelectObject(hDCMem, HGDIOBJ(bitmap))
defer SelectObject(hDCMem, old)
BitBlt(hDCMem, x, y, width, height, hdc, 0, 0, SRCCOPY)
return &GoHBITMAP{
Hbmp: bitmap,
W: width,
H: height,
Pixels: p,
}, nil
}
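// Usage sketch (illustrative only; assumes the caller imports "image/png" and "os",
// and that HWND(0) makes GetDC return a device context for the whole screen):
//
//	shot, err := ScreenShot(0, 0, 0, 800, 600)
//	if err == nil {
//		defer shot.Delete()
//		if f, ferr := os.Create("shot.png"); ferr == nil {
//			defer f.Close()
//			png.Encode(f, shot.Image())
//		}
//	}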
|
afe3343d4a6f1368ede1f46646c04a5684fe32ef
|
[
"Go"
] | 1
|
Go
|
darkautism/w32
|
a5954951274cf95bff1ea5a2bf2b8967f3245f3f
|
f4f46d592d2393737c0791a6b93b0149d7915b54
|
refs/heads/main
|
<file_sep>import React from 'react';
import AppBar from '@material-ui/core/AppBar';
import Toolbar from '@material-ui/core/Toolbar';
import logo from '../assets/logo.png';
import { makeStyles } from '@material-ui/core/styles';
const useStyles = makeStyles((theme) => ({
style: {
backgroundColor: '#FFFFFF',
},
image: {
marginLeft: theme.spacing(4),
}
}));
export default function Topbar() {
const classes = useStyles();
return (
<AppBar position="static" className={classes.style}>
<Toolbar>
<img src={logo} alt="logo" className={classes.image} />
</Toolbar>
</AppBar>
)
}
<file_sep>import React from 'react';
import { makeStyles } from '@material-ui/core/styles';
import Card from '@material-ui/core/Card';
import CardHeader from '@material-ui/core/CardHeader';
import Avatar from '@material-ui/core/Avatar';
import Icon from '../assets/Icon.svg';
import Typography from '@material-ui/core/Typography';
import FormControl from '@material-ui/core/FormControl';
import InputLabel from '@material-ui/core/InputLabel';
import Input from '@material-ui/core/Input';
import InputAdornment from '@material-ui/core/InputAdornment';
import YearMonthPicker from './DatePicker';
import Box from '@material-ui/core/Box';
import Button from '@material-ui/core/Button';
const useStyles = makeStyles((theme) => ({
root: {
width: 560,
margin: 'auto',
textAlign: 'initial',
},
avatar: {
width: '64px',
height: '64px',
},
margin: {
margin: theme.spacing(2),
width: '250px',
},
planColor: {
background: '#F4F8FA',
},
buttonConfirm: {
width: 320,
height: 56,
borderRadius: 32,
}
}));
export default function GoalCard() {
const classes = useStyles();
const [amount, setAmount] = React.useState('');
const [date, setDate] = React.useState('');
const [savings, setSavings] = React.useState('');
const [months, setMonths] = React.useState('');
const handleChangeAmount = (e) => {
setAmount(e.target.value);
const d1 = new Date();
if (date !== '') {
let diff = (date.getFullYear() - d1.getFullYear()) * 12;
diff -= d1.getMonth();
diff += date.getMonth();
if (e.target.value !== '' && diff > 0) {
const saveAmount = Math.ceil(Number(e.target.value) / diff);
setMonths(diff);
setSavings(saveAmount);
}
}
}
const dateChange = (date) => {
setDate(date);
const d1 = new Date();
let diff = (date.getFullYear() - d1.getFullYear()) * 12;
diff -= d1.getMonth();
diff += date.getMonth();
if (amount !== '' && diff > 0) {
const saveAmount = Math.ceil(Number(amount) / diff);
setMonths(diff);
setSavings(saveAmount);
}
}
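// The month-difference / monthly-amount calculation above mirrors the one in
// handleChangeAmount; a shared helper is one way it could be factored
// (hypothetical sketch, not part of this component):
//   const monthsBetween = (from, to) =>
//     (to.getFullYear() - from.getFullYear()) * 12 - from.getMonth() + to.getMonth();
//   // e.g. from = Jan 2021, to = Mar 2021  ->  0 * 12 - 0 + 2 = 2 monthly deposits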
return (
<Card className={classes.root}>
<CardHeader
avatar={
<Avatar aria-label="recipe" className={classes.avatar} src={Icon} />
}
title={
<Typography variant="h5" component="h2">
Buy a house
</Typography>
}
subheader={
<Typography color="textSecondary" variant="h5" >
Saving goal
</Typography>
}
/>
<FormControl fullWidth className={classes.margin}>
<InputLabel htmlFor="standard-adornment-amount">Total Amount</InputLabel>
<Input
id="standard-adornment-amount"
value={amount}
onChange={handleChangeAmount}
startAdornment={<InputAdornment position="start">$</InputAdornment>}
/>
</FormControl>
<YearMonthPicker dateChange={dateChange} />
<div style={{ border: '1px black solid', margin: '24px' }}>
<Box component="span" display="block" p={1} bgcolor="background.paper">
Monthly Amount: $ {savings}
</Box>
<Box component="span" display="block" p={1} className={classes.planColor}>
You’re planning {months} monthly deposits to reach your ${amount} goal.
</Box>
</div>
<div style={{ textAlign: 'center', marginBottom: '40px' }}>
<Button variant="contained" color="primary" className={classes.buttonConfirm}>
Confirm
</Button>
</div>
</Card>
);
}
<file_sep>import React from 'react';
import { makeStyles } from '@material-ui/core/styles';
import Topbar from './modules/Topbar';
import Typography from '@material-ui/core/Typography';
import GoalCard from './modules/Card';
import './App.css';
const useStyles = makeStyles((theme) => ({
rootBackground: {
backgroundColor: '#F4F8FA',
height: '100%',
},
}));
export default function App() {
const classes = useStyles();
return (
<div className={classes.rootBackground}>
<Topbar />
<div className="page-body">
<Typography className="heading-text">
Let's plan your saving goal.
</Typography>
<GoalCard />
</div>
</div>
);
}
|
ad986247886f8d37f49bd5f3ee89b7749ea2da54
|
[
"JavaScript"
] | 3
|
JavaScript
|
Premdeep20/Goal
|
513fa5987933d3206140c907a8026ed2a5a8e99e
|
d5df954015c94e1884ae9ae069dfe9b0e49a498c
|
refs/heads/master
|
<repo_name>yuriy2017/ChatWithDrawFunctions-PHP-Fabric.js<file_sep>/ChatWithDrawFunctions.loc/README.md
# Draw-Share
This project is a combination of a drawing system, a friend system and a message system.
The database schema is in the ds.sql file.
The database configuration is managed in the config.php file.
# Login Page

# Register Page

# Drawing Page

# Figure Page

# Friend Page

# Message Page

# Setting Page

<file_sep>/ChatWithDrawFunctions.loc/draw.php
<?php
include('session.php');
$_SESSION['pageStore'] = "draw.php";
if(!isset($_SESSION['login_id'])){
header("location: index.php"); // Redirecting To Home Page
}
?>
<!DOCTYPE html>
<html>
<head>
<script src="js/fabric.min.js"></script>
<script src="js/jquery.js"></script>
<script src="js/bootstrap.min.js"></script>
<script type="text/javascript">
//UpDate table with new dateTime every minutes
setInterval(() => { fetch('checkOnline/active.php?user_id=' + <?php echo $session_id ?>)}, 60000);
$(function() {// For saving data on database
$("#savBtn").click(function() {
var dataURL = canvas.toDataURL();
var dataMsg = document.getElementById('atchMsg').value;
var frnSelect = $(".frnSelect:checked").val();
if(dataMsg=='')
{
alert("Please enter some message");
$("#atchMsg").focus();
}
else if (frnSelect=='') {
alert("Please select friend");
}
else
{
$.ajax({
type: "POST",
url: "img/imgInsert.php",
data: {dataImg: dataURL, dataCont: dataMsg, dataFrn: frnSelect},
cache: true,
success: function(response){
if (response=="Please select a friend") {
alert(response);
$("#searchFrn").focus();
} else {
alert("Sucessfuly Send");
}
}
});
}
return false;
});
// For searching friend
$("#frnBtn").click(function() {
var textcontent = $("#searchFrn").val();
if(textcontent=='')
{
alert("Enter your friend name");
$("#searchFrn").focus();
}
else
{
$.ajax({
type: "POST",
url: "draw/searchFrn.php",
data: {q: textcontent},
cache: true,
success: function(response){
document.getElementById("frnList").innerHTML = response;
}
});
}
return false;
});
});
// Increase width of input
function morewidth() {
$("#atchMsg").animate({ width: "1340px"}, 400);
document.getElementById('SzIcon').className = "icon fa-arrow-right";
}
// Decrease width of input
function lesswidth() {
$("#atchMsg").animate({ width: "170px"}, 400);
document.getElementById('SzIcon').className = "icon fa-arrow-left";
}
</script>
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="css/w3.css">
<link rel="stylesheet" href="css/bootstrap.css">
<style type="text/css">
input[type="text"]:focus {
box-shadow: 0 0 0 0.2rem rgba(134,142,150,.5);
}
.l5, .l40, .l75, .l110, .l145 {
position:absolute;
width:30px;height:30px;
}
.l5 {
left:5px;
}
.l40 {
left:40px;
}
.l75 {
left:75px;
}
.l110 {
left:110px;
}
.l145 {
left:145px;
}
.t35 {
top:35px;
}
.t70 {
top:70px;
}
</style>
</head>
<body class="bg-light">
<!-- Menu Heading -->
<?php include 'menuHead.php'; ?>
<canvas id="c" width="1170" height="580" style="border:1px solid white"></canvas>
<div class="modal" style="width:30px;height:30px;background:#A7A9AC;"></div>
<!-- Collection of colors -->
<div class="dark" onclick="chooseColor()" style="position:absolute;top:52px;right: 60px;">Choose Colors <i id="colorPen" class="icon fa-pencil" style="color:#A7A9AC;"></i></div>
<div id="colorCollection" style="position:absolute;top:75px;right: 180px;">
<div class="rounded l5" style="background:#A7A9AC;" id="#A7A9AC" onclick="color(this)"></div>
<div class="rounded l40" style="background:#00AACC;" id="#00AACC" onclick="color(this)"></div>
<div class="rounded l75" style="background:#004DE6;" id="#004DE6" onclick="color(this)"></div>
<div class="rounded l110" style="background:#3D00B8;" id="#3D00B8" onclick="color(this)"></div>
<div class="rounded l145" style="background:#600080;" id="#600080" onclick="color(this)"></div>
<div class="rounded l5 t35" style="background:#FFE600;" id="#FFE600" onclick="color(this)"></div>
<div class="rounded l40 t35" style="background:#FFAA00;" id="#FFAA00" onclick="color(this)"></div>
<div class="rounded l75 t35" style="background:#FF5500;" id="#FF5500" onclick="color(this)"></div>
<div class="rounded l110 t35" style="background:#E61B1B;" id="#E61B1B" onclick="color(this)"></div>
<div class="rounded l145 t35" style="background:#B31564;" id="#B31564" onclick="color(this)"></div>
<div class="rounded l5 t70" style="background:#A2E61B;" id="#A2E61B" onclick="color(this)"></div>
<div class="rounded l40 t70" style="background:#26E600;" id="#26E600" onclick="color(this)"></div>
<div class="rounded l75 t70" style="background:#008055;" id="#008055" onclick="color(this)"></div>
<div class="rounded l110 t70" style="background:#58595B;" id="#58595B" onclick="color(this)"></div>
<div class="rounded l145 t70" style="background:#613D30;" id="#613D30" onclick="color(this)"></div>
</div>
<!-- div -->
<!-- Eraser -->
<!-- <div style="position:absolute;top:187px;left: 5px;">Eraser <i id="EraserIcon" class="icon fa-eraser"></i></div> -->
<!-- <div class="rounded" style="position:absolute;top:185px;left: 80px;width:31px;height:31px;background:white;border:3px solid;" id="white" onclick="color(this)"></div> -->
<!-- /div -->
<!-- Adjust width of pen with Range -->
<canvas id="toolCan" width="60" height="35" style="position: absolute;top:185px;right: 5px;"></canvas>
<input type="range" id="colorWidth" min="1" max="30" value="25" oninput="range(this)" style="position: absolute;top:191px;right:70px;width:105px;">
<!-- Change Drawing mode -->
<button class="btn btn-secondary" id="selObj" onclick="selectObject()" style="position: absolute;top:220px;right:5px;width:170px;">Enter drawing mode</button>
<!-- Add/delete predefined object -->
<select style="position: absolute;top:260px;right:44px;width:133px;" class="custom-select" id="paintOption">
<option value="rectangle">Rectangle</option>
<option value="triangle">Triangle</option>
<option value="circle">Circle</option>
<option value="line">Line</option>
<option value="text">Textbox</option>
</select>
<button class="btn btn-primary" onclick="add()" value="add" id="add" style="position:absolute;top:260px;right:42px;"><i id="AddIcon" class="icon fa-plus"></i></button>
<button class="btn btn-danger" onclick="deleteObjects()" value="delete" id="delete" style="position:absolute;top:260px;right:5px;"><i id="DeleteIcon" class="icon fa-times"></i></button>
<!-- Search friend -->
<form method="post">
<input type="search" id="searchFrn" placeholder="Search your friend" style="position: absolute;top:300px;right:5px;width:170px;">
<input type="submit" name="msg" id="frnBtn" style="display: none;">
<div id="frnList" style="position: absolute;top:330px;right:5px;width:170px;height:190px;overflow:auto;"></div>
</form>
<form method="post">
<!-- Attach message -->
<div style="position: absolute;top:532px;right:15px;">Attach your message <i id="SzIcon" class="icon fa-arrow-left"></i></div>
<input type="text" name="atchMsg" id="atchMsg" onblur="lesswidth()" onclick="morewidth()" class="form-control" style="position: absolute;top:555px;right:5px;width:170px;" required>
<!-- Save / clear buttons -->
<button class="btn btn-secondary" value="Save" id="savBtn" size="23" style="position:absolute;top:598px;right:98px;"><i id="SendIcon" class="icon fa-send"></i> Send</button>
</form>
<button class="btn btn-danger" value="Clear" id="clr" size="23" onclick="cleanUp()" style="position:absolute;top:598px;right:10px;"><i id="ClearIcon" class="icon fa-times"></i> Clear</button>
<script>
$(document).ready(function(){
$("#selObj").click(function(){
if ($("#selObj").text()=='Enter drawing mode') {
$("#selObj").text('Cancel drawing mode');
$("#paintOption").hide(400);
$("#add").hide(400);
$("#delete").hide(400);
$("#searchFrn").animate({ top: "260px"}, 400);
$("#frnList").animate({ top: "290px", height: "230px"}, 400);
}
else if ($("#selObj").text()=='Cancel drawing mode') {
$("#selObj").text('Enter drawing mode');
$("#paintOption").show(400);
$("#add").show(400);
$("#delete").show(400);
$("#searchFrn").animate({ top: "300px"}, 400);
$("#frnList").animate({ top: "330px", height: "190px"}, 400);
}
});
});
var x = "#A7A9AC",
y = 24;
tcan = document.getElementById('toolCan');
tctx = tcan.getContext("2d");
tctx.clearRect(0, 0, 60, 35);
tctx.beginPath();
tctx.moveTo(12, 35/2);
tctx.lineTo(48,35/2);
tctx.strokeStyle = document.getElementById('colorPen').style.color;
tctx.lineWidth = 24;
tctx.lineCap = 'round';
tctx.stroke();
function color(obj) {
x=obj.id
y=document.getElementById('colorWidth').value;
document.getElementById('colorPen').style.color = x;
document.getElementById('colorWidth').style.display = "inline";
canvas.freeDrawingBrush.color = x;
var checkObj = canvas.getActiveObject();
if (checkObj)
{
if (checkObj.get('type')!='path') checkObj.set("fill", x);
checkObj.set("stroke", x);
canvas.renderAll();
}
tctx.clearRect(0, 0, 60, 35);
tctx.beginPath();
tctx.moveTo(y/2, 35/2);
tctx.lineTo(60-y/2,35/2);
tctx.strokeStyle = x;
tctx.lineWidth = y;
tctx.lineCap = 'round';
tctx.stroke();
}
// create a wrapper around native canvas element (with id="c")
function add() {
var paintOpt = $("#paintOption").val();
switch(paintOpt) {
case 'rectangle':
var rectangle = new fabric.Rect({
width: 100,
height: 70,
fill: x,
left: 50,
top: 50
});
canvas.add(rectangle);
break;
case 'triangle':
var triangle = new fabric.Triangle({
width: 100,
height: 75,
fill: x,
left: 250,
top: 50
});
canvas.add(triangle);
break;
case 'circle':
var circle = new fabric.Circle({
radius: 50,
fill: x,
left: 450,
top: 50
});
canvas.add(circle);
break;
case 'line':
var line = new fabric.Line([50, 100, 200, 100], {
left: 650,
top: 75,
stroke: x,
strokeWidth: 8
});
canvas.add(line);
break;
case 'text':
var addtext = new fabric.Textbox('Edit this text', {
left: 400,
top: 200,
fill: x,
strokeWidth: 2,
fontFamily: 'Arial'
});
canvas.add(addtext);
break;
default:
alert('No');
}
}
function selectObject() {
canvas.isDrawingMode = !canvas.isDrawingMode;
}
function deleteObjects(){
var active = canvas.getActiveObjects();
if (active) {
canvas.discardActiveObject();
canvas.remove(...active);
}
}
var canvas = this.__canvas = new fabric.Canvas('c');
if (canvas.freeDrawingBrush) {
canvas.freeDrawingBrush.color = x;
canvas.freeDrawingBrush.width = y;
}
function range(tobj) {
y=tobj.value;
tctx.clearRect(0, 0, 60, 35);
tctx.beginPath();
tctx.moveTo(y/2, 35/2);
tctx.lineTo(60-y/2,35/2);
tctx.lineWidth = y;
tctx.lineCap = 'round';
tctx.stroke();
canvas.freeDrawingBrush.width = y;
}
</script>
</body>
</html>
|
e0ddba8f46a4d03d0e1d29c0ae2d0945f31df8db
|
[
"Markdown",
"PHP"
] | 2
|
Markdown
|
yuriy2017/ChatWithDrawFunctions-PHP-Fabric.js
|
a77eb2e40898a01b64b8c3b4a3f327d9dd7f00f9
|
972b6cca6c66a29eaf64f92557d9d9771a9f16fc
|
refs/heads/master
|
<repo_name>baiyiqi/HSHY_EN<file_sep>/src/Entity/NewsSubjects.java
package Entity;
import KWPatten.Observers;
import KWPatten.Subjects;
import Log.Logs;
import Utility.tool;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
/**
* Created by yiqibai on 12/24/15.
*/
public class NewsSubjects implements Subjects {
private Queue<Observers> observersQueue = new LinkedList<Observers>();
private List<News> currentNews = new ArrayList<News>();
private List<News> pastNews = new ArrayList<News>();
private long ts = 0;
private long timerange = 3600;
Logs loger;
public NewsSubjects(long ts, long timerange, Logs loger){
this.ts = ts;
this.timerange = timerange;
this.loger = loger;
}
//@Override
public List<News> getCurrentNews() {
return currentNews;
}
//@Override
public void setCurrentNews(List<News> newslist) {
currentNews = newslist;
}
//@Override
public List<News> getPastNews() {
return pastNews;
}
//@Override
public void setPastNews(List<News> newsList) {
pastNews = newsList;
}
/**
*
* @param o
*/
public void addObservers(Object o){
if(o instanceof Observers && !observersQueue.contains(o)){
observersQueue.add( (Observers) o);
}
}
/**
*
* @param o
*/
public void removeSubscribe(Object o){
if(o instanceof Observers && observersQueue.contains(o)){
observersQueue.remove(o);
}
}
public void notifyObservers(){
while(observersQueue.size() > 0){
Observers observer = observersQueue.poll();
observer.update(this);
}
}
public long getTs() {
return ts;
}
public long getTimerange(){
return timerange;
}
public List<DBObject> toDBObj(){
List<DBObject> dbObjectList = new ArrayList<DBObject>();
List<News> sortNews = tool.getAllTopNews(currentNews, currentNews.size());
for(News n : sortNews){
if(n.getIsDup()){
continue;
}
if(!tool.isValidNews(n.getNewsTitle(), n.getNewsText())){
continue;
}
DBObject dbObject = new BasicDBObject();
dbObject.put("title", n.getNewsTitle());
dbObject.put("newsurl", n.getNewsUrl());
dbObject.put("src", n.getNewsSrc());
dbObject.put("category", n.getNewsCate());
dbObject.put("content", n.getNewsText());
dbObject.put("dateTS", ts);
dbObject.put("happentime", n.getNewsTimeReadble());
dbObject.put("summary", n.getNewsSummary());
dbObject.put("imageurl", n.getNewsImageUrl());
dbObject.put("showNews", false);
dbObject.put("score", n.getRankScore());
dbObject.put("isTrained", false);
dbObject.put("isNewMedicine", n.getIsNewMedicine());
dbObject.put("isNewTreatment", n.getIsNewTreatment());
dbObject.put("html", n.getNewsHtml());
dbObject.put("enTitle", n.getEnText());
dbObject.put("enText", n.getEnText());
if(n.getNewMedWord() != null && n.getNewMedWord().size() > 0){
DBObject words = new BasicDBObject();
for(String word : n.getNewMedWord().keySet()){
DBObject wInfo = new BasicDBObject();
wInfo.put("cn", "");
wInfo.put("flag", false);
wInfo.put("sentence", n.getNewMedWord().get(word));
words.put(word, wInfo);
}
dbObject.put("newMedWord", words);
}
dbObjectList.add(dbObject);
}
loger.info("最终新闻 " + dbObjectList.size() );
return dbObjectList;
}
}
<file_sep>/src/Utility/tool.java
package Utility; /**
* Created with IntelliJ IDEA.
* User: BYQ
* Date: 2/27/14
* Time: 9:47 PM
* To change this template use File | Settings | File Templates.
*/
import Entity.News;
import Log.Logs;
import com.huaban.analysis.jieba.JiebaSegmenter;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class tool {
/**
* get month timestamp
* @param ts
* @return
*/
public static long getMonthStartTimestamp(long ts) {
Date date = new Date();
date.setTime(ts*1000);
TimeZone timeZoneUTC = TimeZone.getTimeZone("GMT");
Calendar cal = Calendar.getInstance(timeZoneUTC);
cal.setTime(date);
//int month = cal.get(Calendar.MONTH) + 1;
//cal.set(Calendar.MONTH, month);
cal.set(Calendar.DAY_OF_MONTH, 1);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return cal.getTimeInMillis()/1000 ;
}
/**
* Given a timestamp, return the start timestamp of the week UTC
* @param ts The give timestamp
* @return The start timestamp of the week
*/
public static long getWeekStartTimestamp(long ts) {
Date date = new Date();
date.setTime(ts*1000);
TimeZone timeZoneUTC = TimeZone.getTimeZone("GMT");
Calendar cal = Calendar.getInstance(timeZoneUTC);
cal.setTime(date);
int day_of_week = cal.get(Calendar.DAY_OF_WEEK);
if(day_of_week == 1)
day_of_week = 6;
else
day_of_week -= 2;
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return cal.getTimeInMillis()/1000 - 3600*24*day_of_week;
}
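// Worked example: Calendar.DAY_OF_WEEK is 1 for Sunday and 4 for Wednesday, so a
// Wednesday timestamp gives day_of_week = 4 - 2 = 2 and the method returns that
// day's midnight minus 2 days, i.e. the preceding Monday 00:00 UTC; a Sunday
// timestamp maps to day_of_week = 6 and again lands on the Monday of that week.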
/**
* Given a timestamp, return the start timestamp of the day UTC
* @param ts The give timestamp
* @return The start timestamp of the day
*/
public static long getDayStartTimestamp(long ts) {
Date date = new Date();
date.setTime(ts*1000);
TimeZone timeZoneUTC = TimeZone.getTimeZone("GMT");
Calendar cal = Calendar.getInstance(timeZoneUTC);
cal.setTime(date);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return cal.getTimeInMillis()/1000;
}
//ts = 1412431314, return 1412431200
public static long getHourStartTimeStamp(long ts){
long rem = ts % 3600;
return ts- rem;
}
/**
* Given a timestamp, return the start timestamp of the day UTC
* @param ts The give timestamp
* @return The year number like: 2014
*/
public static String getYear(long ts) {
Date date = new Date();
date.setTime(ts*1000);
TimeZone timeZoneUTC = TimeZone.getTimeZone("GMT");
Calendar cal = Calendar.getInstance(timeZoneUTC);
cal.setTime(date);
return String.valueOf(cal.get(Calendar.YEAR));
}
/**
* Given a timestamp, return the start timestamp of the day UTC
* @param ts The give timestamp
* @return The month number like: 07,08,09,10,11,12,01,02...
*/
public static String getMonth(long ts) {
Date date = new Date();
date.setTime(ts*1000);
TimeZone timeZoneUTC = TimeZone.getTimeZone("GMT");
Calendar cal = Calendar.getInstance(timeZoneUTC);
cal.setTime(date);
int month = cal.get(Calendar.MONTH) + 1;
if(month < 10)
return "0"+String.valueOf(month);
else
return String.valueOf(month);
}
public static String getHour(long ts) {
Date date = new Date();
date.setTime(ts*1000);
TimeZone timeZoneUTC = TimeZone.getTimeZone("GMT");
Calendar cal = Calendar.getInstance(timeZoneUTC);
cal.setTime(date);
return String.valueOf(cal.get(Calendar.HOUR_OF_DAY));
}
/**
* get top n news
* @param from
* @param n
* @return top n entries, sorted by value in descending order
*/
public static Map<String, Double> Top(
Map<String, Double> from, final int n) {
ArrayList<Map.Entry<String, Double>> sortedMap = new ArrayList<Map.Entry<String,Double>>(from.entrySet());
Collections.sort(sortedMap, new Comparator<Map.Entry<String, Double>>() {
@Override
public int compare(Map.Entry<String, Double> o1, Map.Entry<String, Double> o2) {
if (o1.getValue() < o2.getValue())
return 1;
else if(o1.getValue() > o2.getValue())
return -1;
else
return 0;
}
});
Map<String, Double> ret = new LinkedHashMap<String, Double>();
if(sortedMap.size() > n){
for (Map.Entry<String, Double> e : sortedMap) {
String topuser = e.getKey();
Double cnt = e.getValue();
ret.put(topuser, cnt);
if (ret.size() >= n)
break;
}
}
else {
for (Map.Entry<String, Double> e : sortedMap) {
String topuser = e.getKey();
Double cnt = e.getValue();
ret.put(topuser, cnt);
}
}
return ret;
}
//get top score News
public static List<News> getAllTopNews(List<News> newsList, int num){
List<News> topNews = new ArrayList<News>();
if(newsList.size() > 0){
Map<String, News> nameToNews = new HashMap<String, News>();
Map<String, Double> nameToScore = new HashMap<String, Double>();
for(News n : newsList){
nameToNews.put(n.getNewsTitle(), n);
nameToScore.put(n.getNewsTitle(), n.getRankScore());
}
Map<String, Double> top = Top(nameToScore, num);
for(String t : top.keySet()){
if(nameToNews.containsKey(t))
topNews.add(nameToNews.get(t));
}
}
return topNews;
}
/**
* calculate the cosine similarity between two tokenized texts
* @param text1
* @param text2
* @return
*/
public static double getSimilarity(List<String> text1, List<String> text2) {
HashMap<String, Integer[]> wordMap = new HashMap<String, Integer[]>();
//HashMap<String, Integer[]> wordMap2 = new HashMap<String, Integer>();
for (String word : text1) {
if (!wordMap.containsKey(word)) {
wordMap.put(word, new Integer[2]);
wordMap.get(word)[0] = new Integer(0);
wordMap.get(word)[1] = new Integer(0);
}
wordMap.get(word)[0] += 1;
}
for (String word : text2) {
if (!wordMap.containsKey(word)) {
wordMap.put(word, new Integer[2]);
wordMap.get(word)[0] = new Integer(0);
wordMap.get(word)[1] = new Integer(0);
}
wordMap.get(word)[1] += 1;
}
double result = 0;
int dotProduct = 0;
int eNorm1 = 0;
int eNorm2 = 0;
int magnitude = 0;
for (String word : wordMap.keySet()) {
dotProduct += wordMap.get(word)[0] * wordMap.get(word)[1];
eNorm1 += Math.pow(wordMap.get(word)[0], 2);
eNorm2 += Math.pow(wordMap.get(word)[1], 2);
}
if (eNorm1 != 0 && eNorm2 != 0) {
result = dotProduct / Math.sqrt(eNorm1 * eNorm2);
}
return result;
}
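// Worked example: text1 = ["a","b"], text2 = ["a","a"] gives counts a:(1,2), b:(1,0);
// dot product = 1*2 + 1*0 = 2, norms are sqrt(2) and sqrt(4), so the cosine
// similarity returned is 2 / sqrt(2*4) ~= 0.707.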
public static double StringSimilarity(String s1, String s2){
List<String> slst1 = segmentText(s1);
List<String> slst2 = segmentText(s2);
return getSimilarity(slst1, slst2 );
}
//convert ts to readable time
public static String convertTS(long ts) {
Date date = new Date();
date.setTime(ts*1000);
SimpleDateFormat df=new SimpleDateFormat("yyyy-MM-dd HH:mm");
return df.format(date);
}
//check whether the url contain http:
public static boolean isValidURL(String url){
String newURL = url.trim();
Pattern p = Pattern.compile("^http:");
Matcher m = p.matcher(newURL);
return m.find();
}
/**
* check time is timestamp, not readable time
* @param ts
* @return true:timestamp, false:readable time
*/
public static boolean isValidTime(long ts){
String time = String.valueOf(ts);
Pattern p = Pattern.compile("^1");
Matcher m = p.matcher(time);
return m.find();
}
/**
* check title does not contain "<a..."
* @param title
* @return true: clean,doesn't contain, false:contain
*/
public static boolean isValidTitle(String title){
Pattern p = Pattern.compile("^<a.*");
Matcher m = p.matcher(title.trim());
return !m.matches();
}
public static boolean isValidImageUrl(String url) {
if (!isValidURL(url) || url.equals("")){
return false;
}
int code = 404;
try {
URL u = new URL( url);
HttpURLConnection huc = ( HttpURLConnection ) u.openConnection ();
huc.setReadTimeout(2000);
huc.setConnectTimeout(3000);
huc.setRequestMethod ("GET"); //OR huc.setRequestMethod ("HEAD");
huc.connect () ;
code = huc.getResponseCode() ;
} catch (MalformedURLException e) {
//e.printStackTrace();
} catch (ProtocolException e) {
//e.printStackTrace();
} catch (IOException e) {
// e.printStackTrace();
}
if(code == 404)
return false;
else
return true;
}
public static String resetTitle(String title){
String timepatten = "(\\d{2}月\\d{2}日) (\\d{2}:\\d{2})";
String wordpatten = "[(图)|(图)|(组图)|/图|\\[图\\]|\\[组图\\]|(高清组图)|图片来源:|图片说明:]";
String timepatten1 = "(\\d{2}月\\d{2}日)[ ]*(\\d{2}:\\d{2})";
String tp1 = "(\\d{2}月\\d{2}日)";
String tp2 = "(\\d{2}:\\d{2})";
return title.replaceAll(tp1,"").replaceAll(wordpatten, "").replaceAll(tp2,"").trim();
}
//segment
public static List<String> segmentText(String content){
//return the words whose length>1
List<String> txtArray = JiebaTokenizer.getInstance().process(getReplacement(content),
JiebaSegmenter.SegMode.SEARCH);
return txtArray;
}
//remove special charactor
public static String getReplacement( String text){
String stxt = text.replaceAll("[\\((]分享自[^\\))]*[\\))]|<a.*</a>0123456789", "");
String regEx="[`~!@#$%^&*()+=|{}':;',\\[\\].-<>/?~!@#¥%……&*-——()——+|{}【】‘;:”“’.。,、?@{a-z|A-Z|0-9}*《》http{a-z|A-Z|0-9}*]";
Pattern p = Pattern.compile(regEx);
Matcher m = p.matcher(stxt);
return m.replaceAll("").trim();
}
public static List<String> getSentence(String paraText){
List<String> ret = new ArrayList<String>();
Pattern senPattern = Pattern.compile("([^\\。\\!\\!\\?\\?\"\\“\\”]|([\"\\“][^\"\\“\\”]*[^\\。\\!\\!\\?\\?]*[\"\\“\\”]))*([\\。\\!\\!\\?\\?]+[\\s\\ \\ ]*|[\"\\“]+[^\"\\“\\”]*[\\。\\!\\!\\?\\?]+[\"\\“\\”]+[\\s\\ \\ ]*|[^\\。\\!\\!\\?\\?\"\\“\\”]*$)");
//build a list to hold the raw text of the sentences in this paragraph
ArrayList<String> senRaws = new ArrayList<String>();
//use the regex to extract the paragraph's sentences and store them in senRaws
//String tex = new String(paraText);
//tex = tex.replace("\n", "").trim();
//tex = tex.replace("\r","").trim();
//tex = tex.replace(" ", "").trim();
Matcher matcher = senPattern.matcher(paraText.replace("\n", "").replace("\r","").replace(" ", "").trim());
while (matcher.find()) {
String raw = matcher.group().replace(" ", "");
if(!raw.equals(""))
senRaws.add(raw);
}
if(senRaws.size() > 0){
ret.add(senRaws.get(0));
ret.add(senRaws.get(senRaws.size()-1));
}
//System.out.println(ret);
return ret;
}
public static boolean checkSimi(String tex1, String tex2, double textSimi){
List<String> lst1 = getSentence(tex1);
List<String> lst2 = getSentence(tex2);
String sent1 = "";
String sent2 = "";
if(lst1.size() > 0)
sent1 = lst1.get(0) + lst1.get(1);
if(lst2.size() > 0)
sent2 = lst2.get(0) + lst2.get(1);
if(sent1.equals("") && sent2.equals("")){
if(StringSimilarity(tex1, tex2) > textSimi)
return true;
}
if(sent1.equals(sent2) || sent1.contains(sent2) || sent2.contains(sent1))
return true;
return false;
}
public static String sentence(String text){
Pattern firstSenPattern = Pattern.compile("^[^\\。\\!\\!\\?\\?\"\\“\\”]*[\\。\\!\\!\\?\\?\"\\“\\”]");
Pattern lastSenPattern = Pattern.compile("[^\\。\\!\\!\\?\\?\"\\“\\”]*[\\。\\!\\!\\?\\?\"\\“\\”]{0,1}$");
//text to be matched
//String text = "这是第一句,第一句。第二句在这里。这是最后一句!";
//matchers for the first and the last sentence
Matcher firstSenMatcher = firstSenPattern.matcher(text.replace(" ", "").replace("\n", "").replace("\r","").trim());
Matcher lastSenMatcher = lastSenPattern.matcher(text.replace(" ", "").replace("\n", "").replace("\r","").trim());
//first and last sentence
String firstSen = "";
String lastSen = "";
//match the first sentence
if (firstSenMatcher.find()) {
firstSen = firstSenMatcher.group().replace(" ", "");
}
//match the last sentence
if (lastSenMatcher.find()) {
lastSen = lastSenMatcher.group().replace(" ", "");
}
return firstSen + lastSen;
}
public static boolean isSameText(String text1, String text2, double textSimi){
String sent1 = sentence(text1);
String sent2 = sentence(text2);
if(sent1.equals("") && sent2.equals("")){
if(tool.StringSimilarity(text1, text2) > textSimi)
return true;
}
if(sent1.equals(sent2) || sent1.contains(sent2) || sent2.contains(sent1))
return true;
return false;
}
public static Set<String> extractEnglish(String txt){
Set<String> newMed = new HashSet<String>();
try {
String cleanTxt = txt.replaceAll("(http://)*(www.)*[\\S]*(.com)(\\/\\S*)*", "");
//String patten = "[\\w]+(\\-)*[\\w]+|[\\w]+";
Pattern p = Pattern.compile("[A-Za-z]+(\\-)*[A-Za-z]+|[A-Za-z]+");
Matcher m = p.matcher(cleanTxt);
while(m.find()){
if(m.group().length() > 1){
newMed.add(m.group().trim());
}
}
}catch (Exception e){
System.out.println(e.getMessage());
}
finally {
return newMed;
}
}
public static Set<String> getWordDict(Logs hsLogger){
Set<String> wordDict = new HashSet<String>();
Scanner sc = null;
try {
FileInputStream inputStream = new FileInputStream("./dict");
sc = new Scanner(inputStream, "UTF-8");
while (sc.hasNextLine()) {
String word = sc.nextLine().replace("\r\n", "").replace("\n", "").trim();
wordDict.add(word.toLowerCase());
}
if (sc.ioException() != null) {
throw sc.ioException();
}
inputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (sc != null) {
sc.close();
}
}
hsLogger.info("FilterDict Info [ Load Filter Words " + wordDict.size() + "]");
return wordDict;
}
public static List<String> getSegText(String text){
List<String> sentence = new ArrayList<String>();
try{
Pattern senPattern = Pattern.compile("([^\\。\\!\\!\\?\\?\"\\“\\”]|([\"\\“][^\"\\“\\”]*[^\\。\\!\\!\\?\\?]*[\"\\“\\”]))*([\\。\\!\\!\\?\\?]+[\\s\\ \\ ]*|[\"\\“]+[^\"\\“\\”]*[\\。\\!\\!\\?\\?]+[\"\\“\\”]+[\\s\\ \\ ]*|[^\\。\\!\\!\\?\\?\"\\“\\”]*$)");
Matcher matcher = senPattern.matcher(text);
while (matcher.find()) {
String raw = matcher.group().replace(" ", "");
if(!raw.equals("")){
//System.out.println(raw);
sentence.add(raw);
}
}
}catch (Exception e){
System.out.println(e.getMessage());
}
finally {
return sentence;
}
}
public static boolean isValidNews(String title, String text){
if(title.contains("枪") || text.contains("枪") || title.contains("qq")){
return false;
}
return true;
}
public static void printNewsTitle(List<News> newsList){
for(News n : newsList){
System.out.println(n.getNewsTitle());
}
System.out.println();
}
}
<file_sep>/src/Entity/HSKeywords.java
package Entity;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import java.util.*;
/**
* Created by yiqibai on 12/26/15.
*/
public class HSKeywords {
//[category:[word:[tf, df, tfidf]]]
private Map<String, Map<String, double[]>> categoryKeywords = new HashMap<String, Map<String, double[]>>();
//[category: 0: document, 1:words num]
private Map<String, double[]> docWordsNum = new HashMap<String, double[]>();
public void setCategoryKeywords(Map<String, Map<String, double[]>> categoryKeywords) {
this.categoryKeywords = categoryKeywords;
}
public Map<String, Map<String, double[]>> getCategoryKeywords(){
return categoryKeywords;
}
public void setDocWordsNum(Map<String, double[]> docWordsNum) {
this.docWordsNum = docWordsNum;
}
public Map<String, double[]> getDocWordsNum() {
return docWordsNum;
}
public List<DBObject> cateKWordToBasicDB(){
List<DBObject> dbObjectList = new ArrayList<DBObject>();
Map<String, Map<String, double[]>> data = new HashMap<String, Map<String, double[]>>();
for(String cate : categoryKeywords.keySet()){
for(String word : categoryKeywords.get(cate).keySet()){
if(!data.containsKey(word))
data.put(word, new HashMap<String, double[]>());
if(!data.get(word).containsKey(cate))
data.get(word).put(cate, categoryKeywords.get(cate).get(word));
}
}
for(String word : data.keySet()){
BasicDBObject wobj = new BasicDBObject();
wobj.append("word", word);
BasicDBObject catobj = new BasicDBObject();
for(String cate : data.get(word).keySet()){
double[] percent = data.get(word).get(cate);
BasicDBObject perObj = new BasicDBObject();
perObj.append("tf", percent[0]);
perObj.append("df", percent[1]);
catobj.append(cate, perObj);
}
wobj.append("categoryScore", catobj);
dbObjectList.add(wobj);
}
return dbObjectList;
}
public List<DBObject> docToBasicDB(){
List<DBObject> dbObjectList = new ArrayList<DBObject>();
for(String cate : docWordsNum.keySet()){
BasicDBObject obj = new BasicDBObject();
obj.append("category", cate);
obj.append("documents", docWordsNum.get(cate)[0]);
obj.append("words", docWordsNum.get(cate)[1]);
dbObjectList.add(obj);
}
return dbObjectList;
}
public Set<String> getHSKeyWords(){
Set<String> keywords = new HashSet<String>();
for(String c : categoryKeywords.keySet()){
for(String s : categoryKeywords.get(c).keySet()){
keywords.add(s.toLowerCase());
}
}
return keywords;
}
}
<file_sep>/src/Log/Logs.java
package Log;
import Property.Property;
import java.io.File;
import java.io.IOException;
import java.util.logging.LogManager;
import java.util.logging.Logger;
/**
* Created by yiqibai on 12/23/15.
*/
public class Logs {
private Logger KWBloger;
private Property Logproperties;
public Logs(String projectname, Property logproperty){
KWBloger = Logger.getLogger(String.format( "[ %s ]", projectname));
Logproperties = logproperty;
logsetup();
}
/**
* configure the log property
*/
private void logsetup(){
//create log file
String pattern = Logproperties
.getProperty("java.util.logging.FileHandler.pattern");
String logFilePath = pattern.substring(0, pattern.lastIndexOf("/"));
File file = new File(logFilePath);
if (!file.exists()) {
file.mkdirs();
}
//load configuration
LogManager logManager = LogManager.getLogManager();
try {
logManager.readConfiguration(Thread.currentThread().getContextClassLoader().getResourceAsStream(Logproperties.getFile()));
} catch (IOException e) {
e.printStackTrace();
}
}
public Logger getKWBloger(){
return KWBloger;
}
public void info(String msg){
KWBloger.info(msg);
}
public void server(String msg){
KWBloger.severe(msg);
}
}
<file_sep>/src/Utility/JiebaTokenizer.java
package Utility;
import com.huaban.analysis.jieba.JiebaSegmenter;
import com.huaban.analysis.jieba.SegToken;
import com.huaban.analysis.jieba.WordDictionary;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
* Created by yiqibai on 7/4/14.
*/
public class JiebaTokenizer {
private static JiebaTokenizer instance = null;
private static JiebaSegmenter segmenter = null;
WordDictionary dictAdd = WordDictionary.getInstance();
File file = new File("./library/dictionary.dic");
private JiebaTokenizer() {
dictAdd.loadUserDict(file);
segmenter = new JiebaSegmenter();
}
private static synchronized void syncInit() {
if (instance == null)
instance = new JiebaTokenizer();
}
public static JiebaTokenizer getInstance() {
if (instance == null) {
syncInit();
}
return instance;
}
public static String processWithF(String text, JiebaSegmenter.SegMode mode) {
return segmenter.sentenceProcess(text).toString();
}
public static List<String> process(String text, JiebaSegmenter.SegMode mode) {
List<String> list = new ArrayList<String>();
List<SegToken> tokens = segmenter.process(text, mode);
//String[] segTK = new String[tokens.size()];
for(int i = 0; i < tokens.size(); i++) {
String s = tool.getReplacement(tokens.get(i).token);
if(s.trim().length() > 1)
list.add(tokens.get(i).token);
}
return list;
}
}
<file_sep>/huashengkuaiwen2.0.properties
path.variable.maven_repository=/Users/yiqibai/.m2/repository
jdk.home.1.7=/Library/Java/JavaVirtualMachines/jdk1.7.0_60.jdk/Contents/Home
javac2.instrumentation.includeJavaRuntime=false<file_sep>/src/DataBase/MongoDB.java
package DataBase;
import Log.Logs;
import com.mongodb.MongoClient;
/**
* Created by yiqibai on 12/27/15.
*/
public abstract class MongoDB implements DataBase {
private String ip;
private int port;
private String db;
MongoClient mongo;
Logs loger;
MongoDB(String ip, int port, Logs loger) {
this.loger = loger;
this.ip = ip;
this.port = port;
Connect();
}
@Override
public void Connect() {
try {
mongo = new MongoClient(ip, port);
} catch (Exception e) {
loger.server("MongoException [" + e.getMessage() + "]");
}
}
@Override
public void Close() {
mongo.close();
}
public void setIp(String ip) {
this.ip = ip;
}
public String getIp() {
return ip;
}
public void setPort(int port) {
this.port = port;
}
public int getPort() {
return port;
}
public void setDb(String db) {
this.db = db;
}
public String getDb() {
return db;
}
}
<file_sep>/src/Remover/HashRemover.java
package Remover;
import Entity.News;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Created by yiqibai on 12/24/15.
*/
public class HashRemover implements Remover {
private static Set<String> titleSet = new HashSet<String>();
private static Set<String> imageurlset = new HashSet<String>();
private static Set<String> newsurlset = new HashSet<String>();
private static Set<String> sentenceSet = new HashSet<String>();
// public HashRemover(){
// Initiate();
// }
@Override
public void pastRemover(List<News> currentNews, List<News> pastNews) {
if(loadIndex(pastNews)){
hashRemover(currentNews);
}
}
@Override
public void realTimeRemover(List<News> currentNews) {
Initiate();
hashRemover(currentNews);
}
/**
* filter same image url and news url
* @param newsList
* @return
*/
private void hashRemover(List<News> newsList){
int count = 0;
if(newsList != null && newsList.size() > 0){
for(News n : newsList){
if(!n.getIsDup() && !isNotRepeatNews(n.getNewsImageUrl(), n.getNewsUrl(), n.getFirstLastSentence(), n.getNewsTitle())){
n.setIsDup();
// System.out.println(" DUP " + n.getNewsTitle());
count ++;
}
}
}
System.out.println("重复新闻 " + count);
}
/**
*
* @param imageurl
* @param newsurl
* @param sentence
* @param title
* @return True if is not a repeated news
*/
private boolean isNotRepeatNews(String imageurl, String newsurl, String sentence, String title){
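// Note: the chained Set.add(...) calls below rely on && short-circuiting, so once an
// earlier index already contains a value the remaining indexes are not updated for
// that news item.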
if(!newsurl.equals("") && !sentence.equals("") && !title.equals("")){
if(!imageurl.equals("") ){
return imageurlset.add(imageurl) && newsurlset.add(newsurl)
&& sentenceSet.add(sentence) && titleSet.add(title);
}
else{
return newsurlset.add(newsurl)
&& sentenceSet.add(sentence) && titleSet.add(title);
}
}
return false;
}
private void Initiate(){
titleSet.clear();
imageurlset.clear();
newsurlset.clear();
sentenceSet.clear();
}
/**
* load index from past news
* @param pastNews
*/
private boolean loadIndex(List<News> pastNews){
Initiate();
if(pastNews != null && pastNews.size() > 0){
for(News n : pastNews ){
imageurlset.add(n.getNewsImageUrl());
newsurlset.add(n.getNewsUrl());
sentenceSet.add(n.getFirstLastSentence());
titleSet.add(n.getNewsTitle()); //used not exist ???????
}
return true;
}
return false;
}
}
<file_sep>/src/Property/PropertyFactory.java
package Property;
/**
* Created by yiqibai on 12/23/15.
*/
public abstract class PropertyFactory {
/**
* create the kwbproperty object.
* @param file
* @return
*/
public abstract Property createProperty(String file);
}
<file_sep>/src/Entity/CateKWord.java
package Entity;
import java.util.HashMap;
import java.util.Map;
/**
* Created by yiqibai on 12/28/15.
*/
public class CateKWord {
private Map<String, Map<String, Double>> cateKWord = new HashMap<String, Map<String, Double>>();
public Map<String, Map<String, Double>> getCateKWord() {
return cateKWord;
}
public void setCateKWord(Map<String, Map<String, Double>> category) {
cateKWord = category;
}
}
<file_sep>/src/Entity/HSNews.java
package Entity;
import java.text.DecimalFormat;
import java.util.HashMap;
import java.util.Map;
/**
* Created by yiqibai on 12/25/15.
* score : popurank
* textArray : segment text to List
* simiTag : false (no similar)
* sentence : first sentence + last sentence. For remove duplications
* keywordmap :
* newMedicineTag : false(not new medicine)
*/
public class HSNews extends News {
private double rankScore;
private boolean isDup;
private Map<String, Integer> keyWordMap;
private boolean isNewMedicine ;
private boolean isNewTreatment ;
private Map<String, String> newMedWord;
private String html;
private String enTitle;
private String enText;
public HSNews(String title, String text, long ts, String src, String imageurl, String summary,
String newsurl, String cate, long id, String html, String entitle, String entext) {
super(title, text, ts, src, imageurl, summary, newsurl, cate, id);
keyWordMap = new HashMap<String, Integer>();
isNewMedicine = false;
isNewTreatment = false;
isDup = false;
rankScore = 0.0;
newMedWord = new HashMap<String, String>();
this.html = html;
enTitle = entitle;
enText = entext;
}
// @Override
// public boolean isValidNews() {
// if(!isDup && !getNewsTitle().equals("") && !getNewsText().equals("") && !getNewsSummary().equals("")
// && !getNewsTimeReadble().equals("") && !getNewsUrl().equals("")){
// return true;
// }
// return false;
// }
public double getRankScore(){
rankScore = Double.parseDouble(new DecimalFormat("##.######").format(rankScore));
return rankScore;
}
public void setRankScore(double score){
this.rankScore = score;
}
public void addRankScore(double score) {
}
public void setIsDup(){
isDup = true;
}
public boolean getIsDup(){ return isDup; }
public void setKeyWordMap(Map<String, Integer> map){
keyWordMap = map;
}
public Map<String, Integer> getKeyWordMap(){
return keyWordMap;
}
public void setNewMedicineTrue(){
isNewMedicine = true;
}
public boolean getIsNewMedicine(){
return isNewMedicine;
}
public void setNewTreatmentTrue() {
isNewTreatment = true;
}
public boolean getIsNewTreatment() {
return isNewTreatment;
}
public int getKeyWordNum(String word){
if(!keyWordMap.containsKey(word)){
return 0;
}
return keyWordMap.get(word);
}
public void setNewMedWord(Map<String, String> map) {
this.newMedWord = map;
}
@Override
public Map<String, String> getNewMedWord() {
return newMedWord;
}
@Override
public String getNewsHtml(){
return html;
}
@Override
public String getEnText() {
return enText;
}
@Override
public String getEnTitle() {
return enTitle;
}
}
|
2737d4b2e0c87e3cbc7d9cb81e8bbbd1f5cc7518
|
[
"Java",
"INI"
] | 11
|
Java
|
baiyiqi/HSHY_EN
|
1d39e806860ac3ce72d1917e9b72b8a5659eb5d5
|
44f177cbf224ba04210ea13fcace2c579f8d74ca
|
refs/heads/main
|
<repo_name>richardsoriano/dml-hw-2<file_sep>/src/App.js
import './App.css'
import TextField from './ui/text-field'
import { useState } from 'react'
function App() {
const [values, setValues] = useState({
name: '',
email: '',
})
const { name, email } = values
return (
<div className='App'>
<form>
<div></div>
<div></div>
<ul>
<li></li>
<li></li>
<li></li>
</ul>
<div>
<h2>Contact Info</h2>
<div>
<TextField
type='text'
label='Your full name$$'
placeholder='<NAME>'
onChange={(name) =>
setValues((prev) => ({
...prev,
name,
}))
}
value={name}
/>
<TextField
type='text'
label='Email'
placeholder='<EMAIL>'
onChange={(email) =>
setValues((prev) => ({
...prev,
email,
}))
}
value={email}
/>
<div>
<div>
<label>Phone</label>:
</div>
<input type='text' id='phone' placeholder='222-222-2222' />
</div>
</div>
</div>
<div>
<h2>Shipping Info</h2>
<div>
<div>
<div>
<label>Label</label>:
</div>
<input type='text' id='home' placeholder='Home' />
</div>
<div>
<div>
<label>Address</label>:
</div>
<input type='text' id='address' placeholder='8000 Sunset Blvd' />
</div>
<div>
<div>
<label>Unit</label>:
</div>
<input type='text' id='unit' placeholder='#21' />
</div>
</div>
</div>
</form>
</div>
)
}
export default App
<file_sep>/src/ui/text-field/index.jsx
import React, { useState } from 'react'
export default function TextField({
type = 'text',
label,
placeholder,
value = '',
onChange = () => {},
}) {
// const id = label
// .toLowerCase()
// .split(' ')
// .map((word) => word.replace(/[^a-z]+/g, ''))
// .join('-')
const id = label
.toLowerCase()
.split(' ')
.map((word) => word.replace(/[^a-z]+/g, ''))
.join('-')
return (
<div className='mr-2'>
<div>
<label className='m1-1' htmlFor={id}>
{label}
</label>
:
</div>
<input
className='border rounded-sm p-2 w-full shadow-sm'
type={type}
id={id}
placeholder={placeholder}
value={value}
onChange={(e) => onChange(e.target.value, e)}
/>
</div>
)
}
|
730cfb1deb950d26f66e57fca2ff60f99f539120
|
[
"JavaScript"
] | 2
|
JavaScript
|
richardsoriano/dml-hw-2
|
348b23c26b87fee324a85f0c77ed0920eb9ce355
|
aa722c27eea660ecfbb2845965bae443c31268d5
|
refs/heads/master
|
<repo_name>MLEnthusiast/wsn-source-routing<file_sep>/README.md
# wsn-source-routing
Source Routing for Downward Data Traffic in Wireless Sensor Networks
A brief description can be found in the final report, project-wireless-sensor.pdf.
To learn more about the problem setting, please refer to Project Description.pdf.
<file_sep>/MyCollection.h
#ifndef MYCOLLECTION_H
#define MYCOLLECTION_H
enum {
AM_COLLECTIONBEACON = 0x88,
AM_COLLECTIONDATA = 0x99,
AM_PAYLOADDATA = 0x77,
MAX_NODES = 15,
MAX_PATH_LENGTH = 10,
};
// beacon packet
typedef nx_struct CollectionBeacon {
nx_uint8_t seq_no;
nx_uint16_t metric;
} CollectionBeacon;
// application-level data packet
typedef nx_struct {
nx_uint16_t parent;
} MyData;
// network-level data packet
typedef nx_struct {
nx_uint16_t from;
nx_uint16_t hops;
MyData data; // includes the app-level data
} CollectionData;
struct DataItem{
int node; // key
int parent; // value
}*Items[MAX_NODES];
#endif
<file_sep>/parser.py
#!/usr/bin/env python2.7
import sys
import re
from collections import OrderedDict
num_nodes = 15
sink_id = 1
senders = range(2, num_nodes+1)
input_file = sys.argv[1]
record_format = "cooja_tab"
record_pattern = {
"cooja":"(?P<time>\d+):(?P<self_id>\d+):%s", # Cooja
"cooja_tab":"(?P<time>[\w:.]+)\s+ID:(?P<self_id>\d+)\s+%s", # Cooja with tabs
}.get(record_format, None)
recv = re.compile(record_pattern%"app:Recv from (?P<src>\d+) seqn (?P<seqn>\d+)")
unicast_send = re.compile(record_pattern%"app:Send to sink seqn (?P<seqn>\d+)")
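# Illustrative log lines these patterns are intended to match in the "cooja_tab"
# format (the exact timestamp layout depends on the Cooja export; examples are
# hypothetical):
#   00:01.234   ID:1   app:Recv from 5 seqn 12
#   00:01.100   ID:5   app:Send to sink seqn 12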
recv_seqn = {}
send_seqn = {}
testlog = open(input_file,'r')
sendlog = open("send.log",'w')
recvlog = open("recv.log",'w')
sendlog.write("time\tdst\tsrc\tseqn\n")
recvlog.write("time\tdst\tsrc\tseqn\n")
def parse_collect():
global recv_seqn, send_seqn
global testlog, recvlog, sendlog
for l in testlog:
m = recv.match(l)
if m:
g = m.groupdict()
time = g["time"]
src = int(g["src"])
dst = int(g["self_id"])
seqn = int(g["seqn"])
if dst == sink_id:
recv_seqn.setdefault(src, {})[seqn] = time
recvlog.write("%s\t%d\t%d\t%d\n"%(time, dst, src, seqn))
else:
m = unicast_send.match(l)
if m:
g = m.groupdict()
time = g["time"]
src = int(g["self_id"])
dst = 1
seqn = int(g["seqn"])
sendlog.write("%s\t%d\t%d\t%d\n"%(time, dst, src, seqn))
if dst == sink_id:
send_seqn.setdefault(src, {})[seqn] = time
parse_collect()
all_sent_seqns = set()
for node in send_seqn.values():
all_sent_seqns.update(node.keys())
all_recv_seqns = set()
for node in recv_seqn.values():
all_recv_seqns.update(node.keys())
missing_send_record = all_recv_seqns - all_sent_seqns
all_seqns = all_sent_seqns.union(all_recv_seqns)
PSNs = {i:len(send_seqn.get(i,())) for i in senders}
PRNs = {i:len(set(recv_seqn.get(i,{}).keys())-missing_send_record) for i in senders}
PRNs_total = {i:len(recv_seqn.get(i,{})) for i in senders}
PDRs = {i:(float(PRNs[i])/PSNs[i]*100 if PSNs[i]!=0 else 0) for i in senders}
not_sent = {i for i in senders if i not in send_seqn}
not_recvd = {i for i in senders if i not in recv_seqn}
print
print "-- Nodes stats ----------------"
print "# nodes in topology:", num_nodes
print "Not probed:", ", ".join(str(x) for x in sorted(not_sent))
print "Isolated:", ", ".join(str(x) for x in sorted(not_recvd-not_sent))
print
print "-- Seqnum stats ---------------"
print "Not sent but received:", sorted(missing_send_record)
print
print "-- Packets stats --------------"
print "# packets node:received/sent:"
print ", ".join("%d: %d/%d"%(i,PRNs_total[i],PSNs[i]) for i in senders)
print
print "PDRs:"
print ", ".join(["%d: %.1f"%(i,PDRs[i]) for i in senders])
print
l = [PDRs[i] for i in senders]
min_pdr = min(l) if (len(l) != 0) else 0
print "min PDR (of those probed):", min_pdr
n_sent = 0
for node,seqns in send_seqn.items():
n_sent += len(seqns)
n_recv = 0
for node,seqns in recv_seqn.items():
n_recv += len(seqns)
print "average network PDR:", float(n_recv)/n_sent
print
# -*- vim: ts=4 sw=4 noexpandtab
|
27781a7f6cddb8b2fa1597c981be32488c070521
|
[
"Markdown",
"C",
"Python"
] | 3
|
Markdown
|
MLEnthusiast/wsn-source-routing
|
83b48c48e1958938ceba942b624e9ae14cb1f17c
|
b9db084c4d05d4624459132f9ab46e402b689a80
|
refs/heads/master
|
<file_sep>import React, { Component } from 'react';
import '../App.css';
import ItemIcons from '../ItemIcons.js';
import PropTypes from 'prop-types';
class GameItem extends Component {
constructor(props) {
super(props);
this.state = {
clicked: false,
}
}
//method to update whether/not an item has been clicked
updateState = () => {
this.setState({ clicked: true });
}
//method to update the score if a) the icon is litter and b) the item hasn't been clicked already
updateScore = () => {
if (this.props.type === "litter" && this.state.clicked === false) {
this.props.addPoint()
}
}
//method to aggregate both of the above functions
onClickFunctions = () => {
this.updateScore();
this.updateState();
}
render() {
const itemStyle = {
bottom: `${this.props.height}px`, // use props.height to offset from the bottom of screen
zIndex: this.props.layer, // use props.layer to set z-index, so we display ontop of background
};
// Update this to select the correct icon for each item
const icon = ItemIcons[this.props.type]
//start of game-item class assignments
let gameItemClass = ["game-item"]
//logic to see what class should be appended on the click
if (this.state.clicked) {
if (this.props.type === "litter") {
gameItemClass.push('spotted-litter');
} else {
gameItemClass.push('spotted-nature');
}
}
return (
<div onClick = { this.onClickFunctions } className = {gameItemClass.join(' ')} style={itemStyle}>
<img src={icon} alt="Item" className="icon-item"></img>
</div>
);
}
}
GameItem.propTypes = {
height: PropTypes.number.isRequired,
layer: PropTypes.number.isRequired,
type: PropTypes.string.isRequired,
addPoint: PropTypes.func.isRequired,
}
export default GameItem;
|
6cfa2e7178cde890c108d2d7080ae70998266ae9
|
[
"JavaScript"
] | 1
|
JavaScript
|
elle-terch/litter-patrol
|
8dfa47c860f16cd134a44239cf407d90026f413a
|
6a3a867b5b8d9f7659b8eb341172a52a94de1820
|
refs/heads/main
|
<file_sep># Challenge 2 (Creating a histogram)
import matplotlib.pyplot as plt
nums = [0.5, 0.7, 1, 1.2, 1.3, 2.1]
bins = [0, 1, 2, 3]
plt.style.use('ggplot')  # apply the style before plotting so it takes effect
plt.hist(nums, bins, color="black")
plt.xlabel("nums")
plt.ylabel("frequency")
plt.title("Histogram of nums against bins")
plt.show()
<file_sep># Challenge 1 (Statistics)
import numpy as np
from scipy import stats
numbers = np.arange(0, 20)
mean = np.mean(numbers)
standard_dev = np.std(numbers)
variance = np.var(numbers)
print("The mean is ", mean)
print("The standard deviation ", standard_dev)
print("The variance is ", variance)
|
69aa296f94cf1f61a226a99f4c7d4f1f7f7e0323
|
[
"Python"
] | 2
|
Python
|
izzyevermore/machine-learning-challenge
|
fa07aa3e0e8447dca0c7790e6e6e9841e3af3744
|
234c9ded43ee92ab0bdd2692af2c7fb6271dc9cc
|
refs/heads/master
|
<file_sep>using BonaFinders.Data.Contexts;
using BonaFinders.Data.Entities;
using BonaFinders.Models.EthicalModels;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Services
{
public class EthicalService
{
private readonly Guid _userId;
public EthicalService(Guid userId)
{
_userId = userId;
}
public EthicalService() { }
// Create Method
public bool CreateEthicalOrganization(ECreate model)
{
var entity =
new EthicalOrganization()
{
Id = _userId,
EthicalOrganizationName = model.EthicalOrganizationName,
CrueltyFree = model.CrueltyFree,
Sustainable = model.Sustainable,
Diverse = model.Diverse,
ECountry = model.ECountry,
EImprove = model.EImprove
};
using (var ctx = new ApplicationDbContext())
{
ctx.EthicalOrganizations.Add(entity);
return ctx.SaveChanges() == 1;
}
}
// Read All: GetEthicalOrganizations() Method
public IEnumerable<EListItem> GetEthicalOrganizations()
{
using (var ctx = new ApplicationDbContext())
{
var query =
ctx
.EthicalOrganizations
.Select(
e =>
new EListItem
{
EthicalOrganizationId = e.EthicalOrganizationId,
EthicalOrganizationName = e.EthicalOrganizationName,
CrueltyFree = e.CrueltyFree,
Sustainable = e.Sustainable,
Diverse = e.Diverse,
ECountry = e.ECountry,
EImprove = e.EImprove
}
);
return query.ToList();
}
}
// Read Single: Get by id
public EDetail GetEthicalOrganizationById(int id)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.EthicalOrganizations
.Single(e => e.EthicalOrganizationId == id && e.Id == _userId);
return
new EDetail
{
EthicalOrganizationName = entity.EthicalOrganizationName,
CrueltyFree = entity.CrueltyFree,
Sustainable = entity.Sustainable,
Diverse = entity.Diverse,
ECountry = entity.ECountry,
EImprove = entity.EImprove
};
}
}
// Update
public bool UpdateEthicalOrganization(EEdit model, int id)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.EthicalOrganizations
.Single(e => e.EthicalOrganizationId == id && e.Id == _userId);
entity.EthicalOrganizationName = model.EthicalOrganizationName;
entity.CrueltyFree = model.CrueltyFree;
entity.Sustainable = model.Sustainable;
entity.Diverse = model.Diverse;
entity.ECountry = model.ECountry;
entity.EImprove = model.EImprove;
return ctx.SaveChanges() == 1;
}
}
// Delete
public bool DeleteEthicalOrganization(int ethicalOrganizationId)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.EthicalOrganizations
.Single(e => e.EthicalOrganizationId == ethicalOrganizationId && e.Id == _userId);
ctx.EthicalOrganizations.Remove(entity);
return ctx.SaveChanges() == 1;
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.TipModels
{
public class TDetail
{
public int TipId { get; set; }
public string Id { get; set; }
public string Title { get; set; }
public string Text { get; set; }
}
}
<file_sep>using BonaFinders.Data.Contexts;
using BonaFinders.Data.Entities;
using BonaFinders.Models.TipModels;
using BonaFinders.Services;
using Microsoft.AspNet.Identity;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace BonaFinders.WebMVC.Controllers
{
public class TipController : Controller
{
// Adding a link to the database
private ApplicationDbContext _db = new ApplicationDbContext();
// Index
// GET: Tip/Index
public ActionResult Index()
{
var service = CreateNoUserService();
var model = service.GetTips();
return View(model);
}
// Create
// GET: Tip/Create
public ActionResult Create()
{
return View();
}
// POST: Tip
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(TCreate model)
{
if (!ModelState.IsValid) return View(model);
var service = CreateTipService();
if (service.CreateTip(model))
{
TempData["SaveResult"] = "Your tip was created.";
return RedirectToAction("Index");
};
ModelState.AddModelError("", "Tip could not be created.");
return View(model);
}
// Delete
// GET: Delete
// Tip/Delete/{id}
[ActionName("Delete")]
public ActionResult Delete(int? id)
{
if (id == null)
{
return new HttpStatusCodeResult(System.Net.HttpStatusCode.BadRequest);
}
Tip tip = _db.Tips.Find(id);
if (tip == null)
{
return HttpNotFound();
}
return View(tip);
}
// POST: Delete
// Tip/Delete/{id}
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeletePost(int id)
{
Tip tip = _db.Tips.Find(id);
_db.Tips.Remove(tip);
_db.SaveChanges();
return RedirectToAction("Index");
}
// Update
// GET: Edit (Update)
// Tip/Edit/{id}
public ActionResult Edit(int id)
{
var service = CreateTipService();
var detail = service.GetTipById(id);
var model =
new TEdit
{
Title = detail.Title,
Text = detail.Text
};
return View(model);
}
// POST: Edit
// Tip/Edit/{id}
[HttpPost, ActionName("Edit")]
[ValidateAntiForgeryToken]
public ActionResult Edit(int id, TEdit model)
{
var svc = CreateTipService();
try
{
if (!ModelState.IsValid)
{
return View(model);
}
else if (svc.UpdateTip(model, id))
{
TempData["SaveResult"] = " Your tip was updated.";
return RedirectToAction("Index");
}
}
catch
{
if (model.TipId != id)
{
ModelState.AddModelError("", "Tip ID Missmatch");
return View(model);
}
}
return View(model);
}
// Details
// GET: Details
// Tip/Details/{id}
public ActionResult Details(int id)
{
var svc = CreateTipService();
var model = svc.GetTipById(id);
return View(model);
}
// Helper Methods
private TipService CreateTipService()
{
var userId = Guid.Parse(User.Identity.GetUserId());
var service = new TipService(userId);
return service;
}
private TipService CreateNoUserService()
{
var service = new TipService();
return service;
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Data.Entities
{
public class UnethicalOrganization
{
[Key]
public int UnethicalOrganizationId { get; set; }
public Guid Id { get; set; }
[ForeignKey(nameof(ApplicationUser))]
public string UserId { get; set; }
public virtual ApplicationUser ApplicationUser { get; set; }
[Required]
[Display(Name = "Name of Organization")]
public string UnethicalOrganizationName { get; set; }
[Display(Name = "Is Fast Fashion")]
public bool FastFashion { get; set; }
[Display(Name = "Exploitation of Labour")]
public bool Exploitation { get; set; }
[Display(Name = "Sweatshop Labour/ Child Labour")]
public bool Sweatshop { get; set; }
[Display(Name = "Copyright Infringement/ Stolen Designs")]
public bool Copyright { get; set; }
[Display(Name = "Based In")]
public string UCountry { get; set; }
[Display(Name = "Needs Improvement")]
public string UImprove { get; set; }
public virtual ICollection<UnethicalReview> UnethicalReviews { get; set; }
}
}
<file_sep>using BonaFinders.Data.Contexts;
using BonaFinders.Data.Entities;
using BonaFinders.Models.UnethicalModels;
using BonaFinders.Services;
using Microsoft.AspNet.Identity;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace BonaFinders.WebMVC.Controllers
{
public class UnethicalController : Controller
{
// Adding a link to the database
private ApplicationDbContext _db = new ApplicationDbContext();
// Index
// GET: Unethical/Index
public ActionResult Index()
{
var service = CreateNoUserService();
var model = service.GetUnethicalOrganizations();
return View(model);
}
// Create
// GET: Unethical/Create
public ActionResult Create()
{
return View();
}
// POST:
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(UCreate model)
{
if (!ModelState.IsValid) return View(model);
var service = CreateUnethicalService();
if (service.CreateUnethicalOrganization(model))
{
TempData["SaveResult"] = "Your organization was created.";
return RedirectToAction("Index");
};
ModelState.AddModelError("", "Organization could not be created.");
return View(model);
}
// Delete
// GET: Delete
// Unethical/Delete/{id}
[ActionName("Delete")]
public ActionResult Delete(int? id)
{
if (id == null)
{
return new HttpStatusCodeResult(System.Net.HttpStatusCode.BadRequest);
}
UnethicalOrganization unethicalOrganization = _db.UnethicalOrganizations.Find(id);
if (unethicalOrganization == null)
{
return HttpNotFound();
}
return View(unethicalOrganization);
}
// POST: Delete
// Unethical/Delete/{id}
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeletePost(int id)
{
UnethicalOrganization unethicalOrganization = _db.UnethicalOrganizations.Find(id);
_db.UnethicalOrganizations.Remove(unethicalOrganization);
_db.SaveChanges();
return RedirectToAction("Index");
}
// Update
// GET: Edit (Update)
// Unethical/Edit/{id}
public ActionResult Edit(int id)
{
var service = CreateUnethicalService();
var detail = service.GetUnethicalOrganizationById(id);
var model =
new UEdit
{
UnethicalOrganizationName = detail.UnethicalOrganizationName,
FastFashion = detail.FastFashion,
Exploitation = detail.Exploitation,
Sweatshop = detail.Sweatshop,
Copyright = detail.Copyright,
UCountry = detail.UCountry,
UImprove = detail.UImprove
};
return View(model);
}
// POST: Edit
// Unethical/Edit/{id}
[HttpPost, ActionName("Edit")]
[ValidateAntiForgeryToken]
public ActionResult Edit(int id, UEdit model)
{
var svc = CreateUnethicalService();
try
{
if (!ModelState.IsValid)
{
return View(model);
}
else if (svc.UpdateUnethicalOrganization(model, id))
{
TempData["SaveResult"] = " Your organization was updated.";
return RedirectToAction("Index");
}
}
catch
{
if (model.UnethicalOrganizationId != id)
{
ModelState.AddModelError("", "Unethical Organization ID Missmatch");
return View(model);
}
}
return View(model);
}
// Details
// GET: Details
// Unethical/Details/{id}
public ActionResult Details(int id)
{
var svc = CreateUnethicalService();
var model = svc.GetUnethicalOrganizationById(id);
return View(model);
}
// Helper Methods
private UnethicalService CreateUnethicalService()
{
var userId = Guid.Parse(User.Identity.GetUserId());
var service = new UnethicalService(userId);
return service;
}
private UnethicalService CreateNoUserService()
{
var service = new UnethicalService();
return service;
}
}
}<file_sep>using BonaFinders.Data.Contexts;
using BonaFinders.Data.Entities;
using BonaFinders.Models.UnethicalModels;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Services
{
public class UnethicalService
{
private readonly Guid _userId;
public UnethicalService(Guid userId)
{
_userId = userId;
}
public UnethicalService() { } //
// Create Method
public bool CreateUnethicalOrganization(UCreate model)
{
var entity =
new UnethicalOrganization()
{
Id = _userId,
UnethicalOrganizationName = model.UnethicalOrganizationName,
FastFashion = model.FastFashion,
Exploitation = model.Exploitation,
Sweatshop = model.Sweatshop,
Copyright = model.Copyright,
UCountry = model.UCountry,
UImprove = model.UImprove
};
using (var ctx = new ApplicationDbContext())
{
ctx.UnethicalOrganizations.Add(entity);
return ctx.SaveChanges() == 1;
}
}
// Read All: GetUnethicalOrganizations() Method
public IEnumerable<UListItem> GetUnethicalOrganizations()
{
using (var ctx = new ApplicationDbContext())
{
var query =
ctx
.UnethicalOrganizations
.Select(
e =>
new UListItem
{
UnethicalOrganizationId = e.UnethicalOrganizationId,
UnethicalOrganizationName = e.UnethicalOrganizationName,
FastFashion = e.FastFashion,
Exploitation = e.Exploitation,
Sweatshop = e.Sweatshop,
Copyright = e.Copyright,
UCountry = e.UCountry,
UImprove = e.UImprove
}
);
return query.ToList();
}
}
// Read Single: Get by id
public UDetail GetUnethicalOrganizationById(int id)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.UnethicalOrganizations
.Single(e => e.UnethicalOrganizationId == id && e.Id == _userId);
return
new UDetail
{
UnethicalOrganizationName = entity.UnethicalOrganizationName,
FastFashion = entity.FastFashion,
Exploitation = entity.Exploitation,
Sweatshop = entity.Sweatshop,
Copyright = entity.Copyright,
UCountry = entity.UCountry,
UImprove = entity.UImprove
};
}
}
// Update
public bool UpdateUnethicalOrganization(UEdit model, int id)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.UnethicalOrganizations
.Single(e => e.UnethicalOrganizationId == id && e.Id == _userId);
entity.UnethicalOrganizationName = model.UnethicalOrganizationName;
entity.FastFashion = model.FastFashion;
entity.Exploitation = model.Exploitation;
entity.Sweatshop = model.Sweatshop;
entity.Copyright = model.Copyright;
entity.UCountry = model.UCountry;
entity.UImprove = model.UImprove;
return ctx.SaveChanges() == 1;
}
}
// Delete
public bool DeleteUnethicalOrganization(int unethicalOrganizationId)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.UnethicalOrganizations
.Single(e => e.UnethicalOrganizationId == unethicalOrganizationId && e.Id == _userId);
ctx.UnethicalOrganizations.Remove(entity);
return ctx.SaveChanges() == 1;
}
}
}
}
<file_sep>using BonaFinders.Data.Entities;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.UnethicalModels
{
public class UListItem
{
public int UnethicalOrganizationId { get; set; }
public string Id { get; set; }
[Display(Name = "Name of Organization")]
public string UnethicalOrganizationName { get; set; }
[Display(Name = "Is Fast Fashion")]
public bool FastFashion { get; set; }
[Display(Name = "Exploitation of Labour")]
public bool Exploitation { get; set; }
[Display(Name = "Sweatshop Labour/ Child Labour")]
public bool Sweatshop { get; set; }
[Display(Name = "Copyright Infringement/ Stolen Designs")]
public bool Copyright { get; set; }
[Display(Name = "Based In")]
public string UCountry { get; set; }
[Display(Name = "Needs Improvement")]
public string UImprove { get; set; }
[Display(Name = "Review")]
public virtual ICollection<UnethicalReview> UnethicalReviews { get; set; }
}
}
<file_sep>using BonaFinders.Data.Entities;
using Microsoft.AspNet.Identity.EntityFramework;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Data.Entity.ModelConfiguration.Conventions;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Data.Contexts
{
public class ApplicationDbContext : IdentityDbContext<ApplicationUser>
{
public ApplicationDbContext()
: base("DefaultConnection", throwIfV1Schema: false)
{
}
public static ApplicationDbContext Create()
{
return new ApplicationDbContext();
}
// Adding database entry properties
public DbSet<EthicalOrganization> EthicalOrganizations { get; set; }
public DbSet<UnethicalOrganization> UnethicalOrganizations { get; set; }
public DbSet<Tip> Tips { get; set; }
public DbSet<EthicalReview> EthicalReviews { get; set; }
public DbSet<UnethicalReview> UnethicalReviews { get; set; }
// Adding override
protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
modelBuilder
.Conventions
.Remove<PluralizingTableNameConvention>();
modelBuilder
.Configurations
.Add(new IdentityUserLoginConfiguration())
.Add(new IdentityUserRoleConfiguration());
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.EthicalReviewModels
{
public class EREdit
{
public int EthicalReviewId { get; set; }
public string Id { get; set; }
public int EthicalOrganizationId { get; set; }
[Display(Name = "Title")]
public string EthicalReviewTitle { get; set; }
[Display(Name = "Text")]
public string EthicalReviewText { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.UnethicalModels
{
public class UCreate
{
public string Id { get; set; }
[Required]
[Display(Name = "Name of Organization")]
[MinLength(1, ErrorMessage = "Please enter at least 1 character.")]
[MaxLength(50, ErrorMessage = "Please use 50 or less characters in this field.")]
public string UnethicalOrganizationName { get; set; }
[Display(Name = "Is Fast Fashion")]
public bool FastFashion { get; set; }
[Display(Name = "Exploitation of Labour")]
public bool Exploitation { get; set; }
[Display(Name = "Sweatshop Labour/ Child Labour")]
public bool Sweatshop { get; set; }
[Display(Name = "Copyright Infringement/ Stolen Designs")]
public bool Copyright { get; set; }
[Display(Name = "Based In")]
[MinLength(2, ErrorMessage = "Please enter at least 2 characters.")]
[MaxLength(30, ErrorMessage = "Please use 30 or less characters in this field.")]
public string UCountry { get; set; }
[Display(Name = "Needs Improvement")]
[MinLength(1, ErrorMessage = "Please enter at least 1 character.")]
[MaxLength(300, ErrorMessage = "Please use 300 or less characters in this field.")]
public string UImprove { get; set; }
}
}
<file_sep>using BonaFinders.Data.Entities;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.EthicalModels
{
public class EListItem
{
public int EthicalOrganizationId { get; set; }
public string Id { get; set; }
[Display(Name = "Name of Organization")]
public string EthicalOrganizationName { get; set; }
[Display(Name = "Cruelty Free/ Vegan")]
public bool CrueltyFree { get; set; }
[Display(Name = "Sustainable/ Eco-friendly")]
public bool Sustainable { get; set; }
[Display(Name = "Diverse/ Inclusive")]
public bool Diverse { get; set; }
[Display(Name = "Based In")]
public string ECountry { get; set; }
[Display(Name = "Needs Improvement")]
public string EImprove { get; set; }
[Display(Name = "Review")]
public virtual ICollection<EthicalReview> EthicalReviews { get; set; }
}
}
<file_sep>using BonaFinders.Data.Entities;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.EthicalReviewModels
{
public class ERCreate
{
public int EthicalReviewId { get; set; }
public string Id { get; set; }
[ForeignKey(nameof(EthicalOrganization))]
public int EthicalOrganizationId { get; set; }
public virtual EthicalOrganization EthicalOrganization { get; set; }
// Do not need [Required] in models
[MinLength(1, ErrorMessage = "Please enter at least 1 character.")]
[MaxLength(50, ErrorMessage = "Please use 50 or less characters in this field.")]
[Display(Name = "Title")]
public string EthicalReviewTitle { get; set; }
[Display(Name = "Text")]
[MinLength(2, ErrorMessage = "Please enter at least 2 characters.")]
[MaxLength(5000, ErrorMessage = "Please use 5000 or less characters in this field.")]
public string EthicalReviewText { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.EthicalReviewModels
{
public class ERListItem
{
public int EthicalReviewId { get; set; }
public string Id { get; set; }
// This name doesn't need to match the EthicalOrganizationName in order to represent it
[Display(Name = "Organization Name")]
public string OrganizationName { get; set; }
[Display(Name = "Title")]
public string EthicalReviewTitle { get; set; }
[Display(Name = "Text")]
public string EthicalReviewText { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.EthicalModels
{
public class ECreate
{
public string Id { get; set; }
[Required]
[Display(Name = "Name of Organization")]
[MinLength(1, ErrorMessage = "Please enter at least 1 character.")]
[MaxLength(50, ErrorMessage = "Please use 50 or less characters in this field.")]
public string EthicalOrganizationName { get; set; }
[Display(Name = "Cruelty Free/ Vegan")]
public bool CrueltyFree { get; set; }
[Display(Name = "Sustainable/ Eco-friendly")]
public bool Sustainable { get; set; }
[Display(Name = "Diverse/ Inclusive")]
public bool Diverse { get; set; }
[Display(Name = "Based In")]
[MinLength(2, ErrorMessage = "Please enter at least 2 characters.")]
[MaxLength(30, ErrorMessage = "Please use 30 or less characters in this field.")]
public string ECountry { get; set; }
[Display(Name = "Needs Improvement")]
[MinLength(1, ErrorMessage = "Please enter at least 1 character.")]
[MaxLength(300, ErrorMessage = "Please use 300 or less characters in this field.")]
public string EImprove { get; set; }
}
}
<file_sep>using BonaFinders.Data.Entities;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.UnethicalReviewModels
{
public class URDetail
{
public int UnethicalReviewId { get; set; }
public string Id { get; set; }
public int UnethicalOrganizationId { get; set; }
[Display(Name = "Title")]
public string UnethicalReviewTitle { get; set; }
[Display(Name = "Text")]
public string UnethicalReviewText { get; set; }
}
}
<file_sep>using BonaFinders.Data.Entities;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.UnethicalReviewModels
{
public class URListItem
{
public int UnethicalReviewId { get; set; }
public string Id { get; set; }
// This name doesn't need to match the UnethicalOrganizationName in order to represent it
[Display(Name = "Organization Name")]
public string OrganizationName { get; set; }
[Display(Name = "Title")]
public string UnethicalReviewTitle { get; set; }
[Display(Name = "Text")]
public string UnethicalReviewText { get; set; }
}
}
<file_sep>using BonaFinders.Data.Contexts;
using BonaFinders.Data.Entities;
using BonaFinders.Models.EthicalModels;
using BonaFinders.Services;
using Microsoft.AspNet.Identity;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace BonaFinders.WebMVC.Controllers
{
public class EthicalController : Controller
{
// Adding a link to the database
private ApplicationDbContext _db = new ApplicationDbContext();
// Index
// GET: Ethical/Index
public ActionResult Index()
{
var service = CreateNoUserService(); // var service = CreateEthicalService();
var model = service.GetEthicalOrganizations();
return View(model);
}
// Create
// GET: Ethical/Create
public ActionResult Create()
{
return View();
}
// POST:
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(ECreate model)
{
if (!ModelState.IsValid) return View(model);
var service = CreateEthicalService();
if (service.CreateEthicalOrganization(model))
{
TempData["SaveResult"] = "Your organization was created.";
return RedirectToAction("Index");
};
ModelState.AddModelError("", "Organization could not be created.");
return View(model);
}
// Delete
// GET: Delete
// Ethical/Delete/{id}
[ActionName("Delete")]
public ActionResult Delete(int? id)
{
if (id == null)
{
return new HttpStatusCodeResult(System.Net.HttpStatusCode.BadRequest);
}
EthicalOrganization ethicalOrganization = _db.EthicalOrganizations.Find(id);
if (ethicalOrganization == null)
{
return HttpNotFound();
}
return View(ethicalOrganization);
}
// POST: Delete
// Ethical/Delete/{id}
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public ActionResult DeletePost(int id)
{
EthicalOrganization ethicalOrganization = _db.EthicalOrganizations.Find(id);
_db.EthicalOrganizations.Remove(ethicalOrganization);
_db.SaveChanges();
return RedirectToAction("Index");
}
// Update
// GET: Edit (Update)
// Ethical/Edit/{id}
public ActionResult Edit(int id)
{
var service = CreateEthicalService();
var detail = service.GetEthicalOrganizationById(id);
var model =
new EEdit
{
EthicalOrganizationName = detail.EthicalOrganizationName,
CrueltyFree = detail.CrueltyFree,
Sustainable = detail.Sustainable,
Diverse = detail.Diverse,
ECountry = detail.ECountry,
EImprove = detail.EImprove
};
return View(model);
}
// POST: Edit
// Ethical/Edit/{id}
[HttpPost, ActionName("Edit")]
[ValidateAntiForgeryToken]
public ActionResult Edit(int id, EEdit model)
{
var svc = CreateEthicalService();
try
{
if (!ModelState.IsValid)
{
return View(model);
}
else if (svc.UpdateEthicalOrganization(model, id))
{
TempData["SaveResult"] = " Your organization was updated.";
return RedirectToAction("Index");
}
}
catch
{
if (model.EthicalOrganizationId != id)
{
ModelState.AddModelError("", "Ethical Organization ID Missmatch");
return View(model);
}
}
return View(model);
}
// Details
// GET: Details
// Ethical/Details/{id}
public ActionResult Details(int id)
{
var svc = CreateEthicalService();
var model = svc.GetEthicalOrganizationById(id);
return View(model);
}
// Helper Methods
private EthicalService CreateEthicalService()
{
var userId = Guid.Parse(User.Identity.GetUserId());
var service = new EthicalService(userId);
return service;
}
private EthicalService CreateNoUserService()
{
var service = new EthicalService();
return service;
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Data.Entities
{
public class EthicalOrganization
{
[Key]
public int EthicalOrganizationId { get; set; }
[Required]
public Guid Id { get; set; }
[ForeignKey(nameof(ApplicationUser))]
public string UserId { get; set; }
public virtual ApplicationUser ApplicationUser { get; set; }
[Required]
[Display(Name = "Name of Organization")]
public string EthicalOrganizationName { get; set; }
[Display(Name = "Cruelty Free/ Vegan")]
public bool CrueltyFree { get; set; }
[Display(Name = "Sustainable/ Eco-friendly")]
public bool Sustainable { get; set; }
[Display(Name = "Diverse/ Inclusive")]
public bool Diverse { get; set; }
[Display(Name = "Based In")]
public string ECountry { get; set; }
[Display(Name = "Needs Improvement")]
public string EImprove { get; set; }
public virtual ICollection<EthicalReview> EthicalReviews { get; set; }
}
}
<file_sep>using BonaFinders.Data.Contexts;
using BonaFinders.Data.Entities;
using BonaFinders.Models.UnethicalReviewModels;
using BonaFinders.Services;
using Microsoft.AspNet.Identity;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace BonaFinders.WebMVC.Controllers
{
public class UnethicalReviewController : Controller
{
// Adding a link to the database
private ApplicationDbContext _db = new ApplicationDbContext();
// Index
// GET: Index
// GET: Post
public ActionResult Index()
{
var service = CreateNoUserService();
var model = service.GetUnethicalReviews();
return View(model);
}
// Create
// GET: Create Viewpage
public ActionResult Create()
{
var service = CreateUnethicalReviewService();
ViewBag.Organizations = service.GetUnethicalOrganizationsList();
return View();
}
// POST:
// [Route(/Post/Create")]
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(URCreate model)
{
if (!ModelState.IsValid) return View(model);
var service = CreateUnethicalReviewService();
if (service.CreateUnethicalReview(model))
{
TempData["SaveResult"] = "Your review was created.";
return RedirectToAction("Index");
};
ModelState.AddModelError("", "Review could not be created.");
return View(model);
}
// Helper Methods
private UnethicalReviewService CreateUnethicalReviewService()
{
var userId = Guid.Parse(User.Identity.GetUserId());
var service = new UnethicalReviewService(userId);
return service;
}
private UnethicalReviewService CreateNoUserService()
{
var service = new UnethicalReviewService();
return service;
}
}
}<file_sep>namespace BonaFinders.Data.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class initCreate : DbMigration
{
public override void Up()
{
CreateTable(
"dbo.EthicalOrganization",
c => new
{
EthicalOrganizationId = c.Int(nullable: false, identity: true),
Id = c.Guid(nullable: false),
UserId = c.String(maxLength: 128),
EthicalOrganizationName = c.String(nullable: false),
CrueltyFree = c.Boolean(nullable: false),
Sustainable = c.Boolean(nullable: false),
Diverse = c.Boolean(nullable: false),
ECountry = c.String(),
EImprove = c.String(),
})
.PrimaryKey(t => t.EthicalOrganizationId)
.ForeignKey("dbo.ApplicationUser", t => t.UserId)
.Index(t => t.UserId);
CreateTable(
"dbo.ApplicationUser",
c => new
{
Id = c.String(nullable: false, maxLength: 128),
Email = c.String(),
EmailConfirmed = c.Boolean(nullable: false),
PasswordHash = c.String(),
SecurityStamp = c.String(),
PhoneNumber = c.String(),
PhoneNumberConfirmed = c.Boolean(nullable: false),
TwoFactorEnabled = c.Boolean(nullable: false),
LockoutEndDateUtc = c.DateTime(),
LockoutEnabled = c.Boolean(nullable: false),
AccessFailedCount = c.Int(nullable: false),
UserName = c.String(),
})
.PrimaryKey(t => t.Id);
CreateTable(
"dbo.IdentityUserClaim",
c => new
{
Id = c.Int(nullable: false, identity: true),
UserId = c.String(),
ClaimType = c.String(),
ClaimValue = c.String(),
ApplicationUser_Id = c.String(maxLength: 128),
})
.PrimaryKey(t => t.Id)
.ForeignKey("dbo.ApplicationUser", t => t.ApplicationUser_Id)
.Index(t => t.ApplicationUser_Id);
CreateTable(
"dbo.IdentityUserLogin",
c => new
{
UserId = c.String(nullable: false, maxLength: 128),
LoginProvider = c.String(),
ProviderKey = c.String(),
ApplicationUser_Id = c.String(maxLength: 128),
})
.PrimaryKey(t => t.UserId)
.ForeignKey("dbo.ApplicationUser", t => t.ApplicationUser_Id)
.Index(t => t.ApplicationUser_Id);
CreateTable(
"dbo.IdentityUserRole",
c => new
{
UserId = c.String(nullable: false, maxLength: 128),
RoleId = c.String(),
ApplicationUser_Id = c.String(maxLength: 128),
IdentityRole_Id = c.String(maxLength: 128),
})
.PrimaryKey(t => t.UserId)
.ForeignKey("dbo.ApplicationUser", t => t.ApplicationUser_Id)
.ForeignKey("dbo.IdentityRole", t => t.IdentityRole_Id)
.Index(t => t.ApplicationUser_Id)
.Index(t => t.IdentityRole_Id);
CreateTable(
"dbo.EthicalReview",
c => new
{
EthicalReviewId = c.Int(nullable: false, identity: true),
Id = c.Guid(nullable: false),
UserId = c.String(maxLength: 128),
EthicalOrganizationId = c.Int(nullable: false),
EthicalReviewTitle = c.String(nullable: false),
EthicalReviewText = c.String(),
})
.PrimaryKey(t => t.EthicalReviewId)
.ForeignKey("dbo.ApplicationUser", t => t.UserId)
.ForeignKey("dbo.EthicalOrganization", t => t.EthicalOrganizationId, cascadeDelete: true)
.Index(t => t.UserId)
.Index(t => t.EthicalOrganizationId);
CreateTable(
"dbo.IdentityRole",
c => new
{
Id = c.String(nullable: false, maxLength: 128),
Name = c.String(),
})
.PrimaryKey(t => t.Id);
CreateTable(
"dbo.Tip",
c => new
{
TipId = c.Int(nullable: false, identity: true),
Id = c.Guid(nullable: false),
UserId = c.String(maxLength: 128),
Title = c.String(nullable: false),
Text = c.String(),
})
.PrimaryKey(t => t.TipId)
.ForeignKey("dbo.ApplicationUser", t => t.UserId)
.Index(t => t.UserId);
CreateTable(
"dbo.UnethicalOrganization",
c => new
{
UnethicalOrganizationId = c.Int(nullable: false, identity: true),
Id = c.Guid(nullable: false),
UserId = c.String(maxLength: 128),
UnethicalOrganizationName = c.String(nullable: false),
FastFashion = c.Boolean(nullable: false),
Exploitation = c.Boolean(nullable: false),
Sweatshop = c.Boolean(nullable: false),
Copyright = c.Boolean(nullable: false),
UCountry = c.String(),
UImprove = c.String(),
})
.PrimaryKey(t => t.UnethicalOrganizationId)
.ForeignKey("dbo.ApplicationUser", t => t.UserId)
.Index(t => t.UserId);
CreateTable(
"dbo.UnethicalReview",
c => new
{
UnethicalReviewId = c.Int(nullable: false, identity: true),
Id = c.Guid(nullable: false),
UserId = c.String(maxLength: 128),
UnethicalOrganizationId = c.Int(nullable: false),
UnethicalReviewTitle = c.String(nullable: false),
UnethicalReviewText = c.String(),
})
.PrimaryKey(t => t.UnethicalReviewId)
.ForeignKey("dbo.ApplicationUser", t => t.UserId)
.ForeignKey("dbo.UnethicalOrganization", t => t.UnethicalOrganizationId, cascadeDelete: true)
.Index(t => t.UserId)
.Index(t => t.UnethicalOrganizationId);
}
public override void Down()
{
DropForeignKey("dbo.UnethicalReview", "UnethicalOrganizationId", "dbo.UnethicalOrganization");
DropForeignKey("dbo.UnethicalReview", "UserId", "dbo.ApplicationUser");
DropForeignKey("dbo.UnethicalOrganization", "UserId", "dbo.ApplicationUser");
DropForeignKey("dbo.Tip", "UserId", "dbo.ApplicationUser");
DropForeignKey("dbo.IdentityUserRole", "IdentityRole_Id", "dbo.IdentityRole");
DropForeignKey("dbo.EthicalReview", "EthicalOrganizationId", "dbo.EthicalOrganization");
DropForeignKey("dbo.EthicalReview", "UserId", "dbo.ApplicationUser");
DropForeignKey("dbo.EthicalOrganization", "UserId", "dbo.ApplicationUser");
DropForeignKey("dbo.IdentityUserRole", "ApplicationUser_Id", "dbo.ApplicationUser");
DropForeignKey("dbo.IdentityUserLogin", "ApplicationUser_Id", "dbo.ApplicationUser");
DropForeignKey("dbo.IdentityUserClaim", "ApplicationUser_Id", "dbo.ApplicationUser");
DropIndex("dbo.UnethicalReview", new[] { "UnethicalOrganizationId" });
DropIndex("dbo.UnethicalReview", new[] { "UserId" });
DropIndex("dbo.UnethicalOrganization", new[] { "UserId" });
DropIndex("dbo.Tip", new[] { "UserId" });
DropIndex("dbo.EthicalReview", new[] { "EthicalOrganizationId" });
DropIndex("dbo.EthicalReview", new[] { "UserId" });
DropIndex("dbo.IdentityUserRole", new[] { "IdentityRole_Id" });
DropIndex("dbo.IdentityUserRole", new[] { "ApplicationUser_Id" });
DropIndex("dbo.IdentityUserLogin", new[] { "ApplicationUser_Id" });
DropIndex("dbo.IdentityUserClaim", new[] { "ApplicationUser_Id" });
DropIndex("dbo.EthicalOrganization", new[] { "UserId" });
DropTable("dbo.UnethicalReview");
DropTable("dbo.UnethicalOrganization");
DropTable("dbo.Tip");
DropTable("dbo.IdentityRole");
DropTable("dbo.EthicalReview");
DropTable("dbo.IdentityUserRole");
DropTable("dbo.IdentityUserLogin");
DropTable("dbo.IdentityUserClaim");
DropTable("dbo.ApplicationUser");
DropTable("dbo.EthicalOrganization");
}
}
}
<file_sep>using BonaFinders.Data.Contexts;
using BonaFinders.Data.Entities;
using BonaFinders.Models.UnethicalReviewModels;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Services
{
public class UnethicalReviewService
{
private readonly Guid _userId;
public UnethicalReviewService(Guid userId)
{
_userId = userId;
}
public UnethicalReviewService() { }
// Create Method
public bool CreateUnethicalReview(URCreate model)
{
var entity =
new UnethicalReview()
{
Id = _userId,
UnethicalOrganizationId = model.UnethicalOrganizationId,
UnethicalReviewTitle = model.UnethicalReviewTitle,
UnethicalReviewText = model.UnethicalReviewText
};
using (var ctx = new ApplicationDbContext())
{
ctx.UnethicalReviews.Add(entity);
return ctx.SaveChanges() == 1;
}
}
// For Dropdown
public List<UnethicalOrganization> GetUnethicalOrganizationsList()
{
using (var ctx = new ApplicationDbContext())
{
var query = ctx.UnethicalOrganizations;
return query.ToList();
}
}
// Read All: Method
public IEnumerable<URListItem> GetUnethicalReviews()
{
using (var ctx = new ApplicationDbContext())
{
var query =
ctx
.UnethicalReviews
.Select(
e =>
new URListItem
{
OrganizationName = e.UnethicalOrganization.UnethicalOrganizationName, // Great example (w/ models like UListItem)
UnethicalReviewTitle = e.UnethicalReviewTitle,
UnethicalReviewText = e.UnethicalReviewText
}
);
return query.ToList();
}
}
// Read Single: Get by id
public URDetail GetUnethicalReviewById(int id)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.UnethicalReviews
.Single(e => e.UnethicalReviewId == id && e.Id == _userId);
return
new URDetail
{
UnethicalReviewId = entity.UnethicalReviewId,
UnethicalOrganizationId = entity.UnethicalOrganizationId,
UnethicalReviewTitle = entity.UnethicalReviewTitle,
UnethicalReviewText = entity.UnethicalReviewText
};
}
}
}
}
<file_sep>using BonaFinders.Data.Contexts;
using BonaFinders.Data.Entities;
using BonaFinders.Models.EthicalReviewModels;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Services
{
public class EthicalReviewService
{
private readonly Guid _userId;
public EthicalReviewService(Guid userId)
{
_userId = userId;
}
public EthicalReviewService() { }
// Create Method
public bool CreateEthicalReview(ERCreate model)
{
var entity =
new EthicalReview()
{
Id = _userId,
EthicalOrganizationId = model.EthicalOrganizationId,
EthicalReviewTitle = model.EthicalReviewTitle,
EthicalReviewText = model.EthicalReviewText
};
using (var ctx = new ApplicationDbContext())
{
ctx.EthicalReviews.Add(entity);
return ctx.SaveChanges() == 1;
}
}
// For Dropdown
public List<EthicalOrganization> GetEthicalOrganizationsList()
{
using (var ctx = new ApplicationDbContext())
{
var query = ctx.EthicalOrganizations;
return query.ToList();
}
}
// Read All: Method
public IEnumerable<ERListItem> GetEthicalReviews()
{
using (var ctx = new ApplicationDbContext())
{
var query =
ctx
.EthicalReviews
.Select(
e =>
new ERListItem
{
OrganizationName = e.EthicalOrganization.EthicalOrganizationName,
EthicalReviewTitle = e.EthicalReviewTitle,
EthicalReviewText = e.EthicalReviewText
}
);
return query.ToList();
}
}
// Read Single: Get by id
public ERDetail GetEthicalReviewById(int id)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.EthicalReviews
.Single(e => e.EthicalReviewId == id && e.Id == _userId);
return
new ERDetail
{
EthicalReviewId = entity.EthicalReviewId,
EthicalOrganizationId = entity.EthicalOrganizationId,
EthicalReviewTitle = entity.EthicalReviewTitle,
EthicalReviewText = entity.EthicalReviewText
};
}
}
}
}
<file_sep># Bona Finders
Bona Finders is an ASP.NET MVC 5 n-tier architecture application (using HTML, CSS, and C#) that keeps track of ethical places to shop and the progress of unethical businesses, making it more convenient to be a conscious consumer.
Users can view lists of organizations, tips, and reviews, as well as create, edit, update, and delete them.
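The CRUD flow follows the usual controller → service → `DbContext` shape. A minimal sketch, abbreviated from the real `TipController`/`TipService` pair in this repository (not a drop-in replacement):

```csharp
// Controllers never touch the DbContext directly; they delegate to a service.
public class TipController : Controller
{
    [HttpPost]
    [ValidateAntiForgeryToken]
    public ActionResult Create(TCreate model)
    {
        if (!ModelState.IsValid) return View(model);
        var service = new TipService(Guid.Parse(User.Identity.GetUserId()));
        if (service.CreateTip(model))
            return RedirectToAction("Index");
        return View(model);
    }
}

// The service maps the view model onto an entity and persists it.
public class TipService
{
    private readonly Guid _userId;
    public TipService(Guid userId) { _userId = userId; }

    public bool CreateTip(TCreate model)
    {
        var entity = new Tip { Id = _userId, Title = model.Title, Text = model.Text };
        using (var ctx = new ApplicationDbContext())
        {
            ctx.Tips.Add(entity);
            return ctx.SaveChanges() == 1;
        }
    }
}
```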
---
## Installation
1. Clone the repository or download the zip provided by Github
2. Run the application
## Instructions
- User must be logged in to use the app. Register an account and log in.
FOR ORGANIZATIONS AND TIPS
- Navigate to "Ethical Organizations"(Ethical/Index), "Unethical Organizations"(Unethical/Index), or "Tips"(Tip/Index) to be able to view, update, or delete.
FOR REVIEWS
- On the "Ethical Organizations" and "Unethical Organization" page you can find a link labeled "View Reviews" to direct you to view and create.
## Resources
- https://fontawesome.com
- https://www.w3schools.com/
- https://www.dictionary.com/browse/bona-fide
- https://fonts.google.com/
- https://getbootstrap.com/
## Author
<NAME> 2020
## Build Status
- Will be expanding EthicalReview and UnethicalReview (tables, access, condensed to one page)
- Adding images and icons
- Update styling on views
- Add organization categories
- Add like/star/heart/thumb function
- Clean up device sizing compatibility <file_sep>using System.Data.Entity;
using System.Data.Entity.ModelConfiguration;
using System.Data.Entity.ModelConfiguration.Conventions;
using System.Security.Claims;
using System.Threading.Tasks;
using BonaFinders.Data.Entities;
using Microsoft.AspNet.Identity;
using Microsoft.AspNet.Identity.EntityFramework;
namespace BonaFinders.Data
{
// You can add profile data for the user by adding more properties to your ApplicationUser class; please visit https://go.microsoft.com/fwlink/?LinkID=317594 to learn more.
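// For example, a hypothetical extra profile property (illustrative only; not part of this project):
//
//   public class ApplicationUser : IdentityUser
//   {
//       public string DisplayName { get; set; }
//   }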
// Adding user login
public class IdentityUserLoginConfiguration : EntityTypeConfiguration<IdentityUserLogin>
{
public IdentityUserLoginConfiguration()
{
HasKey(iul => iul.UserId);
}
}
// Adding user role
public class IdentityUserRoleConfiguration : EntityTypeConfiguration<IdentityUserRole>
{
public IdentityUserRoleConfiguration()
{
HasKey(iur => iur.UserId);
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Data.Entities
{
public class UnethicalReview
{
[Key]
public int UnethicalReviewId { get; set; }
[Required]
public Guid Id { get; set; }
[ForeignKey(nameof(ApplicationUser))]
public string UserId { get; set; }
public virtual ApplicationUser ApplicationUser { get; set; }
[ForeignKey(nameof(UnethicalOrganization))]
public int UnethicalOrganizationId { get; set; }
public virtual UnethicalOrganization UnethicalOrganization { get; set; }
[Required]
public string UnethicalReviewTitle { get; set; }
public string UnethicalReviewText { get; set; }
}
}
<file_sep>using BonaFinders.Data.Contexts;
using BonaFinders.Data.Entities;
using BonaFinders.Models.TipModels;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Services
{
public class TipService
{
private readonly Guid _userId;
public TipService(Guid userId)
{
_userId = userId;
}
public TipService() { } //
// Create Method
public bool CreateTip(TCreate model)
{
var entity =
new Tip()
{
Id = _userId,
Title = model.Title,
Text = model.Text
};
using (var ctx = new ApplicationDbContext())
{
ctx.Tips.Add(entity);
return ctx.SaveChanges() == 1;
}
}
// Read All: GetTips() Method
public IEnumerable<TListItem> GetTips()
{
using (var ctx = new ApplicationDbContext())
{
var query =
ctx
.Tips
//.Where(e => e.Id == _userId) Reason: Anyone can view the Index pages of the tables but only Users logged in can use CRUD
.Select(
e =>
new TListItem
{
TipId = e.TipId,
Title = e.Title,
Text = e.Text
}
);
return query.ToList();
}
}
// Read Single: Get by id
public TDetail GetTipById(int id)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.Tips
.Single(e => e.TipId == id && e.Id == _userId);
return
new TDetail
{
Title = entity.Title,
Text = entity.Text
};
}
}
// Update
public bool UpdateTip(TEdit model, int id)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.Tips
.Single(e => e.TipId == id && e.Id == _userId);
entity.Title = model.Title;
entity.Text = model.Text;
return ctx.SaveChanges() == 1;
}
}
// Delete
public bool DeleteTip(int tipId)
{
using (var ctx = new ApplicationDbContext())
{
var entity =
ctx
.Tips
.Single(e => e.TipId == tipId && e.Id == _userId);
ctx.Tips.Remove(entity);
return ctx.SaveChanges() == 1;
}
}
}
}
<file_sep>using BonaFinders.Data.Entities;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BonaFinders.Models.UnethicalReviewModels
{
public class URCreate
{
public int UnethicalReviewId { get; set; }
public string Id { get; set; }
[ForeignKey(nameof(UnethicalOrganization))]
public int UnethicalOrganizationId { get; set; }
public virtual UnethicalOrganization UnethicalOrganization { get; set; }
[MinLength(1, ErrorMessage = "Please enter at least 1 character.")]
[MaxLength(50, ErrorMessage = "Please use 50 or less characters in this field.")]
[Display(Name = "Title")]
public string UnethicalReviewTitle { get; set; }
[Display(Name = "Text")]
[MinLength(2, ErrorMessage = "Please enter at least 2 characters.")]
[MaxLength(5000, ErrorMessage = "Please use 5000 or less characters in this field.")]
public string UnethicalReviewText { get; set; }
}
}
|
9db73ca5b78c33a194df41aad029609c584a95d9
|
[
"Markdown",
"C#"
] | 27
|
C#
|
cshaym/BonaFinders
|
e7e17be6444bcee72a1cf9f890f6f8924da06bc5
|
704d0daba082bc486bee6a49ce96015433df8a9b
|
refs/heads/master
|
<file_sep>
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var schema = new Schema({
nameOfEvent: {type: String, required: true},
notes: {type: String, required: true},
location: {type: String, required: true},
date: {type: Date, required: true},
newEvent: {type: Object, required: true},
});
module.exports = mongoose.model('Event', schema);
<file_sep>var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var schema = new Schema({
name: {type: String, required: true},
description: {type: String, required: false},
dimensions: {type: String, required: true},
color: {type: String, required: true},
numberOf: {type: Number, required: true},
location: {type: String, required: true},
img: { data: Buffer, contentType: String },
available : {type: Boolean, required: true},
quantityForEvent : {type: String, required: false},
notes : {type: String, required: true}
});
module.exports = mongoose.model('Item', schema);
<file_sep>var express = require('express');
var router = express.Router();
var mongoose = require('mongoose');
let db = mongoose.connection;
var bodyParser = require('body-parser');
var multer = require('multer');
var fs = require('fs');
var btoa = require('btoa');
var Item = require('../models/item');
/* GET home page. */
router.get('/', function(req, res, next) {
res.render('index', { title: 'TheFoodMattersInventorySystem' });
});
/* GET error page. */
router.get('/error', function(req, res, next) {
res.render('error', { title: 'TheFoodMattersS' });
});
/* GET signin page. */
router.get('/signin', function(req, res, next) {
res.render('signin', { title: 'TheFoodMattersS/signin' });
});
/* Function for convertedImageData*/
function hexToBase64(str) {
return btoa(String.fromCharCode.apply(null, str.toString().replace(/\r|\n/g, "").replace(/([\da-fA-F]{2}) ?/g, "0x$1 ").replace(/ +$/, "").split(" ")));
}
/*GET Item by id*/
router.get('/item/:itemId', function (req, res, next) {
//var successMsg = req.flash('success')[0];
Item.findById(req.params.itemId, function(err, item) {
if (err) {
return console.log(err);
}
var theItem = item;
var imageData = item.img.data;
console.log(imageData);
//convertedImageData = 'data:image/jpeg;base64,' + hexToBase64(imageData);
// encode the file as a base64 string.
var convertedImageData = imageData.toString('base64');
console.log(convertedImageData);
// define your new document
res.render('item', {title: 'theFoodMattersWebsite', item: theItem, base64Data: convertedImageData});
});
});
/* GET addItem page. */
router.get('/addItem', function(req, res, next) {
res.render('addItem', { title: 'TheFoodMattersS/addItem' });
});
/* GET inventory page. */
router.get('/inventory', function(req, res, next) {
Item.find(function (err, docs) {
var itemChunks = [];
var chunkSize = 3;
for (var i = 0; i < docs.length; i += chunkSize) {
itemChunks.push(docs.slice(i, i + chunkSize));
}
res.render('inventory', {title: 'theFoodMattersWebsite', items: itemChunks});
});
});
const multerConfig = {
storage: multer.diskStorage({
//Setup where the user's file will go
destination: function(req, file, next){
next(null, 'public/images/itemImages/allInventory');
},
//Keep the original file name (the POST handler rebuilds the file path from req.body.fileName)
filename: function(req, file, next){
console.log(file);
next(null, file.originalname);
}
}),
//A means of ensuring only images are uploaded.
fileFilter: function(req, file, next){
if(!file){
next();
}
const image = file.mimetype.startsWith('image/');
if(image){
console.log('photo uploaded');
next(null, true);
}else{
console.log("file not supported");
//TODO: A better message response to user on failure.
return next();
}
}
};
/* POST Item to database */
router.post('/addItem', multer(multerConfig).single('photo'),function(req, res){
var itemName = req.body.name;
console.log(itemName);
console.log(req.body.name);
console.log(req.body.fileName);
console.log(req.body.name);
var theFile = req.body.fileName;
console.log(theFile);
var path = 'public/images/itemImages/allInventory/' + theFile;
console.log(path);
var item = new Item();
item.name = req.body.name;
item.description = req.body.description;
item.dimensions = req.body.dimensions;
item.color = req.body.color;
item.numberOf = req.body.numberOf;
item.location = req.body.location;
item.notes = null;
item.quantityForEvent = null;
item.available = true;
console.log(item);
//item.img.data = fs.readFileSync(path),
item.img.contentType = 'image/png'
// read the img file from tmp in-memory location
var newImg = fs.readFileSync(path);
// encode the file as a base64 string.
var encImg = newImg.toString('base64');
// define your new document
console.log(item);
item.img.data = encImg;
//item.img.contentType = 'image/png',
db.collection('items').insertOne(item, (err, result) => {
if (err) return console.log(err);
console.log('saved to database');
fs.unlink(path, (err) => {
if (err){ throw err;}
console.log(path + ' was deleted');
});
});
res.redirect('/inventory');
});
router.get('/searchResults', function(req, res, next) {
var searchString = req.query.searchString;
console.log(searchString);
Item.find({$or:[{name: {$regex : searchString, '$options' : 'i'}},{description: {$regex : searchString, '$options' : 'i'}},{color: {$regex : searchString, '$options' : 'i'}}]}, function (err, docs) {
var itemsArray = [];
if(err) {
console.log(err);
}
for (var i = 0; i < docs.length; i++) {
itemsArray.push(docs[i]);
}
if(itemsArray.length === 0){
console.log('No items with this name found');
}
console.log('itemsArray:' + itemsArray);
res.render('searchResults', {title: 'theFoodMattersWebsite', searchResults: itemsArray});
})
});
module.exports = router;
|
bfc5b08fa937000cc3eaf74864062d005d362332
|
[
"JavaScript"
] | 3
|
JavaScript
|
WyattTheG/theFoodMattersS
|
3cabafdfd84dbcfe5b7da0dbe8269fb815166433
|
44bc4c8bfca8a7cf2e8cb30dfb682551982e6ce0
|
refs/heads/main
|
<repo_name>MianHamzaHussain/olx-clone<file_sep>/src/components/Slider.js
import React, { useState } from "react";
import { Carousel } from "react-bootstrap";
const Slider = ({ pics }) => {
const [index, setIndex] = useState(0);
const handleSelect = (selectedIndex, e) => {
setIndex(selectedIndex);
};
pics ? console.log(pics.length) : console.log(pics);
return (
<Carousel activeIndex={index} onSelect={handleSelect} fade>
{pics ? (
pics.map((image, i) => (
<Carousel.Item key={i}>
<img className="d-block w-100" src={image} alt={`${i}`} />
</Carousel.Item>
))
) : (
<p> pic not found</p>
)}
</Carousel>
);
};
export default Slider;
<file_sep>/src/store/reducers/rootReducer.js
import { combineReducers } from "redux";
import { userLoginReducer, userRegisterReducer } from "./userReducers";
import {
adCreateReducer,
adDeleteReducer,
adDetailsReducer,
adFilterReducer,
adSearchReducer,
adUupdateReducer,
ad_ListReducer,
} from "./adReducers";
import { favouriteReducer } from "./favouriteReducer";
const reducer = combineReducers({
userLogin: userLoginReducer,
userRegister: userRegisterReducer,
adsList: ad_ListReducer,
adDetails: adDetailsReducer,
adSearch: adSearchReducer,
adFilter: adFilterReducer,
adDelete: adDeleteReducer,
adCreate: adCreateReducer,
adUpdate: adUupdateReducer,
favourite: favouriteReducer,
});
export default reducer;
<file_sep>/src/store/store.js
import { createStore, applyMiddleware } from "redux";
import thunk from "redux-thunk";
import { composeWithDevTools } from "redux-devtools-extension";
import reducer from "./reducers/rootReducer";
const userInfoFromLocalStorage = localStorage.getItem("userInfo")
? JSON.parse(localStorage.getItem("userInfo"))
: null;
const favourItemsFromLocalStorage = localStorage.getItem("favouriteItems")
? JSON.parse(localStorage.getItem("favouriteItems"))
: [];
const intialState = {
userLogin: {
userInfo: userInfoFromLocalStorage,
},
favourite: {
favouriteItems: favourItemsFromLocalStorage,
},
};
const middleware = [thunk];
const store = createStore(
reducer,
intialState,
composeWithDevTools(applyMiddleware(...middleware))
);
export default store;
<file_sep>/src/store/constants/favourite.js
export const Favourite_Clear_Items = "Favourite_Clear_Items";
export const Favourite_Item_Add = "Favourite_Item_Add";
export const Favourite_Item_Remove = "Favourite_Item_Remove";
<file_sep>/src/components/AdForm.js
import React, { useState, useEffect } from "react";
import { Form, Button, ProgressBar } from "react-bootstrap";
import { storage } from "../config/firebase";
import { createAd, updateAd } from "../store/actions/adActions";
import { useDispatch } from "react-redux";
const AdForm = ({ recordForEdit, userId, close }) => {
const [ReImages, setReImages] = useState(null);
const [images, setImages] = useState([]);
const [urls, setURLS] = useState([]);
const [progress, setProgress] = useState(0);
const dispatch = useDispatch();
const initial = {
id: 0,
uid: userId,
name: "",
category: "",
price: 0,
description: "",
city: "",
condition: "used",
};
const [values, setValues] = useState(initial);
const changehandler = (e) => {
const { name, value } = e.target;
setValues({
...values,
[name]: value,
});
};
const handleFileChange = (e) => {
for (let index = 0; index < e.target.files.length; index++) {
const newImage = e.target.files[index];
newImage["id"] = Math.random();
setImages((prevState) => [...prevState, newImage]);
}
};
useEffect(() => {
if (recordForEdit !== null) {
setValues({
id: recordForEdit.id,
name: recordForEdit.name,
uid: recordForEdit.uid,
price: recordForEdit.price,
description: recordForEdit.description,
city: recordForEdit.city,
condition: recordForEdit.condition,
category: recordForEdit.category,
});
setReImages(recordForEdit.images);
}
}, [recordForEdit]);
const handleUpload = () => {
images.forEach((image) => {
const uploadTask = storage.ref(`images/${image.name}`).put(image);
uploadTask.on(
"state_changed",
(snapshot) => {
const progress = Math.round(
(snapshot.bytesTransferred / snapshot.totalBytes) * 100
);
setProgress(progress);
},
(error) => console.log("error block==", error),
async () => {
try {
const url = await storage
.ref("images")
.child(image.name)
.getDownloadURL();
// console.log("url", url);
setURLS((prevState) => [...prevState, url]);
} catch (error) {
console.log("getting download error", error);
}
}
);
});
};
// console.log(urls);
const submithandler = (e) => {
e.preventDefault();
if (values.id === 0) {
if (urls.length > 0) {
dispatch(createAd({ ...values, images: urls }));
close();
} else {
alert("please upload atleast single photo of product");
}
} else {
if (urls.length > 0) {
dispatch(updateAd({ ...values, images: urls }));
} else {
dispatch(updateAd({ ...values, images: ReImages }));
}
close();
}
};
return (
<Form>
<Form.Group className="mt-3" controlId="Title">
<Form.Label>Title</Form.Label>
<Form.Control
type="text"
placeholder="Enter Title"
name="name"
value={values.name}
onChange={changehandler}
/>
</Form.Group>
<Form.Group controlId="category">
<Form.Label>Category</Form.Label>
<Form.Control
as="select"
name="category"
value={values.category}
onChange={changehandler}
>
<option value=""> Select </option>
<option value="cars">Cars</option>
<option value="houses"> Houses</option>
<option value="mobiles">Mobiles</option>
</Form.Control>
</Form.Group>
<Form.Group controlId="Title">
<Form.Label>Price</Form.Label>
<Form.Control
type="number"
placeholder="Enter Price"
name="price"
value={values.price}
onChange={changehandler}
/>
</Form.Group>
<Form.Group controlId="images" className="my-3">
<Form.Label>Select Images</Form.Label>
<ProgressBar
animated
min="0"
now={progress}
label={`${progress}%`}
max="100"
/>
<Form.Control
type="file"
multiple
name="images"
size="md"
onChange={handleFileChange}
/>
<Button onClick={handleUpload}>Upload</Button>
</Form.Group>
<Form.Group controlId="description">
<Form.Label>Description</Form.Label>
<Form.Control
as="textarea"
rows={3}
name="description"
value={values.description}
onChange={changehandler}
/>
</Form.Group>
<Form.Group controlId="location">
<Form.Label>Location</Form.Label>
<Form.Control
as="select"
name="city"
value={values.city}
onChange={changehandler}
>
<option value=""> Select </option>
<option value="Faisalabad">Faisalabad</option>
<option value="Lahore"> Lahore</option>
<option value="Karachi">Karachi</option>
</Form.Control>
</Form.Group>
<Form.Group>
<label> Condition</label>
<div>
<Form.Check
inline
label="Used"
name="condition"
type="radio"
id={`used`}
value="used"
checked={values.condition === "used"}
onChange={changehandler}
/>
<Form.Check
inline
label="New"
name="condition"
type="radio"
id={`new`}
value="new"
checked={values.condition === "new"}
onChange={changehandler}
/>
</div>
</Form.Group>
<Form.Group controlId="submit" className="my-3">
<Button onClick={submithandler}>Submit</Button>
</Form.Group>
</Form>
);
};
export default AdForm;
<file_sep>/src/components/Ad.js
import React from "react";
import { Card, Col, Row } from "react-bootstrap";
import { useDispatch } from "react-redux";
import { addToFavourite } from "../store/actions/favouriteActions";
import { Link } from "react-router-dom";
import "./Ad.css";
const Ad = ({ ad }) => {
const dispatch = useDispatch();
return (
<Link to={`/ad/${ad.id}`} style={{ textDecoration: "none" }}>
<Card className="cardCon rounded">
<Col className="mx-auto">
<Card.Img id="pic" src={ad.images[0]} fluid />
</Col>
<Card.Body>
<Card.Text as="div" className="mt-1">
<Row>
<Col md={10}>
<h6>
<strong>{ad.name}</strong>
</h6>
<p> RS{ad.price} </p>
<p> Location {ad.city} </p>
</Col>
<Col md={2}>
<Link
to="/myfavourite"
style={{
color: "black",
position: "relative",
}}
onClick={() => dispatch(addToFavourite(ad))}
>
<i className="fa fa-heart" aria-hidden="true"></i>
</Link>
</Col>
</Row>
</Card.Text>
</Card.Body>
</Card>
</Link>
);
};
export default Ad;
<file_sep>/src/store/actions/userActions.js
import {
userLoginFail,
userLoginRequest,
userLoginSuccess,
userLogout,
userRegisterFail,
userRegisterRequest,
userRegisterSuccess,
} from "../constants/userConstants";
import { auth } from "../../config/firebase";
export const register = (name, mail, password) => async (dispatch) => {
try {
dispatch({
type: userRegisterRequest,
});
const newUser = await auth.createUserWithEmailAndPassword(mail, password);
console.log("user", newUser.user);
if (!newUser.user.displayName) {
await newUser.user.updateProfile({ displayName: name });
}
const { uid, email, displayName } = newUser.user;
let data = { uid, email, displayName };
dispatch({
type: userRegisterRequest,
payload: data,
});
dispatch({
type: userRegisterSuccess,
payload: data,
});
dispatch({
type: userLoginSuccess,
payload: data,
});
localStorage.setItem("userInfo", JSON.stringify(data));
} catch (error) {
dispatch({
type: userRegisterFail,
payload: error.code && error.message ? error.message : error.code,
});
}
};
export const login = (mail, password) => async (dispatch) => {
try {
dispatch({
type: userLoginRequest,
});
const { user } = await auth.signInWithEmailAndPassword(mail, password);
const { displayName, email, uid } = user;
const data = {
displayName,
email,
uid,
};
dispatch({
type: userLoginSuccess,
payload: data,
});
localStorage.setItem("userInfo", JSON.stringify(data));
} catch (error) {
dispatch({
type: userLoginFail,
payload: error.code && error.message ? error.message : error.code,
});
}
};
export const logout = () => (dispatch) => {
localStorage.removeItem("userInfo");
dispatch({ type: userLogout });
};
<file_sep>/src/config/firebase.js
import firebase from "firebase";
const firebaseApp = firebase.initializeApp({
apiKey: "<KEY>",
authDomain: "olx-clone-c4f62.firebaseapp.com",
databaseURL: "gs://olx-clone-c4f62.appspot.com/",
projectId: "olx-clone-c4f62",
storageBucket: "olx-clone-c4f62.appspot.com",
messagingSenderId: "928509159212",
appId: "1:928509159212:web:a85c52b5d230308d22ee89",
});
const db = firebaseApp.firestore();
const auth = firebaseApp.auth();
const storage = firebaseApp.storage();
export { db, auth, storage };
<file_sep>/src/store/reducers/favouriteReducer.js
import {
Favourite_Clear_Items,
Favourite_Item_Add,
Favourite_Item_Remove,
} from "../constants/favourite";
export const favouriteReducer = (state = { favouriteItems: [] }, action) => {
switch (action.type) {
case Favourite_Item_Add:
const item = action.payload;
const existItem = state.favouriteItems.find((x) => x.id === item.id);
if (existItem) {
return {
...state,
favouriteItems: state.favouriteItems.map((x) =>
          x.id === existItem.id ? item : x
),
};
} else {
return {
...state,
favouriteItems: [...state.favouriteItems, item],
};
}
case Favourite_Item_Remove:
return {
...state,
favouriteItems: state.favouriteItems.filter(
(x) => x.id !== action.payload
),
};
case Favourite_Clear_Items:
return {
...state,
favouriteItems: [],
};
default:
return state;
}
};
<file_sep>/src/store/actions/favouriteActions.js
import {
Favourite_Item_Add,
Favourite_Item_Remove,
} from "../constants/favourite";
export const addToFavourite = (data) => async (dispatch, getState) => {
dispatch({
type: Favourite_Item_Add,
payload: data,
});
localStorage.setItem(
"favouriteItems",
JSON.stringify(getState().favourite.favouriteItems)
);
};
export const removeFromFavourite = (id) => (dispatch, getState) => {
dispatch({
type: Favourite_Item_Remove,
payload: id,
});
localStorage.setItem(
"favouriteItems",
JSON.stringify(getState().favourite.favouriteItems)
);
};
<file_sep>/src/pages/MyAds.js
import React, { useEffect, useState } from "react";
import { useSelector, useDispatch } from "react-redux";
import { Link } from "react-router-dom";
import { Container, Row, Button, Table, Image } from "react-bootstrap";
import Loader from "../components/Loader";
import Message from "../components/Message";
import Modal from "../components/Modal";
import AdForm from "../components/AdForm";
import { listFilterAds, deleteAd } from "../store/actions/adActions";
const MyAds = ({ history }) => {
const [showModal, setShowModal] = useState(false);
const dispatch = useDispatch();
const userLogin = useSelector((state) => state.userLogin);
const { userInfo } = userLogin;
// console.log("uid", userInfo.uid);
const adCreate = useSelector((state) => state.adCreate);
const {
loading: createLoading,
error: createError,
success: createSuccess,
} = adCreate;
const adUpdate = useSelector((state) => state.adUpdate);
const {
loading: updateLoading,
error: updateError,
success: updateSuccess,
} = adUpdate;
const adDelete = useSelector((state) => state.adDelete);
const {
loading: delLoading,
error: delError,
success: delSuccess,
} = adDelete;
const filter = `uid=${userInfo.uid}`;
const adFilter = useSelector((state) => state.adFilter);
const { loading, error, ads } = adFilter;
useEffect(() => {
if (!userInfo) {
history.push("/login");
}
}, [history, userInfo]);
useEffect(() => {
dispatch(listFilterAds(filter));
}, [dispatch, userInfo, filter, delSuccess, createSuccess, updateSuccess]);
const deletehandler = (id) => {
if (window.confirm("are you sure delete ad")) {
// alert(id);
dispatch(deleteAd(id));
}
};
const [recordForEdit, setRecordForEdit] = useState(null);
const setOpen = (item) => {
setShowModal(true);
setRecordForEdit(item);
};
return (
<>
{createLoading ? (
<Loader />
) : createError ? (
<Message varaint="danger">{createError}</Message>
) : createSuccess ? (
<Message variant="success">ad added successfuly</Message>
) : (
<></>
)}
{updateLoading ? (
<Loader />
) : updateError ? (
<Message varaint="danger">{`${updateError}`}</Message>
) : updateSuccess ? (
<Message variant="success">ad updated successfuly</Message>
) : (
<></>
)}
{delLoading ? (
<Loader />
) : delError ? (
<Message varaint="danger">{delError}</Message>
) : delSuccess ? (
<Message variant="success">ad deleted successfuly</Message>
) : (
<></>
)}
<Button variant="success" className="m-5" onClick={() => setOpen(null)}>
Add
</Button>
{loading ? (
<Loader />
) : error ? (
<Message varaint="danger">{error}</Message>
) : (
<>
<Container>
<Row>
<Table responsive hover striped>
<thead>
<tr>
<th>Name</th>
<th> Category</th>
<th> Price</th>
<th> Location</th>
</tr>
</thead>
<tbody>
{ads.map((v, i) => (
<tr key={i}>
<td> {v.name}</td>
<td> {v.category}</td>
<td> {v.price}</td>
<td> {v.city}</td>
<td>
<Image
src={v.images[0]}
fluid
style={{ width: "100px", height: "100px" }}
/>
</td>
<td>
<Link
to={`/ad/${v.id}?redirect=myads`}
className="btn btn-info btn-sm"
>
details
<i className="fa fa-eye"></i>
</Link>
<Button
variant="primary"
size="sm"
onClick={() => setOpen(v)}
>
<i className="fa fa-edit"></i>
</Button>
<Button
variant="danger"
size="sm"
onClick={() => deletehandler(v.id, v)}
>
<i className="fa fa-close"></i>
</Button>
</td>
</tr>
))}
</tbody>
</Table>
</Row>
<Modal
show={showModal}
onHide={() => setShowModal(false)}
title="Ad details"
>
<AdForm
recordForEdit={recordForEdit}
userId={userInfo.uid}
close={() => {
setShowModal(false);
}}
/>
</Modal>
</Container>
</>
)}
</>
);
};
export default MyAds;
<file_sep>/src/pages/Home.js
import React, { useEffect } from "react";
import { useSelector, useDispatch } from "react-redux";
import { Container, Row, Col } from "react-bootstrap";
import { listAds } from "../store/actions/adActions";
import Loader from "../components/Loader";
import Message from "../components/Message";
import Ad from "../components/Ad";
import "./Home.css";
const Home = () => {
const dispatch = useDispatch();
const adsList = useSelector((state) => state.adsList);
const { loading, error, ads } = adsList;
useEffect(() => {
dispatch(listAds());
// console.log("calling");
}, [dispatch]);
return (
<>
{loading ? (
<Loader />
) : error ? (
<Message varaint="danger">{error}</Message>
) : (
<>
<Container fluid className="banner"></Container>
<Container>
<Row>
{ads.map((ad) => (
<Col className="mt-3" sm={12} md={6} lg={4} xl={3} key={ad.id}>
<Ad ad={ad} />
</Col>
))}
</Row>
</Container>
</>
)}
</>
);
};
export default Home;
<file_sep>/src/store/constants/userConstants.js
const userLoginRequest = "userLoginRequest";
const userLoginSuccess = "userLoginSuccess";
const userLoginFail = "userLoginFail";
const userRegisterRequest = "userRegisterRequest";
const userRegisterSuccess = "userRegisterSuccess";
const userRegisterFail = "userRegisterFail";
const userLogout = "userLogout";
export {
userLoginRequest,
userLoginSuccess,
userLoginFail,
userLogout,
userRegisterRequest,
userRegisterSuccess,
userRegisterFail,
};
<file_sep>/src/pages/Search.js
import React, { useEffect } from "react";
import { useSelector, useDispatch } from "react-redux";
import { Link } from "react-router-dom";
import { Container, Row, Col } from "react-bootstrap";
import { listSearchAds } from "../store/actions/adActions";
import Loader from "../components/Loader";
import Message from "../components/Message";
import Ad from "../components/Ad";
import Meta from "../components/Meta";
import "./Search.css";
const Search = ({ match }) => {
const keyword = match.params.keyword;
const dispatch = useDispatch();
const adSearch = useSelector((state) => state.adSearch);
const { loading, error, ads } = adSearch;
useEffect(() => {
dispatch(listSearchAds(keyword));
}, [dispatch, keyword]);
return (
<>
<Container fluid>
<Meta title={`Search for ${keyword}`} />
<h1 className="text-center"> Showing Results for {keyword}</h1>
<Link to="/" className="btn btn-info py-3">
Go Back
</Link>
</Container>
{loading ? (
<Loader />
) : error ? (
<Message varaint="danger">{error}</Message>
) : (
<>
<Container>
<Row>
{ads.length > 0
? ads.map((ad) => (
<Col
className="mt-3"
sm={12}
md={6}
lg={4}
xl={3}
key={ad.id}
>
<Ad ad={ad} />
</Col>
))
: "no result found"}
</Row>
</Container>
</>
)}
</>
);
};
export default Search;
<file_sep>/src/App.js
import "./App.css";
import Header from "./components/Header";
import Home from "./pages/Home";
import Login from "./pages/Login";
import AdDetails from "./pages/AdDetails";
import Register from "./pages/Register";
import Search from "./pages/Search";
import Favourite from "./pages/Favourite";
import Category from "./pages/Category";
import MyAds from "./pages/MyAds";
import React from "react";
import { BrowserRouter as Router, Switch, Route } from "react-router-dom";
const App = () => {
return (
<div className="App">
<Router>
<div>
<Header />
{/* A <Switch> looks through its children <Route>s and
renders the first one that matches the current URL. */}
<Switch>
<Route path="/" component={Home} exact></Route>
<Route path="/login" component={Login} exact></Route>
<Route path="/register" component={Register} exact></Route>
<Route path="/ad/:id" component={AdDetails} exact></Route>
<Route path="/ad/:id?" component={AdDetails} exact />
<Route path="/myads" component={MyAds} exact></Route>
<Route path="/category/:cat" component={Category} exact></Route>
<Route path="/search/:keyword" component={Search} exact />
<Route path="/myfavourite" component={Favourite} exact />
</Switch>
</div>
</Router>
</div>
);
};
export default App;
<file_sep>/src/store/reducers/adReducers.js
import {
Ad_Create_Fail,
Ad_Create_Request,
Ad_Create_Reset,
Ad_Create_Success,
Ad_Delete_Fail,
Ad_Delete_Request,
Ad_Delete_Success,
Ad_Detail_Fail,
Ad_Detail_Request,
Ad_Detail_Reset,
Ad_Detail_Success,
Ad_Filter_Fail,
Ad_Filter_Request,
Ad_Filter_Reset,
Ad_Filter_Success,
Ad_List_Fail,
Ad_List_Request,
Ad_List_Success,
Ad_Search_Fail,
Ad_Search_Request,
Ad_Search_Reset,
Ad_Search_Success,
Ad_Update_Fail,
Ad_Update_Request,
Ad_Update_Reset,
Ad_Update_Success,
} from "../constants/adConstants";
export const ad_ListReducer = (state = { ads: [] }, action) => {
switch (action.type) {
case Ad_List_Request:
return {
loading: true,
ads: [],
};
case Ad_List_Success:
return {
loading: false,
ads: action.payload,
};
case Ad_List_Fail:
return {
loading: false,
error: action.payload,
};
default:
return state;
}
};
export const adDetailsReducer = (state = { ad: {} }, action) => {
switch (action.type) {
case Ad_Detail_Request:
return {
...state,
loading: true,
};
case Ad_Detail_Success:
return {
loading: false,
ad: action.payload,
};
case Ad_Detail_Fail: {
return {
loading: false,
error: action.payload,
};
}
case Ad_Detail_Reset: {
return {
loading: false,
ad: {},
};
}
default:
return state;
}
};
export const adSearchReducer = (state = { ads: [] }, action) => {
switch (action.type) {
case Ad_Search_Request:
return {
loading: true,
ads: [],
};
case Ad_Search_Success:
return {
loading: false,
ads: action.payload,
};
case Ad_Search_Fail:
return {
loading: false,
error: action.payload,
};
case Ad_Search_Reset:
return {
ads: [],
};
default:
return state;
}
};
export const adFilterReducer = (state = { ads: [] }, action) => {
switch (action.type) {
case Ad_Filter_Request:
return {
loading: true,
ads: [],
};
case Ad_Filter_Success:
return {
loading: false,
ads: action.payload,
};
case Ad_Filter_Fail:
return {
loading: false,
error: action.payload,
};
case Ad_Filter_Reset:
return {
ads: [],
};
default:
return state;
}
};
export const adDeleteReducer = (state = {}, action) => {
switch (action.type) {
case Ad_Delete_Request:
return {
loading: true,
};
case Ad_Delete_Success:
return {
loading: false,
success: true,
};
case Ad_Delete_Fail:
return {
loading: false,
error: action.payload,
};
default:
return state;
}
};
export const adCreateReducer = (state = {}, action) => {
switch (action.type) {
case Ad_Create_Request:
return {
loading: true,
};
case Ad_Create_Success:
return {
loading: false,
success: true,
};
case Ad_Create_Fail:
return {
loading: false,
error: action.payload,
};
case Ad_Create_Reset: {
return {};
}
default:
return state;
}
};
export const adUupdateReducer = (state = {}, action) => {
switch (action.type) {
case Ad_Update_Request:
return {
loading: true,
};
case Ad_Update_Success:
return {
loading: false,
success: true,
};
case Ad_Update_Fail:
return {
loading: false,
error: action.payload,
};
case Ad_Update_Reset: {
return {};
}
default:
return state;
}
};
|
0bad7224e0a38641d38a686e13ad48ea25373105
|
[
"JavaScript"
] | 16
|
JavaScript
|
MianHamzaHussain/olx-clone
|
8873c94357b88537a9645fc0624f6f42f471e8d3
|
57db5c49f9c07ec32cfd01785ad19eac4dd29d30
|
refs/heads/master
|
<file_sep>import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
// Provider responsible for consuming the ViaCep API
@Injectable()
export class ApiCepProvider {
constructor(public http: HttpClient) {
console.log('Hello ApiCepProvider Provider');
}
  // Calls the ViaCep API for the given postal code (CEP)
getCep(cep)
{
return new Promise(resolve => this.http.get("https://viacep.com.br/ws/"+cep+"/json/").subscribe(
data => {
resolve(data);
},
error => {
console.log(error);
}
)
);
}
}
<file_sep>import { Component } from '@angular/core';
import { NavController, AlertController } from 'ionic-angular';
import { ApiCepProvider } from '../../providers/api-cep/api-cep';
import { HistoricoDbProvider } from '../../providers/historico-db/historico-db';
import {FormGroup, FormBuilder, Validators} from "@angular/forms";
@Component({
selector: 'page-home',
templateUrl: 'home.html',
providers:[
ApiCepProvider,
HistoricoDbProvider
]
})
export class HomePage
{
cep: any;
constructor(public navCtrl: NavController, private ApiCepProvider: ApiCepProvider, private historicoDb: HistoricoDbProvider, public FormBuilder: FormBuilder, private alert: AlertController) {
}
  // Creates an alert showing the address values passed in as parameters
cepAlert(cep) {
let alert = this.alert.create({
title: "Endereço",
message: "<p><b class='titulo'>Logradouro: </b>"+cep.logradouro+"</p><p><b class='titulo'>Bairro: </b>"+cep.bairro+"</p><p><b class='titulo'>localidade: </b>"+cep.localidade+"</p><p><b class='titulo'>UF: </b> "+cep.uf+"</p>",
buttons: ['FECHAR'],
cssClass: 'alertInfo'
});
alert.present();
}
  // FormGroup instance returned by FormBuilder.group with the required validations
formCep : FormGroup = this.FormBuilder.group({
cep: ['', Validators.compose([Validators.required, Validators.minLength(8)])]
});
  // Shows the data for the CEP and saves the search to the database
getEndereco(cep){
let now: Date = new Date();
let dataHora: String = "pesquisado em "+((now.getDate()<10)?'0'+(now.getDate()+1): (now.getDate()))+"/"+((now.getMonth()<10)?'0'+(now.getMonth()+1): (now.getMonth()+1))+"/"+now.getFullYear()+" ás "+now.getHours()+":"+now.getMinutes() ;
this.ApiCepProvider.getCep(cep).then(
data =>
{
this.cep = data;
        // Checks whether the CEP really exists; if the lookup fails the user is notified
if (this.cep.erro) {
let alert = this.alert.create({
title: "CEP inexistente ou invalido",
message: "O CEP buscado não pode ser encontrado verifique se digitado corretamente.",
buttons: ['FECHAR'],
});
alert.present();
}else{
this.cepAlert(this.cep);
this.historicoDb.salvarPesquisa(this.cep.cep, dataHora, this.cep.logradouro, this.cep.bairro, this.cep.localidade, this.cep.uf);
}
}
)
}
}
<file_sep>import { Injectable } from '@angular/core';
import { SQLite, SQLiteObject } from '@ionic-native/sqlite';
// Provider responsible for running actions against the database
@Injectable()
export class HistoricoDbProvider {
  // The constructor checks whether the "historico" table exists and creates it if it does not
constructor(public sqlite: SQLite) {
this.sqlite.create({
name: 'buscacep.db',
location: 'default'
}).then((db: SQLiteObject) => {
db.executeSql('CREATE TABLE IF NOT EXISTS historico(id INTEGER PRIMARY KEY, cep TEXT, data TEXT, logradouro TEXT, bairro TEXT, localidade TEXT, uf TEXT)', {})
.then(res => console.log('Executed SQL'))
.catch(e => console.log(e));
});
}
  // Runs an INSERT into the "historico" table
salvarPesquisa(cep, data, logradouro, bairro, localidade, uf){
this.sqlite.create(
{
name: 'buscacep.db',
location:'default'
}).then(
(db: SQLiteObject) =>{
db.executeSql('INSERT INTO historico VALUES(NULL,?,?,?,?,?,?)',[cep,data, logradouro, bairro, localidade, uf])
.then(res => {
console.log(res);
})
.catch(e =>
{
console.log(e);
})
}).catch(e =>
{
console.log(e);
});
}
  // Runs a SELECT on the "historico" table and returns every record
mostrarPesquisas(){
return this.sqlite.create({
name: 'buscacep.db',
location:'default'
}).then(
(db: SQLiteObject) =>{
return db.executeSql('SELECT * FROM historico order by id desc',[])
.then(data => {
if(data.rows.length > 0)
{
let historico: any[] = [];
for (var i = 0; i < data.rows.length; i++)
{
var item = data.rows.item(i);
historico.push(item);
}
return historico;
}
})
.catch(e =>
{
return [e];
})
}).catch(e =>
{
return [e];
})
}
  // Runs a DELETE on the "historico" table using the id passed as a parameter
excluirRegistro(id: number){
return this.sqlite.create({
name: 'buscacep.db',
location:'default'
})
.then(
(db: SQLiteObject) => {
return db.executeSql('delete from historico where id = ?', [id])
.catch(e => {
return e
});
})
.catch(e => {return e});
}
}
<file_sep>import { Component } from '@angular/core';
import { NavController } from 'ionic-angular';
import { HistoricoDbProvider } from '../../providers/historico-db/historico-db';
import { AlertController } from 'ionic-angular';
@Component({
selector: 'page-historico',
templateUrl: 'historico.html',
providers: [
HistoricoDbProvider
]
})
export class HistoricoPage {
public historico = new Array<any>();
constructor(public navCtrl: NavController, private historicoDb: HistoricoDbProvider, private alert: AlertController) {
}
  // Deletes a record: uses AlertController to ask the user for confirmation and HistoricoDbProvider for the database calls
excluiRegistro(item: any)
{
let alert = this.alert.create({
title: 'Você deseja mesmo deletar esse registro?',
message: 'Após deletado o registro não poderá ser recuperado',
buttons: [
{
text: 'Ok',
handler: () =>
{
            // Calls the delete function, passing the item's id as a parameter
this.historicoDb.excluirRegistro(item.id)
.then(() =>
{
              // Removes the item from the array that populates the template
var index = this.historico.indexOf(item);
this.historico.splice(index, 1);
});
}
},
{
text: 'Cancel',
cssClass:'danger',
role:'cancel',
handler: () =>
{
return ;
}
}
],
cssClass:'alertDanger'
});
alert.present();
}
  // Fired every time the page is entered
ionViewWillEnter(){
    // Calls the function that returns all previous searches (the history)
this.historicoDb.mostrarPesquisas()
.then(res=>{
this.historico = res;
})
.catch(e=>{
alert("Erro no banco de dados");
});
}
}
|
6581393a2887100388dfe07ee51aaa8587847297
|
[
"TypeScript"
] | 4
|
TypeScript
|
fernandodsds/Ionic_BuscaCep
|
207894df2806cda87c372f7ab2dd16686e75638b
|
dbcd090139048cb808a97c27b04c7a13541f9788
|
refs/heads/master
|
<repo_name>dkippes/DataStructures-Algorithms-Study<file_sep>/Sort Algorithms/Unstable-Stable Sort.md
# Unstable - Stable Sort
- Matters when there are duplicate keys
- Matters when sorting objects by one of their fields (see the sketch below)
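A minimal Java sketch (illustrative only; the names are my own, not from these notes) of why stability matters when objects share the same key:

```java
import java.util.Arrays;
import java.util.Comparator;

// Hypothetical example: two people share the same age (the sort key).
// A stable sort keeps Ana before Bob (their input order); an unstable
// sort is free to put Bob before Ana.
public class StabilityDemo {
    static class Person {
        final String name;
        final int age;
        Person(String name, int age) { this.name = name; this.age = age; }
        @Override public String toString() { return name + "(" + age + ")"; }
    }

    public static void main(String[] args) {
        Person[] people = {
            new Person("Ana", 30),  // appears first in the input
            new Person("Bob", 30),  // duplicate key: same age
            new Person("Cleo", 25)
        };
        // Arrays.sort on object arrays is documented as stable, so the two
        // age-30 entries keep their original relative order after sorting.
        Arrays.sort(people, Comparator.comparingInt((Person p) -> p.age));
        System.out.println(Arrays.toString(people)); // [Cleo(25), Ana(30), Bob(30)]
    }
}
```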
## Unstable Sort
- The white 9, even though it comes first in the input, ends up after the <b>9</b>: its original relative position is not preserved
Unsorted:
<table>
<tr>
<td>5</td>
<td>9</td>
<td>3</td>
<td><b>9</b></td>
<td>8</td>
<td>4</td>
</tr>
</table>
Sorted:
<table>
<tr>
<td>3</td>
<td>4</td>
<td>5</td>
<td>8</td>
<td><b>9</b></td>
<td>9</td>
</tr>
</table>
## Stable Sort
- The white 9 keeps its original position ahead of the <b>9</b>
Unsorted:
<table>
<tr>
<td>5</td>
<td>9</td>
<td>3</td>
<td><b>9</b></td>
<td>8</td>
<td>4</td>
</tr>
</table>
Sorted:
<table>
<tr>
<td>3</td>
<td>4</td>
<td>5</td>
<td>8</td>
<td>9</td>
<td><b>9</b></td>
</tr>
</table><file_sep>/Sort Algorithms/BubbleSort/Bubble Sort Teoria.md
# Bubble Sort - O(n^2)
- O(n^2) - Quadratic
- It is a stable sort (the comparison uses >, so equal elements are never swapped)
- One of the least efficient sorting algorithms
- Roughly 100 steps to sort 10 elements.
- No extra array is needed; it partitions the same array in place
## Steps:
- Splits the array into a sorted part and an unsorted part as it progresses
- Starts at index 0
- If the element at index 0 is greater than the element at index 1, swap them; if it is smaller, leave them
- Ascending order
- Moves on to the next position until the end of the array is reached
- Once a full pass is completed, the unsorted length becomes array.length - 1, because the largest element is already in the last position (see the sketch below)
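A minimal Java sketch of these steps (illustrative only):

```java
// Bubble sort: after each pass the largest remaining element has
// "bubbled" to the end, so the unsorted partition shrinks by one.
public class BubbleSort {
    public static void sort(int[] intArray) {
        // lastUnsortedIndex marks the end of the unsorted partition
        for (int lastUnsortedIndex = intArray.length - 1; lastUnsortedIndex > 0; lastUnsortedIndex--) {
            for (int i = 0; i < lastUnsortedIndex; i++) {
                // Using > (not >=) never swaps equal elements, which is what keeps the sort stable
                if (intArray[i] > intArray[i + 1]) {
                    int temp = intArray[i];
                    intArray[i] = intArray[i + 1];
                    intArray[i + 1] = temp;
                }
            }
        }
    }
}
```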
Initial array:
<table>
<tr>
<td>20</td>
<td><b>35</b></td>
<td>-15</td>
<td>7</td>
<td><b>55</b></td>
<td>1</td>
<td>-22</td>
</tr>
</table>
Array after the largest element has been moved to the last position:
<table>
<tr>
<td>20</td>
<td>-15</td>
<td>7</td>
<td>35</td>
<td>1</td>
<td>-22</td>
<td>55</td>
</tr>
</table>
After the second full pass over the array:
<table>
<tr>
<td>-15</td>
    <td>7</td>
<td>20</td>
<td>1</td>
<td>-22</td>
<td>35</td>
<td>55</td>
</tr>
</table><file_sep>/Sort Algorithms/SelectionSort/Selection Sort Teoria.md
# Selection Sort Theory - O(n^2)
- O(n^2) - Quadratic
- Selects the largest element and puts it at the end of the array
- Requires fewer swaps than bubble sort
- It is an unstable sort, because it picks the first occurrence of the largest element (without respecting the order of duplicates)
## Steps:
- An outer for loop that walks over each position
- An inner for loop that scans the whole unsorted part and finds the largest element
- Swap the largest element into the last unsorted position (see the sketch below)
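A minimal Java sketch of these steps (illustrative only):

```java
// Selection sort: scan the unsorted partition for the largest element
// and swap it into the last unsorted position (one swap per pass).
public class SelectionSort {
    public static void sort(int[] intArray) {
        for (int lastUnsortedIndex = intArray.length - 1; lastUnsortedIndex > 0; lastUnsortedIndex--) {
            int largestIndex = 0;
            // Inner loop finds the index of the largest element in the unsorted part
            for (int i = 1; i <= lastUnsortedIndex; i++) {
                if (intArray[i] > intArray[largestIndex]) {
                    largestIndex = i;
                }
            }
            // Swap the largest element into the last unsorted slot
            int temp = intArray[largestIndex];
            intArray[largestIndex] = intArray[lastUnsortedIndex];
            intArray[lastUnsortedIndex] = temp;
        }
    }
}
```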
Initial array:
<table>
<tr>
<td>20</td>
<td>35</td>
<td>-15</td>
<td>7</td>
<td><b>55</b></td>
<td>1</td>
<td>-22</td>
</tr>
</table>
Array after the largest element has been moved to the last position:
<table>
<tr>
<td>20</td>
<td><b>35</b></td>
<td>-15</td>
<td>7</td>
<td>-22</td>
<td>1</td>
<td>55</td>
</tr>
</table>
After the second pass over the array:
<table>
<tr>
<td>20</td>
<td>1</td>
<td>-15</td>
<td>7</td>
<td>-22</td>
<td><b>35</b></td>
<td><b>55</b></td>
</tr>
</table><file_sep>/Sort Algorithms/CountingSort/Counting Sort.md
# Counting Sort
- O(n) - assuming we know the range of the values to sort
- NOT an in-place algorithm -> memory usage grows
- Not stable; making it stable requires extra steps
- Makes no comparisons
- Counts the number of occurrences of each value
- Does not work with negative numbers, floating-point numbers or strings
- The values must fall within a specific range (a minimal sketch is included at the end of this note)
## Initial array:
<table>
<tr>
<td>2</td>
<td>5</td>
<td>9</td>
<td>8</td>
<td>2</td>
<td>8</td>
<td>7</td>
<td>10</td>
<td>4</td>
<td>3</td>
</tr>
</table>
## Count array after the first element:
- i = 0 -> a 1 is placed in the counter for value 2, because the first element of the array is 2, so 1 is added
<table>
  <tr>
    <td>0 (represents 1)</td>
    <td>1 (represents 2)</td>
    <td>0 (represents 3)</td>
    <td>0 (represents 4)</td>
    <td>0 (represents 5)</td>
    <td>0 (represents 6)</td>
    <td>0 (represents 7)</td>
    <td>0 (represents 8)</td>
    <td>0 (represents 9)</td>
    <td>0 (represents 10)</td>
</tr>
</table>
## Our initial array broken down:
<img src="https://i.imgur.com/nJObrps.png">
<file_sep>/Sort Algorithms/ShellSort/Shell Sort Teoria.md
# Shell Sort - O(n^2)
- O(n^2) - Quadratic - in the worst case
- An improved version of insertion sort
- Tries to reduce how many times elements have to be shifted
- Each pass needs less work than the previous one
- Does not swap neighbouring elements until the gap becomes very small
- Uses a gap value to sort
- It is an unstable sort because it compares elements that are far apart from each other
- It pre-sorts the array and keeps shrinking the gap
## Gap Value
- There are different gap sequences; a commonly used one is the Knuth sequence
- Gap = (3^k - 1) / 2
- k is chosen based on the length of the array
<table>
<tr>
<th>k</th>
<th>gap (interval)</th>
</tr>
<tr>
<td>1</td>
<td>1</td>
</tr>
<tr>
<td>2</td>
<td>4</td>
</tr>
<tr>
<td>3</td>
<td>13</td>
</tr>
<tr>
<td>4</td>
<td>40</td>
</tr>
<tr>
<td>5</td>
<td>121</td>
</tr>
</table>
## Steps (without using the Knuth sequence):
- gap = array.length / 2
- The gap is divided by 2 on each iteration
- For our array the gap starts at 3 (a minimal sketch of the whole algorithm follows below)
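A minimal Java sketch of this gap-halving variant (illustrative only):

```java
// Shell sort: each pass is an insertion sort over elements that are
// `gap` positions apart. The last pass (gap == 1) is a plain insertion
// sort, but by then the array is almost ordered, so it shifts very little.
public class ShellSort {
    public static void sort(int[] intArray) {
        for (int gap = intArray.length / 2; gap > 0; gap /= 2) {
            for (int i = gap; i < intArray.length; i++) {
                int newElement = intArray[i];
                int j = i;
                // Shift elements that are `gap` apart until newElement fits
                while (j >= gap && intArray[j - gap] > newElement) {
                    intArray[j] = intArray[j - gap];
                    j -= gap;
                }
                intArray[j] = newElement;
            }
        }
    }
}
```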
1. Initial array:
<table>
<tr>
<td><b>20</b></td>
<td>35</td>
<td>-15</td>
<td>7</td>
<td>55</td>
<td>1</td>
<td>-22</td>
</tr>
</table>
- i = gap = 3
- j = i = 3
- newElement = intArray[i] = 7
- compare -> intArray[j - gap] with newElement
2. 7 < 20 -> so they are swapped
<table>
<tr>
<td><b>7</b></td>
<td>35</td>
<td>-15</td>
<td><b>20</b></td>
<td>55</td>
<td>1</td>
<td>-22</td>
</tr>
</table>
- i = gap = 3
- j = j - gap = 0
- newElement = intArray[i] = 7
3. 35 < 55 -> they are 3 elements apart, nothing is done
- i = 5
- j = i = 5
- newElement = intArray[i] = 1
- Compare intArray[j - gap] with newElement
<table>
<tr>
<td>7</td>
<td><b>35</b></td>
<td>-15</td>
<td>20</td>
<td><b>55</b></td>
<td>1</td>
<td>-22</td>
</tr>
</table>
4. Compare: -15 < 1, nothing is done
<table>
<tr>
<td>7</td>
<td>35</td>
<td><b>-15</b></td>
<td>20</td>
<td>55</td>
<td>1</td>
<td>-22</td>
</tr>
</table>
5. -22 < 20 -> so they are swapped
<table>
<tr>
<td>7</td>
<td>35</td>
<td>-15</td>
<td><b>-22</b></td>
<td>55</td>
<td>1</td>
<td><b>20</b></td>
</tr>
</table>
6. -22 < 7 -> so they are swapped; there is nothing left to compare
<table>
<tr>
<td><b>-22</b></td>
<td>35</td>
<td>-15</td>
<td><b>7</b></td>
<td>55</td>
<td>1</td>
<td><b>20</b></td>
</tr>
</table>
7. This ends the first pass with gap = 3; now the gap is divided by two -> gap = 3 / 2 = 1
8. The array is now much closer to sorted ("preliminary work"), so the final pass does fewer swaps<file_sep>/Sort Algorithms/InsertSort/Insertion Sort Teoria.md
# Insertion Sort - O(n^2)
- O(n^2) - Quadratic
- It is a stable sort (the comparison uses <=)
- Roughly 100 steps to sort 10 elements.
- No extra array is needed; it partitions the same array in place
## Steps:
- Splits the array into a sorted part and an unsorted part
- Works from right (unsorted part) to left (sorted part)
- Shifts values along, comparing the current element with the sorted ones until it finds its place in the sorted part of the array (see the sketch below)
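The same steps in compact Java form (a minimal sketch; shifting only while the sorted element is strictly greater is what keeps equal elements in their original order):

```java
// Insertion sort: grow a sorted partition on the left; each new element
// is shifted left past larger sorted elements until it fits.
public class InsertionSort {
    public static void sort(int[] intArray) {
        for (int firstUnsortedIndex = 1; firstUnsortedIndex < intArray.length; firstUnsortedIndex++) {
            int newElement = intArray[firstUnsortedIndex];
            int i;
            // Shift sorted elements that are larger than newElement one slot to the right
            for (i = firstUnsortedIndex; i > 0 && intArray[i - 1] > newElement; i--) {
                intArray[i] = intArray[i - 1];
            }
            intArray[i] = newElement; // drop the element into its final slot
        }
    }
}
```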
Initial array: the 20 is already considered sorted
<table>
<tr>
<td><b>20</b></td>
<td>35</td>
<td>-15</td>
<td>7</td>
<td>55</td>
<td>1</td>
<td>-22</td>
</tr>
</table>
2. Now compare: is -15 <= 35? Yes, so the 35 is moved into the slot where the -15 was and the -15 is kept in a temporary variable
<table>
<tr>
    <td><b>20</b></td>
<td><b>35</b></td>
<td><b>35</b></td>
<td>7</td>
<td>55</td>
<td>1</td>
<td>-22</td>
</tr>
</table>
3. The temporary value (-15) is then compared with the 20: is -15 <= 20? Yes, so the 20 is shifted into that slot as well
<table>
<tr>
<td>20</td>
<td><b>20</b></td>
<td>35</td>
<td>7</td>
<td>55</td>
<td>1</td>
<td>-22</td>
</tr>
</table>
4. The -15 has now reached the lowest possible position
<table>
<tr>
<td>20</td>
<td><b>20</b></td>
<td>35</td>
<td>7</td>
<td>55</td>
<td>1</td>
<td>-22</td>
</tr>
</table>
5. These are the sorted elements at this point; next, the 7 is compared against the sorted part of the array to find its place, and so on.
<table>
<tr>
    <td><b>-15</b></td>
<td><b>20</b></td>
<td><b>35</b></td>
<td>7</td>
<td>55</td>
<td>1</td>
<td>-22</td>
</tr>
</table><file_sep>/Sort Algorithms/RadixSort/Radix Sort.md
# Radix Sort
- O(n) - but it can run slower than O(n log n) sorts because many operations are involved
- NOT an in-place algorithm -> memory usage grows
- It is a stable algorithm
- Counting sort is usually used as the inner sort for radix sort
- Makes assumptions about the data it sorts
- All the values must share the same radix and width
- Only works on integers or strings
- Sorting is based on each individual digit, or each letter position
- Starts at the rightmost position
- A stable sort must be used for each pass (see the sketch below)
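A minimal Java sketch of the idea (illustrative only): least-significant-digit radix sort for non-negative integers, using a stable counting sort on one digit position per pass.

```java
// Radix sort: sort by the rightmost digit first, then by the next digit,
// and so on. Each per-digit pass must be stable so earlier passes are not undone.
public class RadixSort {
    public static void radixSort(int[] input, int radix, int width) {
        for (int position = 0; position < width; position++) {
            countingSortByDigit(input, position, radix);
        }
    }

    // Stable counting sort keyed on a single digit position
    private static void countingSortByDigit(int[] input, int position, int radix) {
        int[] counts = new int[radix];
        for (int value : input) {
            counts[digit(value, position, radix)]++;
        }
        // Turn the counts into positions (prefix sums)
        for (int d = 1; d < radix; d++) {
            counts[d] += counts[d - 1];
        }
        // Write into a temporary array from right to left to preserve the order of ties
        int[] temp = new int[input.length];
        for (int i = input.length - 1; i >= 0; i--) {
            temp[--counts[digit(input[i], position, radix)]] = input[i];
        }
        System.arraycopy(temp, 0, input, 0, input.length);
    }

    private static int digit(int value, int position, int radix) {
        return (value / (int) Math.pow(radix, position)) % radix;
    }
}
```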
## Example - 1st pass over the array:
- 1330 goes first because it has a 0 in the initial (rightmost) position
- Then comes 8792 because it ends in 2, and so on for each element
- The last digit of every element is evaluated, and the array is ordered based on it
<img src="https://i.imgur.com/0Jpo4FF.png">
## Example - 2nd pass over the array:
- Now the second-to-last digit is evaluated; 4725 goes first because it appears first and because its second-to-last digit is a 2
- A stable sort must be used
<img src="https://i.imgur.com/OWgFWvE.png">
## Example - 3rd pass over the array:
- Same as before but on the second digit, ordered from smallest to largest
- In the case of 4725 that digit is the 7, and so on
<img src="https://i.imgur.com/pOQAtn7.png">
## Example - 4th pass over the array:
- Same as before but on the first digit
- In the case of 1330 it is the 1
<img src="https://i.imgur.com/wG2Vvsd.png"><file_sep>/Sort Algorithms/ShellSort/src/insertsort/InsertSort.java
package insertsort;
public class InsertSort {
public static void main(String[] args) {
int[] intArray = { 20, 35, -15, 7, 55, 1, -22 };
for( int firstUnsortedIndex = 1; firstUnsortedIndex < intArray.length; firstUnsortedIndex++ ){
            int newElement = intArray[firstUnsortedIndex]; // Grab the element to insert
            int i;
            // Find the position where this element belongs
            for ( i = firstUnsortedIndex; i > 0 && intArray[i - 1] > newElement; i--) { // Walk backwards through the sorted part
                // Stop when we reach the front of the array, or when newElement is no longer
                // smaller than the element at i - 1: that is the position where it goes
                intArray[i] = intArray[i - 1]; // Shift the larger element one slot to the right
            }
            intArray[i] = newElement; // Place the element where it belongs (if nothing was shifted, i stays where it was)
        }
        // Print the array
imprimirArray(intArray);
}
public static void imprimirArray(int[] array) {
for ( int i = 0; i < array.length; i++ ) {
System.out.println(array[i]);
}
}
}
<file_sep>/Sort Algorithms/RadixSort/Stable Counting Sort.md
# Stable Counting Sort
- Requires extra steps
- When two elements are equal, it can work out which one should go first
- Can be used with radix sort
- Duplicate values go into a temporary array, which is written from right to left<file_sep>/Sort Algorithms/Recursion/Recursion - Factorial Allgorithm.md
# Factorial Algorithm
Steps:
1. If num = 0, the factorial is 1... otherwise, continue with the remaining steps
2. multiplier = 1
3. factorial = 1
4. While multiplier <= num, do steps 5 and 6
5. factorial = factorial * multiplier
6. multiplier++
7. We have the result<file_sep>/Sort Algorithms/QuickSort/QuickSort.md
# Merge Sort
- O(n log n) - base 2.
- NOT an in-place algorithm -> memory usage grows
- Divide and conquer (algorithm)
- Uses a recursive algorithm
- 2 phases: splitting and merging
- Splitting: no new arrays are created, it is purely logical (index arithmetic). The splitting phase is what makes the sorting fast when the merging phase runs
## Splitting Phase
- Starts with an unsorted array
- Splits the array in 2; both halves are unsorted. The first array is the left one and the second is the right one.
- Splits the first and the second array in 2 again
- Keeps splitting until every array has a single element - those arrays are sorted
## Merging Phase
- Merges every left/right sibling pair into a sorted array
- After the first merge, we have 2 sorted elements
- Then those 2 sorted elements are merged with another 2, producing 4 sorted elements
- Repeats until there is a single sorted array
- Uses temporary arrays (see the sketch below)
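A minimal Java sketch of both phases (illustrative only; the splitting is just index arithmetic, and the temporary array only exists while two sibling runs are being merged):

```java
// Merge sort: recursively split [start, end) in half, then merge the
// two sorted halves through a temporary array and copy the result back.
public class MergeSort {
    public static void mergeSort(int[] input, int start, int end) {
        if (end - start < 2) {
            return; // a single element (or none) is already sorted
        }
        int mid = (start + end) / 2;
        mergeSort(input, start, mid); // left partition  [start, mid)
        mergeSort(input, mid, end);   // right partition [mid, end)
        merge(input, start, mid, end);
    }

    private static void merge(int[] input, int start, int mid, int end) {
        if (input[mid - 1] <= input[mid]) {
            return; // the two runs are already in order, nothing to merge
        }
        int i = start, j = mid, tempIndex = 0;
        int[] temp = new int[end - start];
        while (i < mid && j < end) {
            // <= keeps the left element first on ties, so the sort stays stable
            temp[tempIndex++] = (input[i] <= input[j]) ? input[i++] : input[j++];
        }
        // Leftovers in the right run are already in their final place; move any
        // leftovers from the left run to the end, then copy the merged part back.
        System.arraycopy(input, i, input, start + tempIndex, mid - i);
        System.arraycopy(temp, 0, input, start, tempIndex);
    }
}
```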
## Examples:
<table>
<tr>
<td>20</td>
    <td>35</td>
<td>-15</td>
<td>7</td>
<td>55</td>
<td>1</td>
<td>-22</td>
</tr>
</table>
- Start = 0, end = 7 (array.length)
- midpoint = (start + end) / 2 = 3
- Elements 0 to 2 will be the left array, and elements 3 to 6 will be the right array (shown in bold)
<table>
<tr>
<td>20</td>
    <td>35</td>
<td>-15</td>
<td><b>7</b></td>
<td><b>55</b></td>
<td><b>1</b></td>
<td><b>-22</b></td>
</tr>
</table>
- Now they are split into siblings; siblings are the similarly coloured cells
- 35 and -15 are left/right sibling arrays
- 7 and 55 are left/right sibling arrays
- 1 and -22 are left/right sibling arrays
- The remaining left/right arrays are already sorted
<table>
<tr>
<td style="background: #5DADE2">20</td>
<td style="background: #0E6655">-35</td>
<td style="background: #196F3D">-15</td>
<td style="background: #B7950B"><b>7</b></td>
<td style="background: #B9770E"><b>55</b></td>
<td style="background: #909497"><b>1</b></td>
<td style="background: #B3B6B7"><b>-22</b></td>
</tr>
</table>
## Our initial array fully split:
<img src="https://i.imgur.com/pMDlsEO.png">
## Merging process
- The process is repeated until all the elements end up in 2 arrays
- The temporary array holds everything that has been merged and sorted
- We copy the temporary array back into the original array at the correct positions
- If the left array occupies positions x to y, and the right array occupies positions y + 1 to z, then after copying, positions x to z of the original array are sorted
### How it starts:
- The 2 leftmost siblings (35 and -15) are merged
- We create a temporary array of 2 elements
- i = 1 and j = 2
- We compare array[i] with array[j]. -15 < 35, so the -15 is copied first
- The 35 is copied next
- temporary array = {-15, 35}
## This is how the merging comparisons look:
<img src="https://i.imgur.com/d3Vbq53.png"><file_sep>/Arrays and Big O Notation/Big O Notation.md
# Big O Notation
## Algorithm
1. Grab the jar of sugar
2. Grab the spoon
3. Scoop sugar out of the jar
4. Put the sugar in the tea
5. Repeat steps 3 and 4
- Steps 3 and 4 are repeated n times.
<table style="width:100%">
<tr>
    <th>Spoonfuls of sugar</th>
    <th>Steps required</th>
</tr>
<tr>
<td>1</td>
<td>4</td>
</tr>
<tr>
<td>2</td>
<td>6</td>
</tr>
<tr>
<td>3</td>
<td>8</td>
</tr>
<tr>
<td>4</td>
<td>10</td>
</tr>
</table>
- Spoonfuls of sugar = n
- Total number of steps = 2n + 2
- The number of steps required grows with n
- The "2" in 2n and the "+2" stay constant; they do not affect how the time grows. The value that determines the growth is n.
- The running time is O(n) -> read aloud as "O of n"
- Linear running time (see the sketch below)
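The same counting, as a tiny Java sketch (illustrative only):

```java
// "Sugar in the tea": 2 fixed steps (grab the jar, grab the spoon) plus
// 2 steps per spoonful, so steps = 2n + 2, which is O(n) - linear in n.
public class StepCounter {
    public static int stepsFor(int spoonfulsOfSugar) {
        int steps = 2; // grab the jar of sugar, grab the spoon
        for (int i = 0; i < spoonfulsOfSugar; i++) {
            steps += 2; // scoop sugar out of the jar, put it in the tea
        }
        return steps;
    }

    public static void main(String[] args) {
        for (int n = 1; n <= 4; n++) {
            System.out.println(n + " -> " + stepsFor(n)); // 4, 6, 8, 10
        }
    }
}
```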
## Functions ordered by running time
- From the lowest to the highest running time.
- Constant takes the least time and is the best possible case, while quadratic takes much more computing time.
- The X axis is the number of elements the algorithm handles and the Y axis is the time.
<table style="width:100%">
<tr>
<th>Big-O</th>
<th></th>
</tr>
<tr>
<td>O(1)</td>
<td>Constant</td>
</tr>
<tr>
<td>O(log n)</td>
<td>Logarithmic</td>
</tr>
<tr>
<td>O(n)</td>
<td>Linear</td>
</tr>
<tr>
<td>O(n log n)</td>
    <td>Linearithmic</td>
</tr>
<tr>
<td>O(n^2)</td>
<td>Quadratic</td>
</tr>
</table>
<img src="https://upload.wikimedia.org/wikipedia/commons/thumb/7/7e/Comparison_computational_complexity.svg/512px-Comparison_computational_complexity.svg.png" alt="funciones">
## Arrays
<table style="width:100%">
<tr>
    <th>Operation</th>
    <th>Time Complexity</th>
  </tr>
  <tr>
    <td>Get an element when the index is known</td>
    <td>O(1) - Constant</td>
  </tr>
  <tr>
    <td>Get an element when the index is not known</td>
    <td>O(n) - Linear time</td>
  </tr>
  <tr>
    <td>Add an element to a full array</td>
    <td>O(n)</td>
  </tr>
  <tr>
    <td>Add an element to the end of the array (it has spare room)</td>
    <td>O(1)</td>
  </tr>
  <tr>
    <td>Insert or update an element at a specific index</td>
    <td>O(1)</td>
  </tr>
  <tr>
    <td>Delete an element by setting it to null</td>
    <td>O(1)</td>
  </tr>
  <tr>
    <td>Delete an element by shifting the remaining elements</td>
<td>O(n)</td>
</tr>
</table>
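The first two rows of the table, as a minimal Java sketch (illustrative only):

```java
// Array access vs. array search, matching the table above.
public class ArrayOps {
    // O(1): with the index, the element's address is computed directly
    public static int getByIndex(int[] array, int index) {
        return array[index];
    }

    // O(n): without the index, in the worst case every element is inspected
    public static int indexOf(int[] array, int value) {
        for (int i = 0; i < array.length; i++) {
            if (array[i] == value) {
                return i;
            }
        }
        return -1; // not found
    }
}
```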
|
1fc454810659c02a5db3e7fde8e10be56b3d73b7
|
[
"Markdown",
"Java"
] | 12
|
Markdown
|
dkippes/DataStructures-Algorithms-Study
|
ec778fae00f292ed0f9d718735d811de98707b00
|
29da3bbee844b46f7526d48fc0d36f62851cad50
|
refs/heads/master
|
<file_sep>## Caching the inverse of a matrix. If the inverse of matrix has been calculated,
## the cached result will be returned. Otherwise, the inverse will be calculated
## and the result will be cached.
### makeCacheMatrix is a list of function to
### set the value of the matrix,
### get the value of the matrix,
### set the value of the inverse matrix,
### get the value of the inverse matrix
makeCacheMatrix <- function(x = matrix()) {
invx <- NULL
set <- function(y){
x<<-y
invx <<- NULL
}
get <- function() x
setInv <- function(inverse) invx<<-inverse
getInv <- function() invx
list(set = set, get = get,
setInv = setInv,
getInv = getInv)
}
## cacheSolve calculates the inverse of the matrix created with the
## makeCacheMatrix. It first check if the inverse has been calculated.
## if yes, it will retrive the inverse from cached value. Otherwise
## it will calculated the inverse using R:solve and cached the result
cacheSolve <- function(x, ...) {
## Return a matrix that is the inverse of 'x'
invx <- x$getInv()
if(!is.null(invx)) {
message("getting cached data")
return(invx)
}
data <- x$get()
invx <- solve(data, ...)
x$setInv(invx)
invx
}
|
e66c9a2192840924f13569c355a1e62e25f7bbca
|
[
"R"
] | 1
|
R
|
kk134/ProgrammingAssignment2
|
f9ec945dadadd8f393cc04e593a74979706c5637
|
86505fe7642eebae0ec97588e40ea3f55d84bc78
|
refs/heads/master
|
<repo_name>AidanWolfheart/PeasOutGamePrototype<file_sep>/PeasOut/View/Headers/Texture.h
#if !defined(_TEXTURE_H)
#define _TEXTURE_H
#include "../../glew/include/GL/glew.h"
#include <unordered_map>
#include <string>
//#include "tgaload.h"
#define GET(Type, MemberName, FaceName) \
Type Get##FaceName() const { \
return MemberName; \
}
#define SET(Type, MemberName, FaceName) \
void Set##FaceName(const Type &value) { \
MemberName = value; \
}
class Texture {
private:
public:
Texture();
std::unordered_map<std::string, GLuint> textures;
};
#endif //_TEXTURE_H
<file_sep>/PeasOut/Object/Headers/World.h
#if !defined(_WORLD_H)
#define _WORLD_H
#include "../Object/Headers/Entity.h"
#include <vector>
class World {
private:
int numberOfEntities = 0;
std::vector<Entity> entities;
public:
World();
inline std::vector<Entity> getEntities();
inline void addEntity(Entity entity);
};
inline std::vector<Entity> World::getEntities()
{
return entities;
}
inline void World::addEntity(Entity entity)
{
entities.push_back(entity);
}
#endif //_WORLD_H
<file_sep>/PeasOut/Object/Headers/Entity.h
#if !defined(_ENTITY_H)
#define _ENTITY_H
#include "../View/Headers/Texture.h"
#include <vector>
#include "../glm/glm/glm.hpp"
#include "../glm/glm/gtc/matrix_transform.hpp"
#include "../glm/glm/gtc/type_ptr.hpp"
#include <iostream>
#include "../../glew/include/GL/glew.h"
#define GET(Type, MemberName, FaceName) \
Type Get##FaceName() const { \
return MemberName; \
}
#define SET(Type, MemberName, FaceName) \
void Set##FaceName(const Type &value) { \
MemberName = value; \
}
class Entity {
public:
//CONSTRUCTORS
Entity();
Entity(glm::vec2 coords, glm::vec2 len, glm::vec2 vel, GLuint texID);
typedef glm::vec2 GridCoordinates;
//Getters
GET(glm::vec2, coordinate, Coordinate)
GET(glm::vec2, length, Length)
GET(std::vector<GridCoordinates>, grids, Grids)
GET(bool, nU, N_up)
GET(bool, nD, N_down)
GET(bool, nL, N_left)
GET(bool, nR, N_right)
GET(GLuint, textureID, TextureID)
GET(bool, destructible, Destructible)
GET(int, _id, ObjID)
SET(glm::vec2, coordinate, Coordinate)
SET(glm::vec2, length, Length)
SET(std::vector<GridCoordinates>, grids, Grids)
SET(bool, nU, N_up)
SET(bool, nD, N_down)
SET(bool, nL, N_left)
SET(bool, nR, N_right)
SET(GLuint, textureID, TextureID)
SET(bool, destructible, Destructible)
friend bool operator==(const Entity &e1, const Entity &e2);
protected:
int _id;
float dt;
bool destructible;
bool nU, nD, nR, nL;
glm::vec2 coordinate, length, velocity;
GLuint textureID;
std::vector<GridCoordinates> grids;
};
#endif //_ENTITY_H
<file_sep>/README.md
# Platformer in OpenGL
---
The platform game should enable the user to control (a) character(s) that move(s) on multiple platforms.
- The following features are essential:
- The game starts at ground level;
- Has at least two levels above ground level;
- Each level has to be made up of at least two distinct (separated) platforms unless interlaced levels (zigzag
from left to right) are used;
- The target position of the platform has to be at the highest (or furthest) level;
- The character(s) can jump from one level to the next both horizontally and vertically;
- The character can collide with the platforms from all possible directions, i.e. it stays on the platform
(obviously) but can also hit its head when jumping upwards;
- A clear objective and scoring system needs to be provided;
- The world should be larger than the screen size which means the background (world) should scroll both
horizontally and vertically if the character(s) threaten to go off the screen out of the user’s view.
- The following features are optional (but desirable):
- The character is animated when walking or jumping (that is, use of different sprites/textures for
different motions);
- NPC’s (non-player characters) try to stop the player’s character(s) from reaching its/their target;
- The character (s) can eliminate NPC’s;
- Have character offspring which follows a main character (e.g. SPROGS example);
- Have moving (transition) platforms to travel from one fixed platform to another at the same (or even
adjacent) level(s).
- Collision Response and physics
- Special effects, dynamic textures, particle systems
- More complex collision detection, e.g. GJK, BB hierarchies
- More advanced AI for NPC’s
- Gravity
- More than one level
- A level editor
- Additional game information
<file_sep>/PeasOut/Object/World.cpp
#include "../Object/Headers/World.h"
World::World() {
}
<file_sep>/PeasOut/View/Texture.cpp
#include "../View/Headers/Texture.h"
Texture::Texture()
{}
<file_sep>/PeasOut/Object/Headers/NPC.h
#if !defined(_NPC_H)
#define _NPC_H
#include "../Object/Headers/Entity.h"
class NPC : public Entity {
private:
bool isAlly;
public:
NPC();
NPC(glm::vec2 coords, glm::vec2 len, glm::vec2 vel, GLuint texID, bool ally);
};
#endif //_NPC_H
<file_sep>/PeasOut/Object/Headers/Player.h
#if !defined(_PLAYER_H)
#define _PLAYER_H
#include "../Object/Headers/Entity.h"
#include <windows.h>
#include <irrklang/irrKlang.h>
using namespace irrklang;
class Player : public Entity {
public:
Player();
Player(glm::vec2 coords, glm::vec2 len, glm::vec2 vel, GLuint texID, float jumpH, int sc, int life, ISoundEngine* sound);
//~Player();
enum JumpState {
ON_GROUND,
JUMPING,
FALLING
};
enum Direction {
UP,
RIGHT,
DOWN,
LEFT,
NONE
};
enum Moving {
MLEFT,
MRIGHT,
OTHER
};
typedef Direction Horizontal;//For horizontal collision (Up or Down)
typedef Direction Vertical;//For vertical collision (Left or Right)
typedef std::pair<Horizontal, Vertical> CollisionSides;
GET(Texture, t, Textures)
SET(Texture, t, Textures)
GET(int, score, Score)
SET(int, score, Score)
GET(int, lives, Lives)
SET(int, lives, Lives)
bool keys[256];
bool checkCollision(Entity &second);
void collisionSide(Entity &e);
void resetCollisions(), processKeys(), moveRight(), moveLeft(), jump(), checkJumpState(float dt);
private:
ISoundEngine* soundEng;
Texture t;
CollisionSides collision;
JumpState jstate;
Moving moving;
float initialCoordY, jumpHeight, seconds_on_ground;
int score, lives;
glm::vec2 initialVelocity;
};
#endif //_PLAYER_H
<file_sep>/PeasOut/Object/Headers/Prop.h
#if !defined(_PROP_H)
#define _PROP_H
#include "../Object/Headers/Entity.h"
class Prop : public Entity {
public:
Prop();
};
#endif //_PROP_H
<file_sep>/PeasOut/View/Headers/Renderer.h
#if !defined(_RENDERER_H)
#define _RENDERER_H
#include "../Object/Headers/Entity.h"
#include "../Object/Headers/Player.h"
#include "../Object/Headers/World.h"
#include "../Core/SpatialHash.h"
#include "Texture.h"
#include <windows.h> // Header file for Windows
//#include <gl\gl.h> // Header file for the OpenGL32 Library
//#include <gl\glu.h> // Header file for the GLu32 Library
#include "../glew/include/GL/glew.h"
#include "../glm/glm/glm.hpp"
#include "../glm/glm/gtc/matrix_transform.hpp"
#include "../glm/glm/gtc/type_ptr.hpp"
#include "FreeType.h"
#pragma once
#define GET(Type, MemberName, FaceName) \
Type Get##FaceName() const { \
return MemberName; \
}
#define SET(Type, MemberName, FaceName) \
void Set##FaceName(const Type &value) { \
MemberName = value; \
}
class Renderer
{
private:
GLfloat targetWidth, targetHeight;
GLfloat heightInfo, widthInfo;
freetype::font_data our_font;
Texture texture;
public:
typedef std::pair<GLdouble, GLdouble> X;
typedef std::pair<GLdouble, GLdouble> Y;
GET(Texture, texture, TextureList)
SET(Texture, texture, TextureList)
GET(GLfloat, targetWidth, TargetWidth)
SET(GLfloat, targetWidth, TargetWidth)
GET(GLfloat, targetHeight, TargetHeight)
SET(GLfloat, targetHeight, TargetHeight)
SET(freetype::font_data, our_font, Font)
Renderer(GLfloat targetW, GLfloat targetH);
void display(Player*, std::vector<Entity>); //called in winmain to draw everything to the screen
std::pair<X,Y> reshape(GLuint width, GLuint height, Player* p); //called when the window is resized
void init(); //called in winmain when the program starts. //called in winmain to update variables
void drawEntity(Entity* entity);
void drawEntity(Entity entity); //draws an Entity (Player, NPC)
void displayMenu(GLuint width, GLuint height, std::vector<Entity> entities, bool beatGame, Player* p);
void drawBackground(GLuint bkgr);
};
#endif
<file_sep>/PeasOut/View/Renderer.cpp
#include "../View/Headers/Renderer.h"
Renderer::Renderer(GLfloat width, GLfloat height) : targetWidth(width), targetHeight(height)
{
}
void Renderer::display(Player* p, std::vector<Entity> entities)
{
glClear(GL_COLOR_BUFFER_BIT);
glLoadIdentity();
drawBackground(texture.textures["background"]);
drawEntity(p);
for (Entity e : entities)
{
drawEntity(e);
}
freetype::print(our_font, 25.0f, heightInfo - 40.0f, "Score: %i", p->GetScore());
freetype::print(our_font, 250.0f, heightInfo - 40.0f, "Lives: %i", p->GetLives());
if(p->GetLives() < 1) freetype::print(our_font, widthInfo / 2.0f, heightInfo / 2.0f, "YOU DIED");
glFlush();
}
void Renderer::displayMenu(GLuint currentWidth, GLuint currentHeight, std::vector<Entity> entities, bool beatGame, Player* p)
{
GLfloat w = (GLfloat)currentWidth / targetWidth;
GLfloat h = (GLfloat)currentHeight / targetHeight;
const float c = 200.0f;
GLdouble camX = 0;
GLdouble camXWidth = 0 + c * w;
GLdouble camY = 0;
GLdouble camYHeight = 0 + c *h;
glViewport(0, 0, currentWidth, currentHeight); // Reset the current viewport
glMatrixMode(GL_PROJECTION); // select the projection matrix stack
glLoadIdentity(); // reset the top of the projection matrix to an identity matrix
gluOrtho2D(0, currentWidth, 0, currentHeight); // set the coordinate system for the window
glMatrixMode(GL_MODELVIEW); // Select the modelview matrix stack
glLoadIdentity(); // Reset the top of the modelview matrix to an identity matrix
glClear(GL_COLOR_BUFFER_BIT);
glLoadIdentity();
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, texture.textures["menubg"]);
//glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glPushMatrix();
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0); glVertex2f(0, 0);
glTexCoord2f(0.0, (GLfloat)targetHeight / 512); glVertex2f(0, currentHeight);
glTexCoord2f((GLfloat)targetWidth / 512, (GLfloat)targetHeight / 512); glVertex2f(currentWidth, currentHeight);
glTexCoord2f((GLfloat)targetWidth / 512, 0.0); glVertex2f(currentWidth, 0);
glEnd();
glDisable(GL_TEXTURE_2D);
glPopMatrix();
glColor3f(0.0, 0.0, 0.0);
if (beatGame) freetype::print(our_font, 0.0f, currentHeight - 50.0f, "Congratulations! Your score is %i", p->GetScore());
freetype::print(our_font, 150.0f, 200.0f, "Quit");
freetype::print(our_font, 150.0f, 50.0f, "Play");
for (Entity e : entities)
{
drawEntity(e);
}
glFlush();
}
std::pair<Renderer::X, Renderer::Y> Renderer::reshape(GLuint currentWidth, GLuint currentHeight, Player* p)
{
GLfloat w = (GLfloat)currentWidth / targetWidth;
GLfloat h = (GLfloat) currentHeight/ targetHeight;
const float c = 200.0f;
GLdouble camX = p->GetCoordinate().x - c * w;
GLdouble camXWidth = p->GetCoordinate().x + c * w;
GLdouble camY = p->GetCoordinate().y - c * h;
GLdouble camYHeight = p->GetCoordinate().y + c *h;
heightInfo = currentHeight; widthInfo = currentWidth;
std::pair<X, Y> cam; cam.first.first = camX; cam.first.second = camXWidth; cam.second.first = camY; cam.second.second = camYHeight;
glViewport(0, 0, currentWidth, currentHeight); // Reset the current viewport
glMatrixMode(GL_PROJECTION); // select the projection matrix stack
glLoadIdentity(); // reset the top of the projection matrix to an identity matrix
gluOrtho2D(camX, camXWidth, camY, camYHeight); // set the coordinate system for the window
glMatrixMode(GL_MODELVIEW); // Select the modelview matrix stack
glLoadIdentity(); // Reset the top of the modelview matrix to an identity matrix
return cam;
}
void Renderer::init()
{
glClearColor(0.0, 0.0, 0.0, 0.0); //sets the clear colour to black
//glClear(GL_COLOR_BUFFER_BIT) in the display function
//will clear the buffer to this colour.
}
void Renderer::drawBackground(GLuint bkgr)
{
float edge = 500.0f;
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, bkgr);
//glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glPushMatrix();
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0); glVertex2f(-edge, -edge);
glTexCoord2f(0.0,(GLfloat)targetHeight/512); glVertex2f(-edge, edge + targetHeight);
glTexCoord2f((GLfloat)targetWidth/512, (GLfloat)targetHeight / 512); glVertex2f(-edge + targetWidth, edge + targetHeight);
glTexCoord2f((GLfloat)targetWidth/512, 0.0); glVertex2f(edge + targetWidth, -edge);
glEnd();
glDisable(GL_TEXTURE_2D);
glPopMatrix();
}
void Renderer::drawEntity(Entity* entity)
{
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, entity->GetTextureID());
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glPushMatrix();
glTranslatef(entity->GetCoordinate().x, entity->GetCoordinate().y, 0.0);
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0); glVertex2f(0.0f, 0.0f);
glTexCoord2f(0.0, 1.0); glVertex2f(0.0f, entity->GetLength().y);
glTexCoord2f(1.0, 1.0); glVertex2f(entity->GetLength().x, entity->GetLength().y);
glTexCoord2f(1.0, 0.0); glVertex2f(entity->GetLength().x, 0.0f);
glEnd();
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
glPopMatrix();
}
void Renderer::drawEntity(Entity entity)
{
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, entity.GetTextureID());
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glPushMatrix();
glTranslatef(entity.GetCoordinate().x, entity.GetCoordinate().y, 0.0);
glBegin(GL_QUADS);
glTexCoord2f(0.0, 0.0); glVertex2f(0.0f, 0.0f);
glTexCoord2f(0.0, 1.0); glVertex2f(0.0f, entity.GetLength().y);
glTexCoord2f(1.0, 1.0); glVertex2f(entity.GetLength().x, entity.GetLength().y);
glTexCoord2f(1.0, 0.0); glVertex2f(entity.GetLength().x, 0.0f);
glEnd();
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
glPopMatrix();
}
<file_sep>/PeasOut/Core/Engine.cpp
#include <windows.h>
#include <iostream>
#include <memory>
#include "../glew/include/GL/glew.h"
#include "../View/Headers/Renderer.h"
#include "../Object/Headers/NPC.h"
#include "../Object/Headers/Player.h"
//#include "../Object/Headers/World.h" //Deprecated. Using a Spatial Grid to collect world objects now.
#include "../View/Headers/Texture.h"
#include "tgaload.h"
#include "SpatialHash.h"
#include <fstream>
#include <string>
#include <sstream>
#include <unordered_map>
#include "../glm/glm/glm.hpp"
#include "../glm/glm/gtc/matrix_transform.hpp"
#include "../glm/glm/gtc/type_ptr.hpp"
#include <experimental\filesystem>
#include "../View/Headers/FreeType.h"
#include <irrklang/irrKlang.h>
using namespace irrklang;
#define GAME_NULL_VELOCITY glm::vec2(0.0f, 0.0f)
#define GAME_SQUARE_SIZE glm::vec2(25.0f, 25.0f)
#define GAME_WORLD_WIDTH 2500
#define GAME_WORLD_HEIGHT 2500
#define GAME_JUMP_HEIGHT 70.0f
#define GAME_INITIAL_VELOCITY glm::vec2(15.0f, 18.0f)
#define GAME_PLAYER_SIZE GAME_SQUARE_SIZE
#define GAME_GRID_CELL_SIZE 200
#define GAME_TARGET_WIDTH GLuint(1024)
#define GAME_TARGET_HEIGHT GLuint(1024)
#define GAME_PLAYER_MAX_LIFE 3
#define GAME_STARTING_SCORE 0
#define RANGE(x,y,z) ((x>=y)&&(x<=z))
//List of available levels in the directory of the game (.amap extension)
std::vector<std::string> levels;
//List of accepted tiles that level reading function will convert into tiles
std::vector<int> acceptTiles;
//Index of the current level (first element of the levels array)
int thisLevel = 0;
//Current score, reset at the start of each game
int currentScore = 0;
//Set current life as maximum available during initialisation
int currentLife = GAME_PLAYER_MAX_LIFE;
//If the last level was finished
bool beatGame = false;
//Specifies if the game is Active, displaying Menu, Setting up or Quitting
enum GameState
{
MENU,
ACTIVE,
SETUP,
QUIT,
};
ISoundEngine *SoundEngine = createIrrKlangDevice();
//Font that is used to text in the game
freetype::font_data font;
//Mouse coordinates
int mouse_x, mouse_y;
//If left mouse button is pressed in menu
bool mouseKeyPressed = false;
//Is the world set up
bool worldSetUp = false;
//Current resolution
GLuint currentWidth = 1024, currentHeight = 768;
//Reciprocal of the performance timer frequency, used to convert elapsed ticks into seconds
double timerFrequencyRecip = 0.000003;
//Delta time between each update cycle
float deltaT;
//Previous time used to calculate delta time
__int64 prevTime;
//Starting coordinate for player
glm::vec2 startingCoord;
//List of textures
Texture t;
//Initial player object
Player* player = new Player(); GLuint plChar;
//Rendering functions
Renderer renderer(GAME_TARGET_WIDTH, GAME_TARGET_HEIGHT);
//Spatial Grid to collect world objects
SpatialHash grid(GAME_WORLD_WIDTH, GAME_WORLD_HEIGHT, GAME_GRID_CELL_SIZE);
//Entities on screen that are going to be drawn and checked for collision
std::vector<Entity> collected;
//Player perspective to get Entities on screen
std::pair<Renderer::X, Renderer::Y> cam;
//Whether the user is playing, in Menu or wants to Quit
GameState stateOfThisGame;
//World world; //Deprecated.
/*--------------------------Game functions------------------------------------------------------*/
//Processes player, setting proper life and score after an event trigger
void processPlayer();
//Checks if current tile code is an acceptable tile
bool checkTileCode(int tileCode);
//Switch to next level when portal is entered
void nextLevel();
//Gets all ".amap" extension files in current directory of the game executable
void getMaps();
//Loads Entity textures
void loadTextures();
//Checks if the game is in Menu, if user wants to Quit or is the game Active
void checkGameState();
//Not used in code. Generates a map template.
void generateMapFile();
void setCollisionFlags(int things[99][99], Entity &e, int i, int j);
//Simulates the delta of time for each update cycle
double timeSimulation();
//Checks for collisions of Entities that are displayed on the screen
void doCollisions();
//External keyboard processing (non-player movement, e.g. reset game)
void processKeys_external();
//Game update cycle to process objects on screen, keyboard, resolve collision, check jumping
void update();
//Renders the objects on screen
void render(int width, int height);
//Function to populate the game world, adding objects to grid
void populateWorld(int selectedLevel);
// Declaration For WndProc
LRESULT CALLBACK WndProc(HWND, UINT, WPARAM, LPARAM);
// releases and destroys the window
void KillGLWindow();
//creates the window
bool CreateGLWindow(char* title, int width, int height);
// Win32 main function
int WINAPI WinMain(HINSTANCE, HINSTANCE, LPSTR, int);
/*------------------------------------------------------------------------------------------*/
/*Win32 global variables:
-----------------------------*/
// Private GDI Device Context
HDC hDC = NULL;
// Permanent Rendering Context
HGLRC hRC = NULL;
// Holds Our Window Handle
HWND hWnd = NULL;
// Holds The Instance Of The Application
HINSTANCE hInstance;
/*--------------------------*/
void checkGameState()
{
if (stateOfThisGame == GameState::SETUP)
{
thisLevel = 0;
currentScore = GAME_STARTING_SCORE;
currentLife = GAME_PLAYER_MAX_LIFE;
grid.clear();
delete(player);
player = new Player(glm::vec2(0.0f, 80.0f), GAME_PLAYER_SIZE, GAME_INITIAL_VELOCITY, plChar, GAME_JUMP_HEIGHT, GAME_STARTING_SCORE, GAME_PLAYER_MAX_LIFE, SoundEngine);
std::cout << "Player created." << std::endl;
player->SetTextures(t);
std::cout << "Player texture list set." << std::endl;
std::cout << "Populating world." << std::endl;
populateWorld(thisLevel);
stateOfThisGame = GameState::ACTIVE;
worldSetUp = true;
}
else if (stateOfThisGame == GameState::ACTIVE)
{
if (SoundEngine->isCurrentlyPlaying("audio/gw2.mp3"))
{
SoundEngine->stopAllSounds();
}
if (!SoundEngine->isCurrentlyPlaying("audio/game.mp3")) SoundEngine->play2D("audio/game.mp3");
update();
}
else
{
if (SoundEngine->isCurrentlyPlaying("audio/game.mp3")) SoundEngine->stopAllSounds();
Entity start = Entity(glm::vec2(25.0f, 25.0f), glm::vec2(100.0f, 100.0f), GAME_NULL_VELOCITY, t.textures["start"]);
Entity quit = Entity(glm::vec2(25.0f, 160.0f), glm::vec2(100.0f, 100.0f), GAME_NULL_VELOCITY, t.textures["quit"]);
Entity cursor = Entity(glm::vec2(mouse_x, mouse_y), glm::vec2(5.0f, 5.0f), GAME_NULL_VELOCITY, t.textures["cursor"]);
std::vector<Entity> buttonslist;
buttonslist.push_back(start);
buttonslist.push_back(quit);
buttonslist.push_back(cursor);
std::vector<std::pair<Entity, int>> buttons;
buttons.push_back({ start,0 });
buttons.push_back({ quit, 1 });
for (auto e : buttons)
{
bool on_x = cursor.GetCoordinate().x + cursor.GetLength().x >= e.first.GetCoordinate().x && e.first.GetCoordinate().x + e.first.GetLength().x >= cursor.GetCoordinate().x;
bool on_y = cursor.GetCoordinate().y + cursor.GetLength().y >= e.first.GetCoordinate().y && e.first.GetCoordinate().y + e.first.GetLength().y >= cursor.GetCoordinate().y;
if (on_x && on_y)
{
if (mouseKeyPressed)
{
if (e.second == 0)
{
if (!worldSetUp)
{
stateOfThisGame = GameState::SETUP;
beatGame = false;
}
else stateOfThisGame = GameState::ACTIVE;
}
if (e.second == 1) stateOfThisGame = GameState::QUIT;
}
break;
}
}
renderer.displayMenu(currentWidth, currentHeight, buttonslist, beatGame, player);
}
}
void processPlayer()
{
currentScore = player->GetScore();
currentLife = player->GetLives() - 1;
delete(player);
player = new Player(startingCoord, GAME_PLAYER_SIZE, GAME_INITIAL_VELOCITY, plChar, GAME_JUMP_HEIGHT, currentScore, currentLife, SoundEngine);
player->SetTextures(t);
}
void update()
{
collected = grid.collect(cam.first.first, cam.first.second, cam.second.first, cam.second.second);
double dt = timeSimulation();
if (currentLife > 0)
{
processKeys_external();
doCollisions(); //Collision detection
player->processKeys(); //Process keyboard
player->checkJumpState(dt); //Check if player is jumping/falling/on-ground
}
//Sets Perspective GL Screen in respect to player coordinates
cam = renderer.reshape(currentWidth, currentHeight, player);
renderer.display(player, collected); // Draw the scene of objects near player
const float padding = 100.0f;
if (player->GetCoordinate().x < -padding
|| player->GetCoordinate().y < -padding
|| player->GetCoordinate().x > GAME_WORLD_WIDTH + padding
|| player->GetCoordinate().y > GAME_WORLD_HEIGHT + padding)
{
processPlayer();
std::cout << "Player fell." << std::endl;
}
}
void loadTextures()
{
std::cout << "Loading textures." << std::endl;
t.textures["pChar"] = tgaLoadAndBind("player.tga", TGA_ALPHA); plChar = tgaLoadAndBind("player.tga", TGA_ALPHA);
t.textures["pCharL"] = tgaLoadAndBind("player_left.tga", TGA_ALPHA);
t.textures["pCharLU"] = tgaLoadAndBind("player_leftUp.tga", TGA_ALPHA);
t.textures["pCharLD"] = tgaLoadAndBind("player_leftDown.tga", TGA_ALPHA);
t.textures["pCharR"] = tgaLoadAndBind("player_right.tga", TGA_ALPHA);
t.textures["pCharRU"] = tgaLoadAndBind("player_rightUp.tga", TGA_ALPHA);
t.textures["pCharRD"] = tgaLoadAndBind("player_rightDown.tga", TGA_ALPHA);
t.textures["pCharU"] = tgaLoadAndBind("player_up.tga", TGA_ALPHA);
t.textures["pCharD"] = tgaLoadAndBind("player_down.tga", TGA_ALPHA);
t.textures["grass"] = tgaLoadAndBind("grass.tga", TGA_ALPHA);
t.textures["dirt"] = tgaLoadAndBind("dirt.tga", TGA_ALPHA);
t.textures["start"] = tgaLoadAndBind("start.tga", TGA_ALPHA);
t.textures["quit"] = tgaLoadAndBind("quit.tga", TGA_ALPHA);
t.textures["pea"] = tgaLoadAndBind("pea.tga", TGA_ALPHA);
t.textures["background"] = tgaLoadAndBind("background.tga", TGA_ALPHA);
t.textures["menubg"] = tgaLoadAndBind("menubg.tga", TGA_ALPHA);
t.textures["portal"] = tgaLoadAndBind("portal.tga", TGA_ALPHA);
t.textures["cursor"] = tgaLoadAndBind("cursor.tga", TGA_ALPHA);
std::cout << "Textures loaded." << std::endl;
renderer.SetTextureList(t);
}
void processKeys_external()
{
if (player->keys[0x52])
{
delete(player);
player = new Player(startingCoord, GAME_PLAYER_SIZE, GAME_INITIAL_VELOCITY, plChar, GAME_JUMP_HEIGHT, currentScore, currentLife, SoundEngine);
grid.clear();
populateWorld(thisLevel);
player->SetTextures(t);
std::cout << "Player reset." << std::endl;
}
}
void render(int width, int height)
{
cam = renderer.reshape(width, height, player); // Set Up Our Perspective GL Screen
renderer.init();
}
void setCollisionFlags(int things[99][99], Entity& e, int i, int j)
{
//Guard each neighbour lookup so tiles on the edge of the 99x99 array never read out of bounds
if (i + 1 < 99 && (things[i + 1][j] == 1 || things[i + 1][j] == 2))
e.SetN_up(true);
if (i - 1 >= 0 && (things[i - 1][j] == 1 || things[i - 1][j] == 2))
e.SetN_down(true);
if (j + 1 < 99 && (things[i][j + 1] == 1 || things[i][j + 1] == 2))
e.SetN_right(true);
if (j - 1 >= 0 && (things[i][j - 1] == 1 || things[i][j - 1] == 2))
e.SetN_left(true);
}
void doCollisions()
{
player->resetCollisions(); //Each update cycle resets collision to none
for(Entity& e : collected) //For each entity on screen
{
if (player->checkCollision(e)) //AABB collision detection
{
if (e.GetDestructible() == true)
{
player->SetScore(player->GetScore() + 1);
grid.remove(e); break;
}
if (e.GetTextureID() == t.textures["portal"])
{
SoundEngine->play2D("audio/portal.wav");
thisLevel++;
nextLevel();
break;
}
player->collisionSide(e); //Check for the side of collision
}
}
}
void nextLevel()
{
if (thisLevel < levels.size())
{
currentScore = player->GetScore();
grid.clear();
populateWorld(thisLevel);
}
else
{
worldSetUp = false;
beatGame = true;
stateOfThisGame = GameState::MENU;
grid.clear();
}
}
void generateMapFile()
{
using namespace std;
ofstream out;
out.open("level_template.amap", fstream::out);
for (int i = 0; i <= GAME_WORLD_HEIGHT/25; i++)
{
for (int j = 0; j <= GAME_WORLD_WIDTH/25; j++)
{
out << "0 ";
}
out << endl;
}
}
bool checkTileCode(int tileCode)
{
bool accept = false;
for (auto i : acceptTiles)
{
if (tileCode == i)
{
accept = true;
break;
}
}
return accept;
}
void getMaps()
{
char buffer[MAX_PATH];
GetModuleFileName(NULL, buffer, MAX_PATH);
std::string::size_type pos = std::string(buffer).find_last_of("\\/");
auto thisdir = std::string(buffer).substr(0, pos);
for (const auto& p : std::experimental::filesystem::directory_iterator(thisdir))
{
if (p.path().filename().extension().string() == ".amap")
{
auto s = thisdir + "\\" + p.path().filename().string();
levels.push_back(s);
}
}
}
void populateWorld(int selectedLevel)
{
int row = 0, col = 0;
int tileCode;
std::string line;
std::string levelName;
levelName = levels[selectedLevel];
std::cout << "Loading " << levelName << "." << std::endl;
std::ifstream fstream(levelName);
std::vector<std::vector<GLuint>> tileData;
acceptTiles = { 1,2,7,5,9 };
int things[99][99] = { 0 }; //zero-initialise so unvisited cells never hold garbage tile codes
if (fstream)
{
while (std::getline(fstream, line)) // Read each line from level
{
col = 0;
std::istringstream sstream(line);
while (sstream >> tileCode)
{
//Flip vertically so the first line of the file is the top of the map, skipping anything outside the 99x99 array
int flippedRow = 100 - row;
if (checkTileCode(tileCode) && flippedRow >= 0 && flippedRow < 99 && col < 99)
{
things[flippedRow][col] = tileCode;
}
col++;
}
row++;
}
}
for (int i = 0; i < 99; i++)
{
for (int j = 0; j < 99; j++)
{
if (things[i][j] == 1)
{
Entity e(glm::vec2(j*25.0f, i*25.0f), glm::vec2(25.0f, 25.0f),
glm::vec2(0.08f, 0.033f), t.textures["dirt"]);
setCollisionFlags(things, e, i, j);
grid.add(e);
}
if (things[i][j] == 2)
{
Entity e(glm::vec2(j*25.0f, i*25.0f), glm::vec2(25.0f, 25.0f),
glm::vec2(0.08f, 0.033f), t.textures["grass"]);
setCollisionFlags(things, e, i, j);
grid.add(e);
}
if (things[i][j] == 9)
{
Entity e(glm::vec2((j*25.0f)+5.5f, (i*25.0f)+5.5f), glm::vec2(17.0f, 17.0f),
glm::vec2(0.08f, 0.033f), t.textures["portal"]);
setCollisionFlags(things, e, i, j);
grid.add(e);
}
if (things[i][j] == 7)
{
Entity e(glm::vec2((j*25.0f)+12.5f, (i*25.0f)+12.5f), glm::vec2(10.0f, 10.0f),
glm::vec2(0.08f, 0.033f), t.textures["pea"]);
setCollisionFlags(things, e, i, j);
e.SetDestructible(true);
grid.add(e);
}
if (things[i][j] == 5)
{
player->SetCoordinate(glm::vec2(j*25.0f, i*25.0f));
startingCoord = glm::vec2(j*25.0f, i*25.0f);
std::cout << "Starting player coordinate set." << std::endl;
}
}
}
}
double timeSimulation()
{
// Get the current time
LARGE_INTEGER t;
QueryPerformanceCounter(&t);
__int64 currentTime = t.QuadPart;
__int64 ticksElapsed = currentTime - prevTime; // Ticks elapsed since the previous time step
double deltaT = double(ticksElapsed) * timerFrequencyRecip; // Convert to second
//cout << ticksElapsed << " " << deltaT << endl;
// Advance timer
prevTime = currentTime; // use the current time as the previous time in the next step
return deltaT;
}
/******************* WIN32 FUNCTIONS ***************************/
int WINAPI WinMain(HINSTANCE hInstance, // Instance
HINSTANCE hPrevInstance, // Previous Instance
LPSTR lpCmdLine, // Command Line Parameters
int nCmdShow) // Window Show State
{
MSG msg = {}; // Windows Message Structure (zero-initialised so wParam is defined even if the loop exits without WM_QUIT)
bool done = false; // Bool Variable To Exit Loop
AllocConsole();
FILE *stream;
freopen_s(&stream, "CONOUT$", "w", stdout);
// Create Our OpenGL Window
if (!CreateGLWindow("Peas Out! A Legend of Lost Peas", currentWidth, currentHeight))
{
return 0; // Quit If Window Was Not Created
}
prevTime = 0;
while (!done) // Loop That Runs While done=FALSE
{
if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) // Is There A Message Waiting?
{
if (msg.message == WM_QUIT) // Have We Received A Quit Message?
{
done = true; // If So done=TRUE
}
else // If Not, Deal With Window Messages
{
TranslateMessage(&msg); // Translate The Message
DispatchMessage(&msg); // Dispatch The Message
}
}
else // If There Are No Messages
{
if (player->keys[VK_ESCAPE]) stateOfThisGame = GameState::MENU;
if (stateOfThisGame == GameState::QUIT) done = true;
checkGameState();
SwapBuffers(hDC); // Swap Buffers (Double Buffering)
}
}
// Shutdown
KillGLWindow(); // Kill The Window
return (int)(msg.wParam); // Exit The Program
}
//WIN32 Processes function - useful for responding to user inputs or other events.
LRESULT CALLBACK WndProc(HWND hWnd, // Handle For This Window
UINT uMsg, // Message For This Window
WPARAM wParam, // Additional Message Information
LPARAM lParam) // Additional Message Information
{
switch (uMsg) // Check For Windows Messages
{
case WM_CLOSE: // Did We Receive A Close Message?
{
PostQuitMessage(404); // Send A Quit Message
return 0; // Jump Back
}
break;
case WM_SIZE: // Resize The OpenGL Window
{
renderer.reshape(LOWORD(lParam), HIWORD(lParam), player); // LoWord=Width, HiWord=Height
currentWidth = LOWORD(lParam);
currentHeight = HIWORD(lParam);
return 0; // Jump Back
}
break;
case WM_LBUTTONDOWN:
{
mouseKeyPressed = true;
}
break;
case WM_LBUTTONUP:
{
mouseKeyPressed = false;
}
break;
case WM_MOUSEMOVE:
{
if (stateOfThisGame == MENU)
{
mouse_x = LOWORD(lParam);
mouse_y = currentHeight - HIWORD(lParam);
}
//LeftPressed = true;
}
break;
case WM_KEYDOWN: // Is A Key Being Held Down?
{
player->keys[wParam] = true; // If So, Mark It As TRUE
if (player->GetLives() < 1 && stateOfThisGame == GameState::ACTIVE)
stateOfThisGame = GameState::SETUP;
if (player->keys[VK_F11])
{
}
return 0; // Jump Back
}
break;
case WM_KEYUP: // Has A Key Been Released?
{
player->keys[wParam] = false; // If So, Mark It As FALSE
return 0; // Jump Back
}
break;
}
// Pass All Unhandled Messages To DefWindowProc
return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
void KillGLWindow() // Properly Kill The Window
{
if (hRC) // Do We Have A Rendering Context?
{
if (!wglMakeCurrent(NULL, NULL)) // Are We Able To Release The DC And RC Contexts?
{
MessageBox(NULL, "Release Of DC And RC Failed.", "SHUTDOWN ERROR", MB_OK | MB_ICONINFORMATION);
}
if (!wglDeleteContext(hRC)) // Are We Able To Delete The RC?
{
MessageBox(NULL, "Release Rendering Context Failed.", "SHUTDOWN ERROR", MB_OK | MB_ICONINFORMATION);
}
hRC = NULL; // Set RC To NULL
}
if (hDC && !ReleaseDC(hWnd, hDC)) // Are We Able To Release The DC
{
MessageBox(NULL, "Release Device Context Failed.", "SHUTDOWN ERROR", MB_OK | MB_ICONINFORMATION);
hDC = NULL; // Set DC To NULL
}
if (hWnd && !DestroyWindow(hWnd)) // Are We Able To Destroy The Window?
{
MessageBox(NULL, "Could Not Release hWnd.", "SHUTDOWN ERROR", MB_OK | MB_ICONINFORMATION);
hWnd = NULL; // Set hWnd To NULL
}
if (!UnregisterClass("OpenGL", hInstance)) // Are We Able To Unregister Class
{
MessageBox(NULL, "Could Not Unregister Class.", "SHUTDOWN ERROR", MB_OK | MB_ICONINFORMATION);
hInstance = NULL; // Set hInstance To NULL
}
font.clean();
}
/* This Code Creates Our OpenGL Window. Parameters Are: *
* title - Title To Appear At The Top Of The Window *
* width - Width Of The GL Window Or Fullscreen Mode *
* height - Height Of The GL Window Or Fullscreen Mode */
bool CreateGLWindow(char* title, int width, int height)
{
GLuint PixelFormat; // Holds The Results After Searching For A Match
WNDCLASS wc; // Windows Class Structure
DWORD dwExStyle; // Window Extended Style
DWORD dwStyle; // Window Style
RECT WindowRect; // Grabs Rectangle Upper Left / Lower Right Values
WindowRect.left = (long)0; // Set Left Value To 0
WindowRect.right = (long)width; // Set Right Value To Requested Width
WindowRect.top = (long)0; // Set Top Value To 0
WindowRect.bottom = (long)height; // Set Bottom Value To Requested Height
hInstance = GetModuleHandle(NULL); // Grab An Instance For Our Window
wc.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC; // Redraw On Size, And Own DC For Window.
wc.lpfnWndProc = (WNDPROC)WndProc; // WndProc Handles Messages
wc.cbClsExtra = 0; // No Extra Window Data
wc.cbWndExtra = 0; // No Extra Window Data
wc.hInstance = hInstance; // Set The Instance
wc.hIcon = LoadIcon(NULL, IDI_WINLOGO); // Load The Default Icon
wc.hCursor = LoadCursor(NULL, IDC_ARROW); // Load The Arrow Pointer
wc.hbrBackground = NULL; // No Background Required For GL
wc.lpszMenuName = NULL; // We Don't Want A Menu
wc.lpszClassName = "OpenGL"; // Set The Class Name
if (!RegisterClass(&wc)) // Attempt To Register The Window Class
{
MessageBox(NULL, "Failed To Register The Window Class.", "ERROR", MB_OK | MB_ICONEXCLAMATION);
return false; // Return FALSE
}
dwExStyle = WS_EX_APPWINDOW | WS_EX_WINDOWEDGE; // Window Extended Style
dwStyle = WS_OVERLAPPEDWINDOW; // Windows Style
AdjustWindowRectEx(&WindowRect, dwStyle, FALSE, dwExStyle); // Adjust Window To True Requested Size
// Create The Window
if (!(hWnd = CreateWindowEx(dwExStyle, // Extended Style For The Window
"OpenGL", // Class Name
title, // Window Title
dwStyle | // Defined Window Style
WS_CLIPSIBLINGS | // Required Window Style
WS_CLIPCHILDREN, // Required Window Style
0, 0, // Window Position
WindowRect.right - WindowRect.left, // Calculate Window Width
WindowRect.bottom - WindowRect.top, // Calculate Window Height
NULL, // No Parent Window
NULL, // No Menu
hInstance, // Instance
NULL))) // Dont Pass Anything To WM_CREATE
{
KillGLWindow(); // Reset The Display
MessageBox(NULL, "Window Creation Error.", "ERROR", MB_OK | MB_ICONEXCLAMATION);
return false; // Return FALSE
}
static PIXELFORMATDESCRIPTOR pfd = // pfd Tells Windows How We Want Things To Be
{
sizeof(PIXELFORMATDESCRIPTOR), // Size Of This Pixel Format Descriptor
1, // Version Number
PFD_DRAW_TO_WINDOW | // Format Must Support Window
PFD_SUPPORT_OPENGL | // Format Must Support OpenGL
PFD_DOUBLEBUFFER, // Must Support Double Buffering
PFD_TYPE_RGBA, // Request An RGBA Format
24, // Select Our Color Depth
0, 0, 0, 0, 0, 0, // Color Bits Ignored
0, // No Alpha Buffer
0, // Shift Bit Ignored
0, // No Accumulation Buffer
0, 0, 0, 0, // Accumulation Bits Ignored
24, // 24Bit Z-Buffer (Depth Buffer)
0, // No Stencil Buffer
0, // No Auxiliary Buffer
PFD_MAIN_PLANE, // Main Drawing Layer
0, // Reserved
0, 0, 0 // Layer Masks Ignored
};
if (!(hDC = GetDC(hWnd))) // Did We Get A Device Context?
{
KillGLWindow(); // Reset The Display
MessageBox(NULL, "Can't Create A GL Device Context.", "ERROR", MB_OK | MB_ICONEXCLAMATION);
return false; // Return FALSE
}
if (!(PixelFormat = ChoosePixelFormat(hDC, &pfd))) // Did Windows Find A Matching Pixel Format?
{
KillGLWindow(); // Reset The Display
MessageBox(NULL, "Can't Find A Suitable PixelFormat.", "ERROR", MB_OK | MB_ICONEXCLAMATION);
return false; // Return FALSE
}
if (!SetPixelFormat(hDC, PixelFormat, &pfd)) // Are We Able To Set The Pixel Format?
{
KillGLWindow(); // Reset The Display
MessageBox(NULL, "Can't Set The PixelFormat.", "ERROR", MB_OK | MB_ICONEXCLAMATION);
return false; // Return FALSE
}
if (!(hRC = wglCreateContext(hDC))) // Are We Able To Get A Rendering Context?
{
KillGLWindow(); // Reset The Display
MessageBox(NULL, "Can't Create A GL Rendering Context.", "ERROR", MB_OK | MB_ICONEXCLAMATION);
return false; // Return FALSE
}
if (!wglMakeCurrent(hDC, hRC)) // Try To Activate The Rendering Context
{
KillGLWindow(); // Reset The Display
MessageBox(NULL, "Can't Activate The GL Rendering Context.", "ERROR", MB_OK | MB_ICONEXCLAMATION);
return false; // Return FALSE
}
ShowWindow(hWnd, SW_SHOW); // Show The Window
SetForegroundWindow(hWnd); // Slightly Higher Priority
SetFocus(hWnd); // Sets Keyboard Focus To The Window
stateOfThisGame = GameState::MENU;
std::cout << "Initial screen rendering." << std::endl;
render(width, height);
loadTextures();
font.init("arialbd.ttf", 36);
renderer.SetFont(font);
getMaps();
SoundEngine->setSoundVolume(0.5f);
SoundEngine->play2D("audio/gw2.mp3", GL_TRUE);
std::cout << RANGE(2, 1, 10) << std::endl;
return true; // Success
}
<file_sep>/PeasOut/Core/SpatialHash.h
#if !defined(_SPATIALHASH_H)
#define _SPATIALHASH_H
#include <unordered_map>
#include <map>
#include "../Object/Headers/Entity.h"
#include <vector>
#include <algorithm>
#include "../glm/glm/glm.hpp"
#include "../glm/glm/gtc/matrix_transform.hpp"
#include "../glm/glm/gtc/type_ptr.hpp"
#include <memory>
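//Convenience macros that expand to trivial Get<FaceName>() / Set<FaceName>() accessors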
#define GET(Type, MemberName, FaceName) \
Type Get##FaceName() const { \
return MemberName; \
}
#define SET(Type, MemberName, FaceName) \
void Set##FaceName(const Type &value) { \
MemberName = value; \
}
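//Hash functor for grid cell coordinates: combines the integer parts of x and y into a bucket index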
struct SimpleHash {
size_t operator()(const glm::vec2& p) const {
return (unsigned int)p.x ^ (unsigned int)p.y;
}
};
class SpatialHash {
public:
typedef glm::vec2 GridCoordinates;
typedef std::vector<Entity> Entities;
typedef std::unordered_map<GridCoordinates, Entities, SimpleHash> Grid;
//CONSTRUCTORS
SpatialHash();
SpatialHash(float wWidth, float wHeight, int cellSize);
void add(Entity &e);
void remove(Entity e);
void clear();
std::vector<Entity> collect(float camX, float camXWidth, float camY, float camYHeight);
private:
Grid grid;
const int CELL_SIZE;
float camX, camY, camXWidth, camYHeight, worldWidth, worldHeight;
};
#endif //_SPATIALHASH_H<file_sep>/PeasOut/Object/NPC.cpp
#include "../Object/Headers/NPC.h"
NPC::NPC() : Entity(), isAlly(false)
{ }
NPC::NPC(glm::vec2 coords, glm::vec2 len, glm::vec2 vel, GLuint texID,
bool ally)
: Entity(coords, len, vel, texID), isAlly(ally)
{ }
<file_sep>/PeasOut/Object/Prop.cpp
#include "../Object/Headers/Prop.h"
Prop::Prop()
{
}
<file_sep>/PeasOut/Object/Player.cpp
#include "../Object/Headers/Player.h"
#include <iostream>
Player::Player() : Entity(), jumpHeight(0.0f), jstate(JumpState::FALLING), collision(Direction::NONE, Direction::NONE), initialVelocity(velocity),
seconds_on_ground(0), moving(Moving::OTHER), score(0), lives(0)
{ }
Player::Player(glm::vec2 coords, glm::vec2 len, glm::vec2 vel, GLuint texID,
float jumpH, int sc, int life, ISoundEngine* sound)
: Entity(coords, len, vel, texID), jstate(JumpState::FALLING), jumpHeight(jumpH), initialVelocity(velocity), collision(Direction::NONE, Direction::NONE),
seconds_on_ground(0), moving(Moving::OTHER), score(sc), lives(life), soundEng(sound)
{ }
void Player::processKeys()
{
moving = Moving::OTHER;
if (keys[VK_UP] || keys[0x57])
{
jump();
}
if (keys[VK_DOWN] || keys[0x53])
{
}
if (keys[VK_LEFT] || keys[0x41])
{
moveLeft();
}
if (keys[VK_RIGHT] || keys[0x44])
{
moveRight();
}
}
bool Player::checkCollision(Entity &e)
{
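//Axis-aligned bounding box (AABB) test: the player and entity collide only if they overlap on both axes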
bool on_x = coordinate.x + length.x >= e.GetCoordinate().x && e.GetCoordinate().x + e.GetLength().x >= coordinate.x;
bool on_y = coordinate.y + length.y >= e.GetCoordinate().y && e.GetCoordinate().y + e.GetLength().y >= coordinate.y;
return on_x && on_y;
}
void Player::resetCollisions()
{
collision = Player::CollisionSides(Player::Direction::NONE, Player::Direction::NONE);
}
void Player::collisionSide(Entity &e)
{
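//Find the point on the entity's box closest to the player's centre, then compare the
//remaining offset against the four cardinal directions to decide which side was hit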
glm::vec2 pHalf(length.x / 2, length.y / 2);
glm::vec2 pCenter(coordinate.x + pHalf.x, coordinate.y + pHalf.y);
glm::vec2 eHalf(e.GetLength().x / 2, e.GetLength().y / 2);
glm::vec2 eCenter(e.GetCoordinate().x + eHalf.x, e.GetCoordinate().y + eHalf.y);
glm::vec2 difference = pCenter - eCenter;
glm::vec2 clamped = glm::clamp(difference, -eHalf, eHalf);
glm::vec2 closest = eCenter + clamped;
difference = pCenter - closest;
glm::vec2 directions[] = {
glm::vec2(0.0f, 1.0f), // up
glm::vec2(1.0f, 0.0f), // right
glm::vec2(0.0f, -1.0f), // down
glm::vec2(-1.0f, 0.0f) // left
};
float max = 0.0f;
int best_match = 5;
for (int i = 0; i < 4; i++)
{
float dot_product = glm::dot(glm::normalize(difference), directions[i]);
if (dot_product > max)
{
max = dot_product;
best_match = i;
}
}
switch (best_match)
{
case 0:
if(!e.GetN_up())
collision.second = (Direction)best_match;
break;
case 2:
if (!e.GetN_down())
collision.second = (Direction)best_match;
break;
case 1:
if (!e.GetN_right())
collision.first = (Direction)best_match;
break;
case 3:
if (!e.GetN_left())
collision.first = (Direction)best_match;
break;
}
}
void Player::moveRight() {
if (collision.first != Direction::LEFT)
{
moving = Moving::MRIGHT;
if (jstate == JUMPING) textureID = t.textures["pCharRU"];
else if (jstate == FALLING) textureID = t.textures["pCharRD"];
else textureID = t.textures["pCharR"];
coordinate.x += velocity.x*dt;
}
}
void Player::moveLeft() {
if (collision.first != Direction::RIGHT)
{
moving = Moving::MLEFT;
if (jstate == JUMPING) textureID = t.textures["pCharLU"];
else if (jstate == FALLING) textureID = t.textures["pCharLD"];
else textureID = t.textures["pCharL"];
coordinate.x -= velocity.x*dt;
}
}
void Player::jump()
{
if (jstate == JumpState::ON_GROUND)
{
initialCoordY = coordinate.y;
//this->state = JumpState::JUMPING;
jstate = JUMPING;
}
}
void Player::checkJumpState(float dt)
{
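//Jump state machine: FALLING integrates gravity, JUMPING rises towards jumpHeight, ON_GROUND waits for the next jump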
this->dt = dt;
if (velocity.y > 50.0f) velocity.y = 50.0f;
if (dt > 0.15f) dt = 0.15f;
float g = -9.81f; //gravitational acceleration
float v_old, c_old, fc;
double ndt;
if (collision.second == UP)
{
initialCoordY = coordinate.y;
//velocity.y = initialVelocity.y;
}
v_old = velocity.y;
c_old = coordinate.y;
switch (jstate)
{
case FALLING:
/*if(moving == Moving::MLEFT)
textureID = t.textures["pCharL"];
else if (moving == Moving::MRIGHT)
textureID = t.textures["pCharR"];
else
textureID = t.textures["pCharD"];*/
if (moving == Moving::OTHER) textureID = t.textures["pCharD"];
if (collision.second == DOWN)
v_old = -initialVelocity.y;
velocity.y = v_old + g*dt;
coordinate.y = c_old + ((v_old + velocity.y) / 2)*dt; // Use improved Euler Integration
if (coordinate.y <= initialCoordY)
{
if (collision.second == NONE)
{
jstate = FALLING;
}
else
{
jstate = ON_GROUND;
coordinate.y = initialCoordY;
velocity.y = -velocity.y;
}
}
break;
case JUMPING:
if (!soundEng->isCurrentlyPlaying("audio/jump.wav")) soundEng->play2D("audio/jump.wav");
if (moving == Moving::OTHER) textureID = t.textures["pCharU"];
velocity.y = v_old + g*dt; // Reintegrate
if (velocity.y < 0)
jstate = FALLING;
if (collision.second == DOWN)
{
jstate = FALLING;
//velocity.y = initialVelocity.y;
//velocity.y = -velocity.y;
break;
}
coordinate.y = coordinate.y + (((v_old + velocity.y) / 1.4)*dt);
if (coordinate.y > initialCoordY + jumpHeight)
{
if (collision.second == DOWN)
{
jstate = FALLING;
velocity.y = initialVelocity.y;
velocity.y = -velocity.y;
break;
}
jstate = FALLING;
velocity.y = 15.0f;
}
break;
case ON_GROUND:
if (moving == Moving::OTHER) textureID = t.textures["pChar"];
if (collision.second == UP) { //check if on ground for more than second, then change velocity
coordinate.y = initialCoordY;
if (seconds_on_ground > 0.5)
{
velocity.y = initialVelocity.y;
seconds_on_ground = 0.0f;
break;
}
seconds_on_ground += dt;
break;
}
else
{
jstate = FALLING;
velocity.y = initialVelocity.y;
break;
}
}
moving = Moving::OTHER; //assignment (the original "==" comparison had no effect)
}
<file_sep>/PeasOut/Object/Entity.cpp
#include "../Object/Headers/Entity.h"
namespace
{
//Single shared counter so IDs stay unique regardless of which constructor is used;
//two separate function-local counters could hand out the same ID twice
int nextEntityId = 0;
}
Entity::Entity()
: coordinate(0, 0), length(1, 1), velocity(0.0f),
textureID(0), dt(0.0f), destructible(false)
{
_id = nextEntityId++;
}
Entity::Entity(glm::vec2 coords, glm::vec2 len, glm::vec2 vel,
GLuint texID)
: coordinate(coords), length(len), velocity(vel), textureID(texID), dt(0.0f), destructible(false)
{
_id = nextEntityId++;
}
bool operator==(const Entity &e1, const Entity &e2)
{
return e1.GetObjID() == e2.GetObjID();
}
<file_sep>/PeasOut/Core/SpatialHash.cpp
#include "SpatialHash.h"
SpatialHash::SpatialHash() : CELL_SIZE(60)
{}
SpatialHash::SpatialHash(float wWidth, float wHeight, int cellSize) : CELL_SIZE(cellSize), worldWidth(wWidth), worldHeight(wHeight)
{}
void SpatialHash::add(Entity &e)
{
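//Work out the range of grid cells covered by the entity's axis-aligned bounding box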
int x = (int)e.GetCoordinate().x / CELL_SIZE;
int y = (int)e.GetCoordinate().y / CELL_SIZE;
int xL = (int)(e.GetCoordinate().x + e.GetLength().x) / CELL_SIZE;
int yL = (int)(e.GetCoordinate().y + e.GetLength().y) / CELL_SIZE;
//Register the entity in every grid cell its bounding box overlaps
std::vector<GridCoordinates> gridcoords;
for (int newx = x; newx <= xL; newx++)
{
for (int newy = y; newy <= yL; newy++)
{
gridcoords.push_back(GridCoordinates(newx, newy));
}
}
e.SetGrids(gridcoords);
for (int newx=x;newx <= xL; newx++)
{
for (int newy = y; newy <= yL; newy++)
{
grid[GridCoordinates(newx, newy)].push_back(e);
}
}
}
void SpatialHash::clear()
{
//Replacing the map releases every bucket; clearing local copies of the buckets (as before) had no effect
grid = Grid();
}
void SpatialHash::remove(Entity e)
{
std::vector<GridCoordinates> gridcoords = e.GetGrids();
for (auto coords : gridcoords)
{
grid[coords].erase(std::remove(grid[coords].begin(), grid[coords].end(), e), grid[coords].end());
}
}
std::vector<Entity> SpatialHash::collect(float camX, float camXWidth, float camY, float camYHeight)
{
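//Gather entities from every grid cell that overlaps the (padded) camera rectangle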
const int c = 150; //Padding so that objects don't pop
int x = ((int)camX -c)/ CELL_SIZE; if (x < 0) x = 0;
int y = ((int)camY -c) / CELL_SIZE; if (y < 0) y = 0;
int xL = ((int)camXWidth +c)/ CELL_SIZE; if (xL < 0) xL = 0;
int yL = ((int)camYHeight +c) / CELL_SIZE; if (yL < 0) yL = 0;
std::vector<Entity> objects;
for (int newx=x;newx <= xL;newx++)
{
for(int newy=y;newy<=yL;newy++)
{
auto object = grid[GridCoordinates(newx, newy)];
objects.insert(objects.end(),object.begin(),object.end());
}
}
return objects;
}
|
465a78c1074232701a7b81d4aa8d3b658b50e797
|
[
"Markdown",
"C++"
] | 18
|
C++
|
AidanWolfheart/PeasOutGamePrototype
|
5533e3b11e20d5359c441fc4179f02fa7ef33c95
|
d2d95bd9a218fdd280e3996ca75bff113146730c
|
refs/heads/master
|
<file_sep>module github.com/MaxBreida/otc-gobs
go 1.13
require (
github.com/minio/minio-go/v6 v6.0.44
golang.org/x/crypto v0.6.0 // indirect
)
<file_sep>package s3
import (
"fmt"
"io"
"net/url"
"strings"
"sync"
"time"
"github.com/minio/minio-go/v6"
)
const (
ContentTypeJSON = "application/json"
ContentTypePDF = "application/pdf"
ContentTypePNG = "image/png"
ContentTypeJPEG = "image/jpeg"
)
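// Service describes the object storage (S3/OBS) operations used by the application.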
type Service interface {
AddLifeCycleRule(ruleId, folderPath string, daysToExpiry int) error
UploadFile(path, contentType string, data io.Reader, objectSize *int64) error
GetFileUrl(path string, expiration time.Duration) (*url.URL, error)
UploadJSONFileWithLink(path string, data io.Reader, linkExpiration time.Duration) (*url.URL, error)
DownloadFile(path, localPath string) error
DownloadDirectory(path, localPath string) error
DownloadFileBytes(path string) ([]byte, error)
RemoveFile(path string) error
}
type service struct {
s3Client *minio.Client
lifeCycleRules string
bucketName string
urlValues url.Values
}
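// NewService connects to the storage endpoint over TLS and verifies that bucketName exists before returning a Service.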
func NewService(url, accessKey, accessSecret, bucketName string) (Service, error) {
s3Client, err := minio.New(url, accessKey, accessSecret, true)
if err != nil {
return nil, err
}
exists, err := s3Client.BucketExists(bucketName)
if err != nil {
return nil, err
}
if !exists {
return nil, fmt.Errorf("s3 bucket required for service (%s) doesn't exist", bucketName)
}
urlValues := make(url.Values)
urlValues.Set("response-content-disposition", "inline")
return &service{
s3Client: s3Client,
lifeCycleRules: "",
bucketName: bucketName,
urlValues: urlValues,
}, nil
}
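// AddLifeCycleRule installs a bucket lifecycle rule that expires every object under folderPath after daysToExpiry days.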
func (s *service) AddLifeCycleRule(ruleId, folderPath string, daysToExpiry int) error {
if !strings.HasSuffix(folderPath, "/") {
folderPath = folderPath + "/"
}
lifeCycleString := fmt.Sprintf(
`<LifecycleConfiguration><Rule><ID>%s</ID><Prefix>%s</Prefix><Status>Enabled</Status><Expiration><Days>%d</Days></Expiration></Rule></LifecycleConfiguration>`,
ruleId, folderPath, daysToExpiry)
return s.s3Client.SetBucketLifecycle(s.bucketName, lifeCycleString)
}
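// UploadFile streams data to path in the bucket; if objectSize is nil the size is passed as -1 (unknown).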
func (s *service) UploadFile(path, contentType string, data io.Reader, objectSize *int64) error {
size := int64(-1)
if objectSize != nil {
size = *objectSize
}
_, err := s.s3Client.PutObject(s.bucketName, path, data, size, minio.PutObjectOptions{ContentType: contentType})
return err
}
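// GetFileUrl returns a presigned GET URL for path that stays valid for the given expiration and renders inline.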
func (s *service) GetFileUrl(path string, expiration time.Duration) (*url.URL, error) {
return s.s3Client.PresignedGetObject(s.bucketName, path, expiration, s.urlValues)
}
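// UploadJSONFileWithLink uploads a JSON document to path and returns a presigned link that expires after linkExpiration.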
func (s *service) UploadJSONFileWithLink(path string, data io.Reader, linkExpiration time.Duration) (*url.URL, error) {
_, err := s.s3Client.PutObject(s.bucketName, path, data, -1, minio.PutObjectOptions{ContentType: ContentTypeJSON})
if err != nil {
return nil, err
}
// Use the caller-supplied expiration instead of a hard-coded 24 hours
return s.s3Client.PresignedGetObject(s.bucketName, path, linkExpiration, s.urlValues)
}
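// DownloadDirectory downloads every object under path into localPath concurrently, preserving file names.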
func (s *service) DownloadDirectory(path, localPath string) error {
doneCh := make(chan struct{})
defer close(doneCh)
objectCh := s.s3Client.ListObjectsV2(s.bucketName, path, true, doneCh)
// Collect the object list first so the error channel can be buffered; sending on an
// unbuffered channel before wg.Wait() returned could deadlock the downloads.
objects := []minio.ObjectInfo{}
for obj := range objectCh {
if obj.Err != nil {
return obj.Err
}
objects = append(objects, obj)
}
wg := sync.WaitGroup{}
errCh := make(chan error, len(objects))
for _, obj := range objects {
wg.Add(1)
go func(obj minio.ObjectInfo, errChan chan<- error) {
defer wg.Done()
fileName := strings.TrimPrefix(obj.Key, path+"/")
if err := s.DownloadFile(obj.Key, localPath+"/"+fileName); err != nil {
errChan <- err
}
}(obj, errCh)
}
wg.Wait()
close(errCh)
errs := []error{}
for err := range errCh {
errs = append(errs, err)
}
if len(errs) > 0 {
return fmt.Errorf("failed to download files from s3: %v", errs)
}
return nil
}
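// DownloadFile copies a single object from the bucket to localPath on disk.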
func (s *service) DownloadFile(path, localPath string) error {
return s.s3Client.FGetObject(s.bucketName, path, localPath, minio.GetObjectOptions{})
}
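// DownloadFileBytes reads an object fully into memory and returns its contents.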
func (s *service) DownloadFileBytes(path string) ([]byte, error) {
object, err := s.s3Client.GetObject(s.bucketName, path, minio.GetObjectOptions{})
if err != nil {
return nil, err
}
defer object.Close()
fileInfo, err := object.Stat()
if err != nil {
return nil, err
}
// A single Read call is not guaranteed to fill the buffer, so read until it is full.
buffer := make([]byte, fileInfo.Size)
if _, err := io.ReadFull(object, buffer); err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
return nil, err
}
return buffer, nil
}
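// RemoveFile deletes the object at path from the bucket.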
func (s *service) RemoveFile(path string) error {
return s.s3Client.RemoveObject(s.bucketName, path)
}
<file_sep># otc-gobs
golang OTC OBS sdk
|
889e119d71847dc9db88c0adad84abc45a227a47
|
[
"Go",
"Go Module",
"Markdown"
] | 3
|
Go Module
|
MaxBreida/otc-gobs
|
8b48ed3fc503ebff557ce90d429f18f1527d7ec0
|
52a26225f3db69a2130b047628b4de6d7c14475a
|
refs/heads/master
|
<file_sep>## Project designed to learn/improve skills in the languages/frameworks/tools used.
---
#### Developed with:
- docker (host redis)
- redis (save sync info cache)
- node js (api)
- neo4j graph database
#### Which is?
API that generates a neo4j database consuming the twitter API, with the intention of showing the followers / following of all users of the network.
The idea for the future is to use it to identify bots and their origins.<file_sep>'use strict';
const neo4jService = require('../services/neo4j-service');
exports.get = (req, res) => {
neo4jService.checkUserAlreadyExists(req.params.user).then(exists => {
console.log(exists);
// respond once the lookup has resolved rather than before it completes
res.json({ exists });
});
}
exports.post = (req, res) => {
Promise.all([
neo4jService.createUser('lnq_bot'),
neo4jService.createUser('haga2112')
]).then(result => {
neo4jService.createNode('lnq_bot', 'haga2112').then(test => {
console.log(test);
res.json();
});
});
}<file_sep>const env = {
redis: {
address: "192.168.99.100"
},
neo4j: {
address: "bolt://localhost:7687",
user: "neo4j",
password: "<PASSWORD>"
},
twitter: {
api_key: "<KEY>",
secret_key: "<KEY>",
url: {
sync: "https://api.twitter.com/1.1/followers/list.json",
login: "https://api.twitter.com/oauth2/token"
}
}
}
module.exports = env;<file_sep>'use strict';
const neo4j = require('neo4j-driver').v1,
redis = require('redis'),
env = require('../config/environment');
class connectorFactory {
static get neo4jDriver() {
if (!this._neo4jDriver) {
this.loadNeo4j();
}
return this._neo4jDriver;
}
static set neo4jDriver(driver) {
this._neo4jDriver = driver;
}
static loadNeo4j() {
this.neo4jDriver = neo4j.driver(env.neo4j.address, neo4j.auth.basic(env.neo4j.user, env.neo4j.password));
}
static get redisDriver() {
if (!this._redisDriver) {
this.loadRedis();
}
return this._redisDriver;
}
static set redisDriver(driver) {
this._redisDriver = driver;
}
static loadRedis() {
const rd = redis.createClient({
host: env.redis.address
});
rd.on('connect', () => {
console.log('redis connected');
});
this.redisDriver = rd;
}
}
module.exports = connectorFactory;<file_sep>'use strict';
const loginService = require('../services/login-service'),
syncService = require('../services/sync-service');
exports.get = (req, res) => {
loginService.checkAuthorizationAlreadyExists().then(async authorize => {
if (!authorize) {
console.log('genereting token');
await loginService.login();
}
loginService.getToken().then(token => {
syncService.start(token, req.params.user);
});
});
res.json();
};
exports.post = (req, res) => {
// req.app.locals.client.set('synchronized-users', `["lnq_bot", "haga2112"]`);
res.json();
}
<file_sep>'use strict';
module.exports = (app) => {
const syncController = require('../controllers/sync-controller'),
userController = require('../controllers/user-controller');
// API routes
app.route('/sync/:user')
.get(syncController.get);
app.route('/sync')
.post(syncController.post);
app.route('/user/:user')
.get(userController.get)
.post(userController.post);
};
|
6168dc8e0d3c1ca1314630d21c8063d4cf2dbfc3
|
[
"Markdown",
"JavaScript"
] | 6
|
Markdown
|
llinq/twitter4j
|
793851944d3c24b7e700d8fea3d428c7f3b76321
|
306ebb5358c83a4250fab14f8f2b025131626e1a
|
refs/heads/master
|
<file_sep>#!/bin/sh
export NOM="toto"
env | grep NOM && echo "My fake env var is set"
<file_sep># CI-CD
First look of GitHub Actions

|
8a9411629d8acab2b40e336bd36f71164b55358a
|
[
"Markdown",
"Shell"
] | 2
|
Shell
|
MiguelYann/CI-CD
|
a878b6256029caf84204384d61d77c9cd06e26bd
|
7c6e71d9aa8644c023f698df7f55d1c3b761bb7c
|
refs/heads/master
|
<repo_name>megmut/servator<file_sep>/lib/third-party/twilio.ts
/**
* Twilio API Reference: https://www.twilio.com/docs/libraries/node
*/
export default class TwilioAPI {
private monitor: any;
private accountSid: string;
private authToken: string;
private twilio: any;
private client: any;
constructor(monitor) {
this.monitor = monitor;
this.accountSid = '{{ account_sid }}'; // Your Account SID from www.twilio.com/console
this.authToken = '{{ auth_token }}'; // Your Auth Token from www.twilio.com/console
this.twilio = require('twilio');
// Use the stored credentials; the originals referenced undeclared bare identifiers
this.client = new this.twilio.RestClient(this.accountSid, this.authToken);
}
/**
* Send message using the Twilio API
* @param {string} from
* @param {string} to
* @param {string} body
*/
sendMessage(from, to, body) {
this.client.messages.create({
body: body,
to: to,
from: from
}, (err, message) => {
if (err) {
console.info('There was an issue with your Twilio request\n', err.message);
} else {
// do something with the successful request
}
});
}
}
declare var _default: (monitor: any) => any;
export default _default;
<file_sep>/bin/db/dbManager.d.ts
/// <reference path="idbSettings.d.ts" />
export default class DatabaseManager {
private core;
private _settings;
private _models;
private _client;
private _connection;
/**
* Database Manager Class Constructor
*
* @param server {Object} reference to the server class
*/
constructor(core: any, callback: any);
/**
* Attempt to make a connection to the database
*/
connect(callback: any): void;
/**
* Disconnect from the database
*/
disconnect(): void;
/**
* Listen for mongoose callbacks
*/
handleCallbacks(callback: any): void;
readonly models: any;
readonly connection: any;
}
<file_sep>/bin/monitor/monitor.d.ts
/// <reference path="iStatus.d.ts" />
/// <reference path="iMonitorSettings.d.ts" />
export default class Monitor {
status: iStatus;
settings: iMonitorSettings;
private core;
private _controller;
private _id;
private _handle;
private _interval;
constructor(core: any, config: any);
/**
* Initalise the monitor.
*/
init(): void;
/**
* Start the monitor timer.
*/
start(): void;
/**
* Stop the timer, removing the setInterval callback and changing the paused status to true
*/
stop(): void;
/**
* initaliser function to handle the requests
*/
handleRequestProcess(): void;
/**
* Return true or false if this monitor contains the tag
*
* @param {string} tag tag string to search for
*
* @returns {boolean}
*/
checkTag(tag: any): boolean;
id: number;
}
<file_sep>/lib/frontend/frontend.ts
import Core from './../index';
import * as http from 'http';
import * as express from 'express';
import * as cors from 'cors';
import bodyParser from 'body-parser';
import * as fs from 'fs';
import API from './routes/api/root';
import Dashboard from './routes/dashboard/root';
import * as moment from 'moment';
export default class Frontend {
private core:Core;
private _config:Object;
protected _app:express;
protected _listner:any;
constructor(core:Core, cb:Function) {
// hold reference for monitor class
this.core = core;
this._config = this.core.config['API'];
this.setup(cb);
}
setup(cb) {
this._app = express();
this._app.server = http.createServer(this._app);
// 3rd party middleware
this._app.use(cors({
exposedHeaders: ["Link"]
}));
// api router
this._app.use('/api', API(this.core));
this._app.use('/dashboard', Dashboard(this.core));
this._listner = this._app.server.listen(process.env.PORT || this._config['port'], () => {
//this.monitor.logger.service('Express server started. Listening on port: ' + this.config.port, moment().format());
cb();
});
}
}<file_sep>/bin/third-party/slack.js
/**
* Find the slack api calls here: https://api.slack.com/
*/
"use strict";
var slack_node_1 = require("slack-node");
var request_1 = require("request");
var moment = require("moment");
/**
* Class for making API calls to the Slack Public API
*/
var SlackAPI = (function () {
function SlackAPI(core) {
this.core = core; // only here if parent callback is wanted
this._uri = '<KEY>';
this._username = 'Servator Bot';
this._slack = new slack_node_1.default();
// try to make a connection using the uri webhook string
try {
this._slack.setWebhook(this._uri);
}
catch (e) {
console.log('Could not connect to slack webhook\n', e);
}
}
/**
* @param {string} channel preceeded by #, used to dictate which chanel in the slack webhook to post to
* @param {string} msg the body of the message to post
* @param {string} type emoji type for the bot icon in the slack chanel
*/
SlackAPI.prototype.postMessage = function (channel, msg, type) {
var _this = this;
if (type === void 0) { type = ':warning:'; }
try {
// Make a new request
request_1.default.post(this._uri, { json: { "channel": channel, "username": this._username, "text": msg, "icon_emoji": type } }, function (err, response, body) {
// if okay 200 response
if (!err && response.statusCode == 200) {
_this.core.logger.service('Slack API Post Message: ' + '{"channel": channel, "username": this.username, "text": msg, "icon_emoji": type}', moment().format());
}
else {
_this.core.logger.error('Error in Slack: ' + err, moment().format());
}
});
}
catch (err) {
// log the error
this.core.logger.error('Error in Slack: ' + err, moment().format());
}
};
return SlackAPI;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = SlackAPI;
<file_sep>/lib/third-party/email.ts
/**
* Documentation for nodemailer found here: https://github.com/nodemailer/nodemailer
*/
var nodemailer = require('nodemailer');
import Core from './../index';
import * as moment from 'moment';
import * as fs from 'fs';
export default class Email {
protected core:Core;
private _config:Object;
private _transporter;
constructor(monitor, callback) {
// keep a reference to the core class
this.core = monitor;
// get config file and parse it to a javascript object
this._config = JSON.parse(fs.readFileSync('./config/email.conf.json', 'utf8'));
// setup a new transporter
try {
this._transporter = nodemailer.createTransport(this._config['settings']);
// send a service log request
let msg = 'Email transporter initialized';
let time = moment().format();
callback();
this.core.logger.service(msg, time);
} catch (err) {
// send an error log request
let msg = 'Error in Email: ' + err;
let time = moment().format();
this.core.logger.error(msg, time);
}
}
/**
* Send an email
*
* @param {string} to single or multiple email recipients in a single string, separated by commas.
* @param {string} subject subject line of the email
* @param {Object} body containing a plain and html version of the email. Can be custom or pre-build with the builder tools.
*/
send(to, subject, body) {
var mailOptions = {
from: 'who its from',
to: to,
subject: subject,
text: body.plain,
html: body.html
};
// send mail with defined transport object
try {
this._transporter.sendMail(mailOptions, (error, info) => {
if(error){
// send an error log request
let msg = 'Error in Email: ' + error;
let time = moment().format();
this.core.logger.error(msg, time);
} else {
// send a service log request
let msg = 'Message sent: ' + info.response;
let time = moment().format();
this.core.logger.service(msg, time);
};
});
} catch(err) {
// send an error log request
let msg = 'Error in Email: ' + err;
let time = moment().format();
this.core.logger.error(msg, time);
}
}
/**
* Builds a report email. Returns an object that the send function accepts
*/
buildReport() {
return {
plain: '',
html: ''
}
}
/**
* Builds an error email template. Returns an object that the send function accepts
*/
buildError() {
return {
plain: '',
html: ''
}
}
/**
* Builds a warning email template. Returns an object that the send function accepts
*/
buildWarning() {
return {
plain: '',
html: ''
}
}
}<file_sep>/bin/utils/log.js
"use strict";
var chalk_1 = require("chalk");
var fs = require("fs");
var moment = require("moment");
/**
* Class for logging events and errors
*/
var Log = (function () {
function Log(config) {
this.config = config;
}
/**
* @param {int} logLevel used to determine the type of logging
* @param {string} msg used for the body object of the log
*/
Log.prototype.log = function (logLevel, msg) {
switch (logLevel) {
// log level 0 = no logging
case 1:
this.logLevel1(msg);
break;
case 2:
this.logLevel2(msg);
break;
case 3:
this.logLevel3(msg);
break;
case 4:
this.logLevel4(msg);
break;
case 5:
this.logLevel5(msg);
break;
}
};
/**
* Log most things to the console
* @param {Object} log json format object containing endpoint, time, status and message
*/
Log.prototype.logLevel1 = function (log) {
var _this = this;
if (log.status === 'UP') {
console.log(chalk_1.default.grey('Pinging:', log.endpoint, 'at: ', log.time));
console.log('Status:', chalk_1.default.green(log.status));
console.log('Message:', chalk_1.default.green(log.msg));
}
else {
console.log(chalk_1.default.grey('Pinging:', log.endpoint, 'at: ', log.time));
console.log('Status:', chalk_1.default.red(log.status));
console.log('Message:', chalk_1.default.red(log.msg));
}
var time = moment().format();
var data = 'Date: ' + time + ' - ' + 'Endpoint: ' + log.endpoint + ' - ' + 'Status: ' + log.status + ' - ' + 'Message: ' + log.msg;
var fileName = this.config['monitorLocation'] + '01.10.2016' + '.log';
fs.appendFile(fileName, data + '\n', function (err) {
if (err) {
_this.error(err, moment().format());
}
});
};
Log.prototype.logLevel2 = function (msg) {
};
Log.prototype.logLevel3 = function (msg) {
};
Log.prototype.logLevel4 = function (msg) {
};
Log.prototype.logLevel5 = function (msg) {
};
/**
* Log a service request
* @param {string} msg the main message of the log
* @param {string} time formatted time
*/
Log.prototype.service = function (msg, time) {
var _this = this;
var data = time + ' - ' + msg;
// append the service.log file
var fileName = this.config['serviceLocation'] + '01.10.2016' + '.log';
fs.appendFile(fileName, data + '\n', function (err) {
if (err) {
_this.error(err, moment().format());
}
});
};
/**
* Log an error
* @param {string} msg the main message of the log
* @param {string} time formatted time
*/
Log.prototype.error = function (msg, time) {
var data = time + ' - ' + msg;
// append the error.log file
var fileName = this.config['errorLocation'] + '01.10.2016' + '.log';
fs.appendFile(fileName, data + '\n', function (err) {
if (err) {
console.error('Error: Could not write to error log!', msg, moment().format());
}
});
};
return Log;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = Log;
<file_sep>/lib/db/dbManager.ts
/// <reference path="./idbSettings.ts"/>
import Core from './../index';
import mongodb from 'mongodb';
import * as mongoose from 'mongoose';
mongoose.Promise = global.Promise;
// import models
import MONITORSTATUS from './models/monitorStatus';
import RESOURCELOG from './models/resourceLog';
import * as fs from 'fs';
export default class DatabaseManager {
private core:Core;
private _settings:idbSettings;
private _models:any;
private _client:mongoose.connection;
private _connection:Object;
/**
* Database Manager Class Constructor
*
* @param server {Object} reference to the server class
*/
constructor(core, callback) {
this.core = core;
let opts = JSON.parse(fs.readFileSync('./config/database.conf.json', 'utf8'));
this._settings = {
url: opts.url,
db: opts.db,
server: opts.server
}
this._models = {
MONITORSTATUS: mongoose.model('monitorStatus', MONITORSTATUS()),
RESOURCELOG: mongoose.model('resourceLog', RESOURCELOG())
}
this.connect(callback);
}
/**
* Attempt to make a connection to the database
*/
connect(callback) {
try {
this._client = mongoose.connect(this._settings.url, this._settings);
this._connection = this._client['connection'];
this.handleCallbacks(callback);
} catch (e) {
this.core.console.log('database connection error:', e);
}
}
/**
* Disconnect from the database
*/
disconnect() {
if(this._client) {
this._client.close();
}
}
/**
* Listen for mongoose callbacks
*/
handleCallbacks(callback) {
// When successfully connected
this._client.connection.on('connected', () => {
this.core.console.log('Mongoose default connection open to ' + this._settings.url);
callback();
});
// If the connection throws an error
this._client.connection.on('error', (err) => {
this.core.console.log('Mongoose default connection error: ' + err);
});
// When the connection is disconnected
this._client.connection.on('disconnected', () => {
this.core.console.log('Mongoose default connection disconnected');
});
}
public get models() {
return this._models;
}
get connection() {
return this._client;
}
}<file_sep>/lib/utils/utils.ts
import monitorValidator from './monitorValidator';
import Monitor from './../monitor/monitor';
import Core from './../index';
export default class utils {
private core:Core;
private _monitorValidator:monitorValidator;
/**
* Constructor
*
* @param {Object} monitor - Reference to the main monitor manager class
*/
constructor(core, callback) {
this.core = core;
// make object references for easier access "this.server.utils.example"
this._monitorValidator = monitorValidator;
callback();
}
/**
* Return the total number of monitors
*
* @returns {number} monitors array length
*/
get totalMonitors() {
return this.core.monitors.length;
}
get live():number {
let count:number = 0;
for(let monitor of this.core.monitors) {
if(monitor.status.paused !== true) {
count++;
}
}
return count;
}
/**
* Returns the total number of up monitors
*
* @returns {number} number of monitors in 'UP' status
*/
get totalUp() {
let count = 0;
for(let monitor of this.core.monitors) {
if(monitor.status.isUp === true) count++;
}
return count;
}
/**
* Returns total number of down monitors
*
* @returns {number} number of monitors in 'DOWN' status
*/
get totalDown() {
let count = 0;
for(let monitor of this.core.monitors) {
if(monitor.status.isDown === true) count++;
}
return count;
}
/**
* Returns a monitor object if it can be found
*
* @param {string} name
*
* @returns {Object} monitor
*/
monitorByName(name:string):Monitor {
for(let monitor of this.core.monitors) {
if(monitor.settings.name === name) {
return monitor;
}
}
}
/**
* Filters all monitors by an array of tags. String format seperated by commas
*
* @param {array} tags
*
* @returns {array} temp
*/
filterByTags(tags) {
// create temporary array
let temp = new Array();
// iterate over every monitor
for(let monitor of this.core.monitors){
let hasBeenPulled = false;
// iterate over every tag
for(let tag of tags) {
// if the monitor has not been pushed to the temp array
// and the check tag function returns true
if(!hasBeenPulled && monitor.checkTag(tag)) {
// construct simple data structure to be returned
let data = this.constructData(monitor);
// push the data to the temp array and flag it as been pulled
temp.push(data);
hasBeenPulled = true;
}
}
}
return temp;
}
/**
* Filters all monitors by a single category String
*
* @param {string} category
*
* @returns {array} monitors
*/
filterByCategory(category) {
let temp = new Array();
for(let monitor of this.core.monitors){
if(monitor.settings.category === category) {
let data = this.constructData(monitor);
temp.push(data);
}
}
return temp;
}
/**
* Takes a monitor instance and builds a generic data object
*
* @param {Object} monitor
*
* @returns {Object} data
*/
constructData(monitor) {
let data = {
id: monitor.id,
name: monitor.name,
description: monitor.description,
status: monitor.status.status,
paused: monitor.status.paused,
isDown: monitor.status.isDown,
isUp: monitor.status.isUp
}
return data;
}
/**
* Returns human readable data size. (Note, not 100% accurate due to rounding issues)
*
* @param {number} fileSizeInBytes number of bytes to convert
*/
getReadableFileSizeString(fileSizeInBytes) {
var i = -1;
var byteUnits = [' kB', ' MB', ' GB', ' TB', 'PB', 'EB', 'ZB', 'YB'];
do {
fileSizeInBytes = fileSizeInBytes / 1024;
i++;
} while (fileSizeInBytes > 1024);
return Math.max(fileSizeInBytes, 0.1).toFixed(1) + byteUnits[i];
}
}<file_sep>/bin/controllers/reportController.js
"use strict";
var fs = require("fs");
var ReportController = (function () {
/**
* Report Controller constructor
*
* @param {Object} Monitor
*/
function ReportController(core) {
// hold a reference to the monitor class
this.core = core;
// get the config file
this._config = JSON.parse(fs.readFileSync('./config/report.conf.json', 'utf8'));
// settings for the reporting controller
this._settings = {};
}
return ReportController;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = ReportController;
<file_sep>/bin/frontend/Frontend.js
"use strict";
var http = require("http");
var express = require("express");
var cors = require("cors");
var root_1 = require("./routes/api/root");
var root_2 = require("./routes/dashboard/root");
var Frontend = (function () {
function Frontend(server, cb) {
// hold reference for monitor class
this._server = server;
this._config = this._server.config['API'];
this.setup(cb);
}
Frontend.prototype.setup = function (cb) {
this._app = express();
this._app.server = http.createServer(this._app);
// 3rd party middleware
this._app.use(cors({
exposedHeaders: ["Link"]
}));
// api router
this._app.use('/api', root_1.default(this._server));
this._app.use('/dashboard', root_2.default(this._server));
this._listner = this._app.server.listen(process.env.PORT || this._config['port'], function () {
//this.monitor.logger.service('Express server started. Listening on port: ' + this.config.port, moment().format());
cb();
});
};
return Frontend;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = Frontend;
<file_sep>/bin/utils/queue.js
/**
* Util class for queueing up functions and calling them in async pattern.
*/
"use strict";
var Queue = (function () {
function Queue() {
// set starting position to -1. The first time you call the next function, it will increment
// the current index by 1, thus throwing it ahead of it's actual position by 1.
this._currentIndex = -1;
this._running = false;
this._queue = [];
// make an empty function to call when the queue is complete
this._onComplete = function () { };
}
/**
* Add a function to the current queue.
*
* @param {function} fnct a single function to be added into the queue
* @param {Array} fnct an array of functions to be added into the queue
*/
Queue.prototype.add = function (fnct) {
var _this = this;
if (fnct instanceof Array) {
var _loop_1 = function (i) {
this_1._queue.push(function () { i(function () { _this.next(); }); });
};
var this_1 = this;
for (var _i = 0, fnct_1 = fnct; _i < fnct_1.length; _i++) {
var i = fnct_1[_i];
_loop_1(i);
}
}
else if (typeof fnct === 'function') {
this._queue.push(function () { fnct(function () { _this.next(); }); });
}
};
/**
* Called to start the queue process
*/
Queue.prototype.start = function () {
this._running = true;
this.next();
};
/**
* Main update of the class. This function is passed as a callback to each queued function.
* The queued function will trigger the callback when the function is happy to move onto the next one
*/
Queue.prototype.next = function () {
var _this = this;
// increment the current index
this._currentIndex++;
        // if there are no more functions left, trigger the onComplete function.
if (this._currentIndex > this._queue.length - 1) {
this.onComplete();
}
// sanity check to make sure the next object in the queue is a function
if (typeof this._queue[this._currentIndex] === 'function') {
// call the function, and pass this function as the callback
this._queue[this._currentIndex](function () {
_this.next();
});
}
};
Object.defineProperty(Queue.prototype, "length", {
/**
* Getter to return the number of functions in the queue
*
* @returns {number}
*/
get: function () {
return this._queue.length;
},
enumerable: true,
configurable: true
});
Object.defineProperty(Queue.prototype, "running", {
/**
* Getter to return the status of the queue
*
* @returns {boolean}
*/
get: function () {
return this._running;
},
enumerable: true,
configurable: true
});
Object.defineProperty(Queue.prototype, "onComplete", {
get: function () {
return this._onComplete;
},
set: function (val) {
this._onComplete = val;
},
enumerable: true,
configurable: true
});
return Queue;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = Queue;
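// Example usage (illustrative sketch added for documentation, not part of the original source;
// the variable "q" and the step callbacks below are hypothetical):
//
//   const q = new Queue();
//   q.add((done) => { setTimeout(done, 100); });              // single function
//   q.add([(done) => { done(); }, (done) => { done(); }]);    // array of functions
//   q.onComplete = () => { console.log('queue finished'); };
//   q.start();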
<file_sep>/lib/controllers/reportController.ts
import * as fs from 'fs';
import Core from './../index';
export default class ReportController {
private core:Core;
private _config:Object;
private _settings:Object;
/**
* Report Controller constructor
*
     * @param {Object} core reference to the main Core class
*/
constructor(core:Core) {
// hold a reference to the monitor class
this.core = core;
// get the config file
this._config = JSON.parse(fs.readFileSync('./config/report.conf.json', 'utf8'));
// settings for the reporting controller
this._settings = {
};
}
}<file_sep>/README.md
[](https://travis-ci.org/megmut/servator)
[](https://coveralls.io/r/megmut/servator?branch=master)
[](https://david-dm.org/megmut/servator.svg)
[](https://david-dm.org/servatorgit#info=devDependencies)
# JSON Utils - Node.js
A collection of JSON utils for Node.js
#### Features
- Configuration file validation
- File update automation
- Deep JSON comparison
## Getting Started
Go ahead and install via npm
```
$ npm install json-utils
```
### Prerequisites
The only prerequisite is an installed version of Node.js
> note: download using a package manager here: https://nodejs.org/en/download/package-manager/
### Importing / Requiring
The library is written in TypeScript, but compiled to ES5. To use it in any project, simply include the module.
```js
// CommonJS
var jutils = require('jutils');
// ES6 (ECMAScript 2015)
import jutils from 'jutils';
// TypeScript
import * as jutils from 'jutils';
```
> note: as of TypeScript 1.8.0+, TypeScript will automatically check the node_modules folder for the type definitions. "TypeScript overlays the TypeScript source file extensions (.ts, .tsx, and .d.ts) over the Node’s resolution logic. TypeScript will also use a field in package.json named "typings" to mirror the purpose of "main" - the compiler will use it to find the “main” definition file to consult." Taken from https://www.typescriptlang.org/docs/handbook/module-resolution.html
## Usage
File Creation
```
// param1 - path to the file
// param2 - data object (optional) to write on creation
// param3 - overwrite the file if it already exists
// param4 - callback function
jutils.create('myPath', null, true, function(created){});
```
File Deletion
```
// param1 - path to the file
// param2 - callback function
jutils.delete('myPath', function(deleted){});
```
File Read
```
// param1 - path to the file
// param2 - callback function
jutils.read('myPath', function(data){});
```
File Exists
```
// param1 - path to the file
// param2 - callback function
jutils.exists('myPath', function(result){});
```
Is Valid JSON
```
// param1 - object to validate
jutils.isValid({...});
```
Is Valid Object
```
// param1 - object to validate
jutils.isObject({...});
```
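Putting it together, a minimal end-to-end sketch using only the calls documented above (the path `./config/example.json`, the sample data object and the callback argument shapes are illustrative assumptions):
```js
var jutils = require('jutils');
// create the file if it does not already exist, then read it back and validate the contents
jutils.create('./config/example.json', { retries: 3 }, false, function (created) {
    jutils.read('./config/example.json', function (data) {
        if (jutils.isValid(data)) {
            console.log('config loaded:', data);
        }
    });
});
```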
## Documentation
This project uses typedoc to auto-generate documentation. See more at https://github.com/TypeStrong/typedoc.
When generated, the relevant files will reside in the docs/ folder. To run the documentation generator, simply run:
```
$ npm run docs
```
## Authors
* **<NAME>**
See also the list of [contributors](https://github.com/your/project/contributors) who participated in this project.
## License
This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details
## Acknowledgments
* A big thank you to the guys at Microsoft for Typescript
<file_sep>/bin/utils/utils.js
"use strict";
var monitorValidator_1 = require("./monitorValidator");
var utils = (function () {
/**
* Constructor
*
     * @param {Object} core - Reference to the main monitor manager class
*/
function utils(core, callback) {
this.core = core;
// make object references for easier access "this.server.utils.example"
this._monitorValidator = monitorValidator_1.default;
callback();
}
Object.defineProperty(utils.prototype, "totalMonitors", {
/**
* Return the total number of monitors
*
* @returns {number} monitors array length
*/
get: function () {
return this.core.monitors.length;
},
enumerable: true,
configurable: true
});
Object.defineProperty(utils.prototype, "totalUp", {
/**
* Returns the total number of up monitors
*
* @returns {number} number of monitors in 'UP' status
*/
get: function () {
var count = 0;
for (var _i = 0, _a = this.core.monitors; _i < _a.length; _i++) {
var monitor = _a[_i];
if (monitor.status.isUp === true)
count++;
}
return count;
},
enumerable: true,
configurable: true
});
Object.defineProperty(utils.prototype, "totalDown", {
/**
* Returns total number of down monitors
*
         * @returns {number} number of monitors in 'DOWN' status
*/
get: function () {
var count = 0;
for (var _i = 0, _a = this.core.monitors; _i < _a.length; _i++) {
var monitor = _a[_i];
if (monitor.status.isDown === true)
count++;
}
return count;
},
enumerable: true,
configurable: true
});
/**
* Returns a monitor object if it can be found
*
* @param {string} name
*
* @returns {Object} monitor
*/
utils.prototype.monitorByName = function (name) {
for (var _i = 0, _a = this.core.monitors; _i < _a.length; _i++) {
var monitor = _a[_i];
if (monitor.settings.name === name) {
return monitor;
}
}
};
/**
     * Filters all monitors by an array of tags. String format separated by commas
*
* @param {array} tags
*
* @returns {array} temp
*/
utils.prototype.filterByTags = function (tags) {
// create temporary array
var temp = new Array();
        // iterate over every monitor
for (var _i = 0, _a = this.core.monitors; _i < _a.length; _i++) {
var monitor = _a[_i];
var hasBeenPulled = false;
            // iterate over every tag
for (var _b = 0, tags_1 = tags; _b < tags_1.length; _b++) {
var tag = tags_1[_b];
// if the monitor has not been pushed to the temp array
// and the check tag function returns true
if (!hasBeenPulled && monitor.checkTag(tag)) {
// construct simple data structure to be returned
var data = this.constructData(monitor);
// push the data to the temp array and flag it as been pulled
temp.push(data);
hasBeenPulled = true;
}
}
}
return temp;
};
/**
* Filters all monitors by a single category String
*
* @param {string} category
*
* @returns {array} monitors
*/
utils.prototype.filterByCategory = function (category) {
var temp = new Array();
for (var _i = 0, _a = this.core.monitors; _i < _a.length; _i++) {
var monitor = _a[_i];
if (monitor.settings.category === category) {
var data = this.constructData(monitor);
temp.push(data);
}
}
return temp;
};
/**
* Takes a monitor instance and builds a generic data object
*
* @param {Object} monitor
*
* @returns {Object} data
*/
utils.prototype.constructData = function (monitor) {
var data = {
id: monitor.id,
name: monitor.name,
description: monitor.description,
status: monitor.status.status,
paused: monitor.status.paused,
isDown: monitor.status.isDown,
isUp: monitor.status.isUp
};
return data;
};
/**
* Returns human readable data size. (Note, not 100% accurate due to rounding issues)
*
* @param {number} fileSizeInBytes number of bytes to convert
*/
utils.prototype.getReadableFileSizeString = function (fileSizeInBytes) {
var i = -1;
var byteUnits = [' kB', ' MB', ' GB', ' TB', 'PB', 'EB', 'ZB', 'YB'];
do {
fileSizeInBytes = fileSizeInBytes / 1024;
i++;
} while (fileSizeInBytes > 1024);
return Math.max(fileSizeInBytes, 0.1).toFixed(1) + byteUnits[i];
};
return utils;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = utils;
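// Example usage (illustrative sketch added for documentation, not part of the original source;
// "core" stands for an initialised Core instance exposing this class as core.utils):
//
//   core.utils.totalUp;                              // -> number of monitors currently up
//   core.utils.filterByTags(['tag1', 'tag3']);       // -> [{ id, name, description, status, paused, isDown, isUp }, ...]
//   core.utils.filterByCategory('general');          // -> same data shape, filtered by settings.category
//   core.utils.getReadableFileSizeString(1536000);   // -> "1.5 MB"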
<file_sep>/bin/frontend/Frontend.d.ts
import Core from './../index';
import * as express from 'express';
export default class Frontend {
private _server;
private _config;
protected _app: express;
protected _listner: any;
constructor(server: Core, cb: Function);
setup(cb: any): void;
}
<file_sep>/lib/frontend/routes/api/live.ts
import { Router } from 'express';
export default (monitor) => {
let api = Router();
/**
* search function to find a monitor
*/
    api.param('monitorName', function(request, response, next, monitorName) {
});
/**
* Used to force the monitor to make a live request and return the status
*/
api.get('/:monitorName/status', (req, res) => {
res.json({health: 'OK', route: 'dashboard'});
});
return api;
}<file_sep>/lib/utils/queue.ts
/**
* Util class for queueing up functions and calling them in async pattern.
*/
export default class Queue {
private _currentIndex:number;
private _running:boolean;
private _queue:Array<Function>;
private _onComplete:Function;
constructor() {
// set starting position to -1. The first time you call the next function, it will increment
        // the current index by 1, thus throwing it ahead of its actual position by 1.
this._currentIndex = -1;
this._running = false;
this._queue = [];
// make an empty function to call when the queue is complete
this._onComplete = () => {};
}
/**
* Add a function to the current queue.
*
* @param {function} fnct a single function to be added into the queue
* @param {Array} fnct an array of functions to be added into the queue
*/
add(fnct) {
if(fnct instanceof Array) {
for(let i of fnct) {
this._queue.push(() => { i(() => { this.next(); }); });
}
} else if(typeof fnct === 'function') {
this._queue.push(() => { fnct(() => { this.next(); }); });
}
}
/**
* Called to start the queue process
*/
start() {
this._running = true;
this.next();
}
/**
* Main update of the class. This function is passed as a callback to each queued function.
* The queued function will trigger the callback when the function is happy to move onto the next one
*/
next() {
// increment the current index
this._currentIndex++;
        // if there are no more functions left, trigger the onComplete function.
if(this._currentIndex > this._queue.length -1) {
this.onComplete();
}
// sanity check to make sure the next object in the queue is a function
if(typeof this._queue[this._currentIndex] === 'function') {
// call the function, and pass this function as the callback
this._queue[this._currentIndex](() => {
this.next();
});
}
}
/**
* Getter to return the number of functions in the queue
*
* @returns {number}
*/
get length() {
return this._queue.length;
}
/**
* Getter to return the status of the queue
*
* @returns {boolean}
*/
get running() {
return this._running;
}
get onComplete():Function {
return this._onComplete;
}
set onComplete(val:Function) {
this._onComplete = val;
}
}<file_sep>/bin/db/models/monitorStatus.js
"use strict";
var mongoose = require("mongoose");
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = function () {
var schema = new mongoose.Schema({
date: Date,
name: String,
status: Number,
statusValue: String,
statusData: Object,
httpRequestTime: Number
});
return schema;
};
<file_sep>/lib/third-party/slack.ts
/**
* Find the slack api calls here: https://api.slack.com/
*/
import Slack from 'slack-node';
import Request from 'request';
import * as moment from 'moment';
import Core from './../index';
/**
* Class for making API calls to the Slack Public API
*/
export default class SlackAPI {
private core:Core;
private _uri:string;
private _username:string;
private _slack:any;
constructor(core) {
this.core = core; // only here if parent callback is wanted
this._uri = '<KEY>';
this._username = 'Servator Bot';
this._slack = new Slack();
// try to make a connection using the uri webhook string
try {
this._slack.setWebhook(this._uri);
// this.postMessage('#servator', 'Service has gone down!', ':bomb:');
} catch (e) {
console.log('Could not connect to slack webhook\n', e);
}
}
/**
     * @param {string} channel preceded by #, used to dictate which channel in the Slack webhook to post to
     * @param {string} msg the body of the message to post
     * @param {string} type emoji type for the bot icon in the Slack channel
*/
postMessage(channel, msg, type = ':warning:') {
try {
// Make a new request
Request.post(
this._uri,
{ json: {"channel": channel, "username": this._username, "text": msg, "icon_emoji": type}},
(err, response, body) => {
// if okay 200 response
if (!err && response.statusCode == 200) {
this.core.logger.service('Slack API Post Message: ' + '{"channel": channel, "username": this.username, "text": msg, "icon_emoji": type}', moment().format())
// else log the error
} else {
this.core.logger.error('Error in Slack: ' + err, moment().format());
}
}
);
} catch (err) {
// log the error
this.core.logger.error('Error in Slack: ' + err, moment().format());
}
}
}<file_sep>/lib/utils/monitorValidator.ts
import * as fs from 'fs';
export default class monitorValidator {
static validate(server, monitor, config) {
// read the default configuration file to set the values if they aren't available in the monitor config file
let defaults = JSON.parse(fs.readFileSync('./config/defaults/monitor.conf.json', 'utf8'));
// create an error flag
let errorFlag = false;
// double check that the validator has valid JSON to work with
        if(typeof config === 'undefined' || config === null) {
errorFlag = true;
}
// configure the name associated with this monitor
if(typeof config.name !== 'undefined') {
monitor.name = config.name;
} else {
monitor.name = defaults.name;
}
        // configure the description associated with this monitor
if(typeof config.description !== 'undefined') {
monitor.description = config.description;
} else {
monitor.description = defaults.description;
}
// configure the category associated with this monitor
if(typeof config.category !== 'undefined') {
monitor.category = config.category;
} else {
monitor.category = defaults.category;
}
        // configure the tags associated with this monitor
if(typeof config.tags !== 'undefined') {
monitor.tags = config.tags;
} else {
monitor.tags = defaults.tags;
}
        // if this monitor type is a ping request
if(typeof config.ping !== 'undefined') {
monitor.ping = config.ping;
} else {
monitor.ping = defaults.ping;
}
// if this monitor type is a http request
if(typeof config.http !== 'undefined') {
monitor.http = config.http;
} else {
monitor.http = defaults.http;
}
if(typeof config.httpHeaders !== 'undefined') {
monitor.httpHeaders = config.httpHeaders;
} else {
monitor.httpHeaders = defaults.httpHeaders;
}
// the endpoint IP / Domain
if(typeof config.endpoint !== 'undefined') {
monitor.endpoint = config.endpoint;
} else {
monitor.endpoint = defaults.endpoint;
}
// the interval for testing an endpoint
if(typeof config.interval !== 'undefined' ) {
monitor.interval = config.interval;
} else {
monitor.interval = defaults.interval;
}
// is the monitor in a paused state
        if(typeof config.paused !== 'undefined' ) {
monitor.status.paused = config.paused;
} else {
monitor.status.paused = defaults.paused;
}
        // set the threshold for slow responses
if(typeof config.threshold !== 'undefined') {
monitor.threshold = config.threshold;
} else {
monitor.threshold = {
"enabled": defaults.threshold.enabled,
"ok": defaults.threshold.ok,
"slow": defaults.threshold.slow
}
}
// configure the on up procedure parameters
if(typeof config.onUp !== 'undefined') {
monitor.onUp = config.onUp;
} else {
monitor.onUp = defaults.onUp;
}
// configure the on down procedure parameters
if(typeof config.onDown !== 'undefined') {
monitor.onDown = config.onDown;
} else {
monitor.onDown = defaults.onDown;
}
        // configure the report parameters
        if(typeof config.reports !== 'undefined') {
            monitor.reports = config.reports;
        } else {
            monitor.reports = defaults.reports;
        }
// indicates the logging level
if(typeof config.logLevel !== 'undefined') {
monitor.logLevel = config.logLevel;
} else {
monitor.logLevel = defaults.logLevel;
}
// set the priority of this monitor : 1 = critical, 2 = high, 3 = medium, 4 = low, 5 = insignificant
if(typeof config.priority !== 'undefined') {
monitor.priority = config.priority;
} else {
monitor.priority = defaults.priority;
}
if(errorFlag) {
return {err: {}};
} else {
return true;
}
}
}<file_sep>/lib/frontend/routes/api/service.ts
import { Router } from 'express';
import Core from './../../../index';
export default (core:Core) => {
let api = Router();
api.get('/', (req, res) => {
res.json({health: 'OK', route: 'service'});
});
api.get('/cpu', (req, res) => {
let model = core.database.models.RESOURCELOG;
model.find({}, (err, result) => {
result = result[0].toObject();
res.send({date: result.date, cpu: result.cpu});
}).limit(1).sort({$natural:-1});
});
api.get('/memory', (req, res) => {
let model = core.database.models.RESOURCELOG;
model.find({}, (err, result) => {
result = result[0].toObject();
res.send({date: result.date, memory: result.mem});
}).limit(1).sort({$natural:-1});
});
api.get('/uptime', (req, res) => {
res.json({time: Date.now() - core.resource.uptime});
});
return api;
}<file_sep>/lib/frontend/routes/dashboard/root.ts
import { Router } from 'express';
// import routes
import Index from './index';
export default (server) => {
server = server;
let api = Router();
// perhaps expose some API metadata at the root
api.get('/', (req, res) => {
res.json({health: 'OK', version: '1.0.7'});
});
api.use('/', Index());
return api;
}<file_sep>/bin/monitor/iStatus.d.ts
interface iStatus {
status: string;
paused: boolean;
isDown: boolean;
isUp: boolean;
lastNotification: number;
performance: number;
}
<file_sep>/lib/third-party/hipchat.ts
/**
* Make sure you have an Admin API Key: https://www.hipchat.com/admin/api
* API documentation can be found here: https://www.hipchat.com/docs/apiv2
*/
import * as hipchat from 'hipchatter';
export default class HipChatAPI {
private _key:string;
private _token:string;
private _hc:any;
constructor() {
this._key = '';
this._token = '';
try {
this._hc = new hipchat(this._key);
} catch (e) {
console.log('Could not connect to hipchat api\n', e);
}
}
postMessage(room, message, color, token) {
let params = {
message: message,
color: color,
token: token
};
this._hc.postMessage(room, params, (err) => {
if (err){
console.info('There was an issue posting to the hipchat room');
} else {
console.log('Successfully notified hipchat room')
}
});
}
}<file_sep>/bin/utils/utils.d.ts
import Monitor from './../monitor/monitor';
export default class utils {
private core;
private _monitorValidator;
/**
* Constructor
*
     * @param {Object} core - Reference to the main monitor manager class
*/
constructor(core: any, callback: any);
/**
* Return the total number of monitors
*
* @returns {number} monitors array length
*/
readonly totalMonitors: number;
/**
* Returns the total number of up monitors
*
* @returns {number} number of monitors in 'UP' status
*/
readonly totalUp: number;
/**
* Returns total number of down monitors
*
     * @returns {number} number of monitors in 'DOWN' status
*/
readonly totalDown: number;
/**
* Returns a monitor object if it can be found
*
* @param {string} name
*
* @returns {Object} monitor
*/
monitorByName(name: string): Monitor;
/**
     * Filters all monitors by an array of tags. String format separated by commas
*
* @param {array} tags
*
* @returns {array} temp
*/
filterByTags(tags: any): any[];
/**
* Filters all monitors by a single category String
*
* @param {string} category
*
* @returns {array} monitors
*/
filterByCategory(category: any): any[];
/**
* Takes a monitor instance and builds a generic data object
*
* @param {Object} monitor
*
* @returns {Object} data
*/
constructData(monitor: any): {
id: any;
name: any;
description: any;
status: any;
paused: any;
isDown: any;
isUp: any;
};
/**
* Returns human readable data size. (Note, not 100% accurate due to rounding issues)
*
* @param {number} fileSizeInBytes number of bytes to convert
*/
getReadableFileSizeString(fileSizeInBytes: any): string;
}
<file_sep>/lib/monitor/iMonitorSettings.ts
interface iMonitorSettings {
name:string,
description:string,
category: string,
tags: Array<string>,
endpoint:string,
ping:boolean,
http:boolean,
httpHeaders: {
enabled:boolean,
up:string,
upVal:string,
down:string,
downVal:string
},
interval:number,
onUp: {
remainderInterval:number,
mailingList:Array<string>,
slack:boolean,
hipchat:boolean,
email:boolean,
twilio:boolean,
whatsapp:boolean
},
onDown: {
remainderInterval:number,
mailingList:Array<string>,
slack:boolean,
hipchat:boolean,
email:boolean,
twilio:boolean,
whatsapp:boolean
},
paused:boolean,
threshold: {
enabled:boolean,
ok:number,
slow:number
},
reports: {
daily:boolean,
weekly:boolean,
monthly:boolean,
yearly:boolean,
mailingList:Array<string>
},
logLevel:number,
priority:number
}<file_sep>/lib/index.ts
// import API entry
import Frontend from './frontend/Frontend';
// import database modules
import mongodb from 'mongodb';
//import resource watcher
import resourceWatcher from './utils/resourceWatcher';
// import controllers
import MonitorController from './controllers/monitorController';
import ReportController from './controllers/reportController';
// import monitor modules
import chalk from 'chalk';
import Email from './third-party/email';
import SlackAPI from './third-party/slack';
import HipChatAPI from './third-party/hipchat';
import * as moment from 'moment';
import Log from './utils/log';
import Queue from './utils/queue';
import Utils from './utils/utils';
import LogHandler from './utils/logHandler';
// import custom endpoints to monitor
import Monitor from './monitor/monitor';
// import database manager
import DatabaseManager from './db/dbManager';
import * as fs from 'fs';
interface iConfig {
}
export default class Core {
/**
* Construct the monitor manager class
*/
protected _logger:Log;
protected _config:Object;
protected _console:LogHandler;
protected _database:DatabaseManager;
protected _utils:Utils;
protected _frontend:Frontend;
protected _mail:Email;
protected _resourceWatcher:resourceWatcher
protected _monitors:Array<Monitor>;
constructor() {
let queue = new Queue();
queue.onComplete = () => {
this.initaliseMonitors();
}
queue.add([
(cb) => {
fs.readFile('./config/servator.conf.json', 'utf8', (err, data) => {
if (err) throw err;
this._config = JSON.parse(data);
this._logger = new Log(this._config['logs']);
cb();
});
},
(cb) => {
this._logger.service('Starting Servator', moment().format());
cb();
},
(cb) => {
this._console = new LogHandler(this, cb);
},
(cb) => {
this._database = new DatabaseManager(this, cb);
},
(cb) => {
this._utils = new Utils(this, cb);
},
(cb) => {
this._frontend = new Frontend(this, cb);
},
(cb) => {
this._resourceWatcher = new resourceWatcher(this, cb);
},
(cb) => {
this._mail = new Email(this, cb);
}
]);
// // start the third party services
// this.slack = new SlackAPI(this);
// // this.hipchat = new HipChatAPI(this);
queue.start();
}
/**
     * Sets up a new instance of Monitor for every entry in the ./monitors/ directory
*/
initaliseMonitors() {
this._monitors = [];
let dir = './monitors/';
try {
fs.readdir(dir, (err, files) => {
if(err) {
throw err;
}
files.forEach(file => {
// hack whilst json CDF under construction
if(file !== '.DS_Store') {
let instance = new Monitor(this, {});
this._monitors.push(instance);
}
});
});
} catch (e) {
console.log('error', e)
}
}
public get config():Object {
return this._config;
}
public get monitors():Array<Monitor> {
return this._monitors;
}
public get logger():Log {
return this._logger;
}
public get utils():Utils {
return this._utils;
}
public get console():LogHandler {
return this._console;
}
public get database():DatabaseManager {
return this._database;
}
public get resource():resourceWatcher {
return this._resourceWatcher;
}
}
new Core();
// implement a flag to start all monitors in unpaused mode, or read the config and start in paused if it was saved like that previously<file_sep>/bin/utils/log.d.ts
/**
* Class for logging events and errors
*/
export default class Log {
private config;
constructor(config: any);
/**
* @param {int} logLevel used to determine the type of logging
* @param {string} msg used for the body object of the log
*/
log(logLevel: any, msg: any): void;
/**
* Log most things to the console
* @param {Object} log json format object containing endpoint, time, status and message
*/
logLevel1(log: any): void;
logLevel2(msg: any): void;
logLevel3(msg: any): void;
logLevel4(msg: any): void;
logLevel5(msg: any): void;
/**
* Log a service request
* @param {string} msg the main message of the log
* @param {string} time formatted time
*/
service(msg: any, time: any): void;
/**
* Log an error
* @param {string} msg the main message of the log
* @param {string} time formatted time
*/
error(msg: any, time: any): void;
}
<file_sep>/lib/third-party/whatsapp.ts
/**
* API for connecting to and pushing notifications through whatsapp
*/
export default class Whatsapp {
/**
*
*/
constructor() {
}
}<file_sep>/lib/third-party/jira.ts
/**
 * Documentation for JIRA integration found here: https://developer.atlassian.com/jiradev/jira-apis/jira-rest-apis/jira-rest-api-tutorials/jira-rest-api-version-2-tutorial
*/
import JiraClient from 'jira-connector';
export default class JiraAPI {
constructor(monitor) {
this.monitor = monitor;
try {
this.jira = new JiraClient({
host: 'jenjinstudios.atlassian.net',
oauth: {
consumer_key: 'your-consumer-key',
private_key: '-----BEGIN RSA PRIVATE KEY-----\n' +
'SomePrivateKey\n' +
'-----END RSA PRIVATE KEY-----',
token: 'your-access-token',
token_secret: 'your-token-secret'
}
});
} catch (e) {
            console.log('There was an issue trying to connect or authenticate with Jira');
}
}
/**
* Example data for create issue json:
"fields": {
"project":
{
"key": "TEST"
},
"summary": "REST ye merry gentlemen.",
"description": "Creating of an issue using project keys and issue type names using the REST API",
"issuetype": {
"name": "Bug"
}
}
*/
createIssue(data) {
this.jira.issue.createIssue({
data
}, (err, issue) => {
if(err) {
console.info('There was an issue trying to create a new issue in Jira\n', err);
} else {
// do something with the success message
}
})
}
}<file_sep>/bin/third-party/hipchat.js
/**
* Make sure you have an Admin API Key: https://www.hipchat.com/admin/api
* API documentation can be found here: https://www.hipchat.com/docs/apiv2
*/
"use strict";
var hipchat = require("hipchatter");
var HipChatAPI = (function () {
function HipChatAPI() {
this._key = '';
this._token = '';
try {
this._hc = new hipchat(this._key);
}
catch (e) {
console.log('Could not connect to hipchat api\n', e);
}
}
HipChatAPI.prototype.postMessage = function (room, message, color, token) {
var params = {
message: message,
color: color,
token: token
};
this._hc.postMessage(room, params, function (err) {
if (err) {
console.info('There was an issue posting to the hipchat room');
}
else {
console.log('Successfully notified hipchat room');
}
});
};
return HipChatAPI;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = HipChatAPI;
<file_sep>/bin/third-party/hipchat.d.ts
export default class HipChatAPI {
constructor();
postMessage(room: any, message: any, color: any, token: any): void;
}
<file_sep>/bin/frontend/routes/api/live.js
"use strict";
var express_1 = require("express");
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = function (monitor) {
var api = express_1.Router();
/**
* search function to find a monitor
*/
    api.param('monitorName', function (request, response, next, monitorName) {
});
/**
* Used to force the monitor to make a live request and return the status
*/
api.get('/:monitorName/status', function (req, res) {
res.json({ health: 'OK', route: 'dashboard' });
});
return api;
};
<file_sep>/lib/db/models/monitorStatus.ts
import * as mongoose from 'mongoose';
export default () => {
let schema = new mongoose.Schema({
date: Date,
uid: Number,
name: String,
up: Boolean,
down: Boolean,
status: String,
statusValue: String,
statusData: String,
statusCode: Object,
performance: Number
});
return schema;
}<file_sep>/bin/utils/monitorValidator.js
"use strict";
var fs = require("fs");
var monitorValidator = (function () {
function monitorValidator() {
}
monitorValidator.validate = function (server, monitor, config) {
// read the default configuration file to set the values if they aren't available in the monitor config file
var defaults = JSON.parse(fs.readFileSync('./config/defaults/monitor.conf.json', 'utf8'));
// create an error flag
var errorFlag = false;
// double check that the validator has valid JSON to work with
        if (typeof config === 'undefined' || config === null) {
errorFlag = true;
}
// configure the name associated with this monitor
if (typeof config.name !== 'undefined') {
monitor.name = config.name;
}
else {
monitor.name = defaults.name;
}
        // configure the description associated with this monitor
if (typeof config.description !== 'undefined') {
monitor.description = config.description;
}
else {
monitor.description = defaults.description;
}
// configure the category associated with this monitor
if (typeof config.category !== 'undefined') {
monitor.category = config.category;
}
else {
monitor.category = defaults.category;
}
        // configure the tags associated with this monitor
if (typeof config.tags !== 'undefined') {
monitor.tags = config.tags;
}
else {
monitor.tags = defaults.tags;
}
        // if this monitor type is a ping request
if (typeof config.ping !== 'undefined') {
monitor.ping = config.ping;
}
else {
monitor.ping = defaults.ping;
}
// if this monitor type is a http request
if (typeof config.http !== 'undefined') {
monitor.http = config.http;
}
else {
monitor.http = defaults.http;
}
if (typeof config.httpHeaders !== 'undefined') {
monitor.httpHeaders = config.httpHeaders;
}
else {
monitor.httpHeaders = defaults.httpHeaders;
}
// the endpoint IP / Domain
if (typeof config.endpoint !== 'undefined') {
monitor.endpoint = config.endpoint;
}
else {
monitor.endpoint = defaults.endpoint;
}
// the interval for testing an endpoint
if (typeof config.interval !== 'undefined') {
monitor.interval = config.interval;
}
else {
monitor.interval = defaults.interval;
}
// is the monitor in a paused state
        if (typeof config.paused !== 'undefined') {
monitor.status.paused = config.paused;
}
else {
monitor.status.paused = defaults.paused;
}
        // set the threshold for slow responses
if (typeof config.threshold !== 'undefined') {
monitor.threshold = config.threshold;
}
else {
monitor.threshold = {
"enabled": defaults.threshold.enabled,
"ok": defaults.threshold.ok,
"slow": defaults.threshold.slow
};
}
// configure the on up procedure parameters
if (typeof config.onUp !== 'undefined') {
monitor.onUp = config.onUp;
}
else {
monitor.onUp = defaults.onUp;
}
// configure the on down procedure parameters
if (typeof config.onDown !== 'undefined') {
monitor.onDown = config.onDown;
}
else {
monitor.onDown = defaults.onDown;
}
        // configure the report parameters
        if (typeof config.reports !== 'undefined') {
            monitor.reports = config.reports;
        }
        else {
            monitor.reports = defaults.reports;
        }
// indicates the logging level
if (typeof config.logLevel !== 'undefined') {
monitor.logLevel = config.logLevel;
}
else {
monitor.logLevel = defaults.logLevel;
}
// set the priority of this monitor : 1 = critical, 2 = high, 3 = medium, 4 = low, 5 = insignificant
if (typeof config.priority !== 'undefined') {
monitor.priority = config.priority;
}
else {
monitor.priority = defaults.priority;
}
if (errorFlag) {
return { err: {} };
}
else {
return true;
}
};
return monitorValidator;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = monitorValidator;
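// Illustrative shape of a monitor config object accepted by validate() above (keys inferred from
// the checks in this file, values are examples only; the real fallbacks live in
// ./config/defaults/monitor.conf.json):
//
//   {
//     "name": "my-service",
//     "description": "Example monitor",
//     "category": "general",
//     "tags": ["api", "critical"],
//     "endpoint": "https://example.com/health",
//     "ping": false,
//     "http": true,
//     "httpHeaders": { "enabled": false, "up": "x-custom", "upVal": "ok", "down": "x-custom", "downVal": "down" },
//     "interval": 5,
//     "paused": false,
//     "threshold": { "enabled": true, "ok": 300, "slow": 1000 },
//     "onUp": { ... },
//     "onDown": { ... },
//     "reports": { ... },
//     "logLevel": 1,
//     "priority": 1
//   }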
<file_sep>/bin/third-party/slack.d.ts
/**
* Class for making API calls to the Slack Public API
*/
export default class SlackAPI {
constructor(monitor: any);
/**
     * @param {string} channel preceded by #, used to dictate which channel in the Slack webhook to post to
     * @param {string} msg the body of the message to post
     * @param {string} type emoji type for the bot icon in the Slack channel
*/
postMessage(channel: any, msg: any, type?: string): void;
}
<file_sep>/bin/utils/monitorValidator.d.ts
export default class monitorValidator {
static validate(server: any, monitor: any, config: any): true | {
err: {};
};
}
<file_sep>/lib/frontend/routes/api/root.ts
import { Router } from 'express';
// import routes
import Main from './main';
import Service from './service';
import Monitor from './monitor';
import Live from './live';
export default (core) => {
let api = Router();
// perhaps expose some API metadata at the root
api.get('/', (req, res) => {
res.json({health: 'OK', version: '1.0.7'});
});
api.use('/main', Main(core));
api.use('/service', Service(core));
api.use('/monitor', Monitor(core));
api.use('/live', Live(core));
return api;
}<file_sep>/lib/controllers/databaseController.ts
export default class databaseController {
constructor() {
}
}<file_sep>/bin/db/idbSettings.d.ts
interface idbSettings {
url: String;
db: {
native_parser: Boolean;
};
server: {
poolSize: Number;
};
}
<file_sep>/bin/controllers/monitorController.d.ts
import Monitor from './../monitor/monitor';
export default class MonitorController {
private core;
/**
* Monitor Controller constructor
*
     * @param {Object} core reference to the main server class
*/
constructor(core: any);
/**
* Handles the call to make a ping request
*
     * @param {Object} monitor an instance of a monitor
*/
pingRequest(monitor: any): void;
/**
* Handles the call to make a http Request
*
* @param {Object} monitor an instance of a monitor
*/
httpRequest(monitor: Monitor): void;
httpResponse(error: any, response: any, body: any, monitor: any): void;
/**
* Catch an error from the http request
*
* @param {Object} monitor an instance of a monitor
     * @param {Object} err error caught when trying to make an http request
*/
httpError(monitor: any, err: any): void;
/**
* Control the monitor status UP
*
* @param {Object} monitor an instance of a monitor
* @param {number} startTime time logged to calculate how long the request took
*/
httpSuccess(monitor: any, response: any): void;
/**
* Control the monitor status DOWN
*
* @param {Object} monitor an instance of a monitor
* @param {number} statusCode returned status code of the http request
*/
httpFail(error: any, response: any, body: any, monitor: any): void;
/**
* Controls the actions when a monitor goes down
*
* @param {Object} monitor instance of a monitor
     * @param {Object} data JSON object containing any data relevant to the report
*/
reportDown(monitor: any, data: any): void;
/**
* Controls the actions when a monitor goes up
*
* @param {Object} monitor instance of a monitor
*/
up(monitor: any, time: any): void;
/**
     * To Be Replaced! Need to build a controller that will handle what to do on UP, DOWN, SLOW etc.
*/
email(htmlMsg: any): void;
/**
     * Log the monitor - Should probably build a global logging mode for this. But then again, it's also nice to have control over how this
     * particular instance of a monitor handles the log... to be decided
*
* @param {string} status
* @param {string} msg
*/
log(status: any, msg: any): void;
/**
* Format the current date and time
*
* @param {number} time
*/
getFormattedDate(time: any): any;
}
<file_sep>/bin/monitor/iMonitorSettings.d.ts
interface iMonitorSettings {
name: string;
description: string;
category: string;
tags: Array<string>;
endpoint: string;
ping: boolean;
http: boolean;
httpHeaders: {
enabled: boolean;
up: string;
upVal: string;
down: string;
downVal: string;
};
interval: number;
onUp: {
remainderInterval: number;
mailingList: Array<string>;
slack: boolean;
hipchat: boolean;
email: boolean;
twilio: boolean;
whatsapp: boolean;
};
onDown: {
remainderInterval: number;
mailingList: Array<string>;
slack: boolean;
hipchat: boolean;
email: boolean;
twilio: boolean;
whatsapp: boolean;
};
paused: boolean;
threshold: {
enabled: boolean;
ok: number;
slow: number;
};
reports: {
daily: boolean;
weekly: boolean;
monthly: boolean;
yearly: boolean;
mailingList: Array<string>;
};
logLevel: number;
priority: number;
}
<file_sep>/lib/monitor/iStatus.ts
interface iStatus {
status:string,
paused:boolean,
isDown:boolean,
isUp:boolean,
lastNotification:number,
performance:number
}<file_sep>/bin/utils/genericUtils.js
"use strict";
var utils = (function () {
function utils() {
}
/**
* Builds a string out of arguments
*
     * @param {Array} args array of arguments to concatenate into the string
*/
utils.buildArgs = function (args) {
var str = '';
for (var i = 0; i < args.length; i++) {
if (args[i])
str += ' ' + args[i].toString();
}
return str;
};
return utils;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = utils;
<file_sep>/lib/utils/genericUtils.ts
export default class utils {
/**
* Builds a string out of arguments
*
     * @param {Array} args array of arguments to concatenate into the string
*/
static buildArgs(args) {
let str = '';
for(let i = 0; i < args.length; i++) {
if(args[i]) str += ' ' + args[i].toString();
}
return str;
}
}<file_sep>/bin/frontend/routes/api/main.js
"use strict";
var express_1 = require("express");
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = function (monitor) {
var api = express_1.Router();
// implemented
api.get('/', function (req, res) {
res.json({ health: 'OK', route: 'dashboard' });
});
// implemented
api.get('/monitors', function (req, res) {
res.json({ data: monitor.monitors });
});
// implemented
api.get('/up', function (req, res) {
res.json({ data: monitor.totalUp });
});
// implemented
api.get('/down', function (req, res) {
res.json({ data: monitor.totalDown });
});
api.get('/uptime', function (req, res) {
res.json({ data: '99.79' });
});
api.get('/downtime', function (req, res) {
res.json({ data: '99.79' });
});
// implemented
api.get('/monitorCount', function (req, res) {
res.json({ data: monitor.totalMonitors });
});
// implemented
api.param('tags', function (req, res, next, tags) {
var array = tags.split(',');
req.monitors = monitor.utils.filterByTags(array);
next();
});
// implemented
api.get('/monitorsByTags/:tags', function (req, res) {
res.json({ data: req.monitors });
});
// implemented
api.param('category', function (req, res, next, category) {
req.monitors = monitor.utils.filterByCategory(category);
next();
});
// implemented
api.get('/monitorsByCategory/:category', function (req, res) {
res.json({ data: req.monitors });
});
return api;
};
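// Example requests handled by this router (illustrative; Frontend.js mounts the API router at /api
// and root.js mounts this file at /main, with the port taken from process.env.PORT or config['API'].port):
//
//   GET /api/main/monitors                     -> all monitor instances
//   GET /api/main/up                           -> count of monitors currently up
//   GET /api/main/down                         -> count of monitors currently down
//   GET /api/main/monitorsByTags/tag1,tag2     -> monitors matching any of the comma-separated tags
//   GET /api/main/monitorsByCategory/general   -> monitors in the given category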
<file_sep>/lib/monitor/monitor.ts
/// <reference path="./iStatus.ts" />
/// <reference path="./iMonitorSettings.ts" />
import * as HTTP from 'http';
import Core from './../index';
// import controllers
import monitorController from './../controllers/monitorController';
export default class Monitor {
public status:iStatus;
public settings:iMonitorSettings;
private core:Core;
private _controller:monitorController;
private _id:number;
private _handle:NodeJS.Timer;
private _interval:number;
constructor(core, config) {
this.core = core;
this._id = this.core.utils.totalMonitors +1;
// instantiate monitor controller class and pass the monitor object
// should change this for a signal and make a controller register in the core
this._controller = new monitorController(this.core);
// set the initial starting status
this.status = {
status: null,
paused: null,
isDown: null,
isUp: null,
lastNotification: null,
performance: null
};
this.settings = {
name: "google",
description: "A test monitor service",
category: "general",
tags: ["tag1", "tag2", "tag3"],
endpoint: "https://www.google.com",
interval: 5,
paused: false,
ping: false,
http: true,
httpHeaders: {
enabled: false,
up: "x-custom",
upVal: "ok",
down: "x-custom",
downVal: "down"
},
threshold: {
enabled: true,
ok: 300,
slow: 1000
},
onDown: {
remainderInterval: 15,
mailingList: [
"<EMAIL>",
"<EMAIL>"
],
slack: true,
hipchat: false,
email: true,
twilio: false,
whatsapp: false
},
onUp: {
remainderInterval:20,
mailingList: [
"<EMAIL>",
"<EMAIL>"
],
slack: true,
hipchat: false,
email: true,
twilio: false,
whatsapp: false
},
reports: {
daily: true,
weekly: true,
monthly: true,
yearly: true,
mailingList: [
"<EMAIL>",
"<EMAIL>"
]
},
logLevel: 1,
priority: 1
}
// implement quiet mode //
        // this is where the monitor is still running and committing data to the database, but will not send notifications via any form if its status changes
        // initialise the monitor
this.init();
}
/**
     * Initialise the monitor.
*/
init() {
this.start();
}
/**
* Start the monitor timer.
*/
start() {
        // change the paused status to false
        this.status.paused = false;
        // make a new process request immediately, then start a timer for the next one
this.handleRequestProcess();
// get the date and time now
let time = Date.now();
// create the setInterval handle
this._handle = setInterval(() => {
this.handleRequestProcess();
}, this.settings.interval * 1000);
}
/**
* Stop the timer, removing the setInterval callback and changing the paused status to true
*/
stop() {
clearInterval(this._handle);
this.status.paused = true;
}
/**
     * initialiser function to handle the requests
*/
handleRequestProcess() {
console.log('#handle process request')
if(this.settings.ping) {
this._controller.pingRequest(this);
}
if(this.settings.http) {
this._controller.httpRequest(this);
}
}
/**
* @description validation check if monitor contains the tag
*
* @param {string} tag tag string to search for
*
* @returns {boolean}
*/
checkTag(tag):boolean {
for(let tempTag of this.settings.tags) {
if(tempTag === tag) {
return true;
}
}
return false;
}
get id():number {
return this._id;
}
set id(val:number) {
this._id = val;
}
get name():string {
return this.settings.name;
}
}<file_sep>/lib/utils/log.ts
import chalk from 'chalk';
import * as fs from 'fs';
import * as moment from 'moment';
/**
* Class for logging events and errors
*/
export default class Log {
private config:Object;
constructor(config) {
this.config = config;
}
/**
* @param {int} logLevel used to determine the type of logging
* @param {string} msg used for the body object of the log
*/
log(logLevel, msg) {
switch(logLevel){
// log level 0 = no logging
case 1:
this.logLevel1(msg);
break;
case 2:
this.logLevel2(msg);
break;
case 3:
this.logLevel3(msg);
break;
case 4:
this.logLevel4(msg);
break;
case 5:
this.logLevel5(msg);
break;
}
}
/**
* Log most things to the console
* @param {Object} log json format object containing endpoint, time, status and message
*/
logLevel1(log) {
if(log.status === 'UP') {
console.log(chalk.grey('Pinging:', log.endpoint, 'at: ', log.time));
console.log('Status:', chalk.green(log.status));
console.log('Message:', chalk.green(log.msg));
} else {
console.log(chalk.grey('Pinging:', log.endpoint, 'at: ', log.time));
console.log('Status:', chalk.red(log.status));
console.log('Message:', chalk.red(log.msg));
}
let time = moment().format();
let data = 'Date: ' + time + ' - ' + 'Endpoint: ' + log.endpoint + ' - ' + 'Status: ' + log.status + ' - ' + 'Message: ' + log.msg;
let fileName = this.config['monitorLocation'] + '01.10.2016' + '.log';
fs.appendFile(fileName, data + '\n', (err) => {
if(err) {
this.error(err, moment().format());
}
});
}
logLevel2(msg) {
}
logLevel3(msg) {
}
logLevel4(msg) {
}
logLevel5(msg) {
}
/**
* Log a service request
* @param {string} msg the main message of the log
* @param {string} time formatted time
*/
service(msg, time) {
let data = time + ' - ' + msg;
// append the service.log file
let fileName = this.config['serviceLocation'] + '01.10.2016' + '.log';
fs.appendFile(fileName, data + '\n', (err) => {
if(err) {
this.error(err, moment().format());
}
});
}
/**
* Log an error
* @param {string} msg the main message of the log
* @param {string} time formatted time
*/
error(msg, time) {
let data = time + ' - ' + msg;
// append the error.log file
let fileName = this.config['errorLocation'] + '01.10.2016' + '.log';
fs.appendFile(fileName, data + '\n', (err) => {
if(err) {
console.error('Error: Could not write to error log!', msg, moment().format())
}
});
}
/**
* TODO
* Make an auto cleaning service for the logging class
* Send an email to the admin if this gets too full
*/
}<file_sep>/lib/utils/resourceWatcher.ts
import Core from './../index';
import * as usage from 'usage';
export default class resourceWatcher {
private core:Core;
private _handle:NodeJS.Timer;
private _bandwidth:Array<number>;
private _pid:number;
private _units:Array<string>;
private _requests:number;
private _interval:number;
private _uptime:number;
constructor(core:Core, cb:Function) {
this.core = core;
this._requests = 0;
this._interval = 300;
this._uptime = Date.now();
this._units = ['bytes','KB','MB','GB','TB','PB'];
this._pid = process.pid;
this._handle = setInterval(() => {
this.update();
}, this._interval * 1000);
cb();
}
    /**
     * Poll cpu and memory usage for this process via the usage module and persist the sample.
     */
update() {
usage.lookup(this._pid, (err, result) => {
this.logData(result.cpu, result.memory);
// usage.clearHistory(); // is not working for some reason.
});
}
    /**
     * @description Save a resource usage sample (cpu, memory, request count) to the RESOURCELOG model.
     *
     * @param {number} cpu cpu usage reported by the usage module
     * @param {number} mem memory usage reported by the usage module
     *
     * @returns {void}
     */
logData(cpu, mem) {
let status = this.core.database.models.RESOURCELOG({
date: Date.now(),
cpu: cpu,
mem: mem,
bandwidth: 0,
monitorsLive: this.core.utils.live,
requests: this._requests
});
status.save((err, res) => {
if(err) {
//consle.log('error:', err);
} else {
// console.log('saved')
}
});
}
/**
* Every time a http, ping, test or connection uses bandwidth, log it and store it to the database
*/
updateBandwidth() {
}
transform(bytes: number = 0, precision: number = 2 ) : string {
if ( isNaN( parseFloat( String(bytes) )) || ! isFinite( bytes ) ) return '?';
let unit:number = 0;
while ( bytes >= 1024 ) {
bytes /= 1024;
unit ++;
}
return bytes.toFixed( + precision ) + ' ' + this._units[ unit ];
}
request() {
this._requests++;
console.log(this._requests);
}
public get requests():number {
return this._requests;
}
public get uptime():number {
return this._uptime;
}
}<file_sep>/bin/monitor/monitor.js
/// <reference path="./iStatus.ts" />
/// <reference path="./iMonitorSettings.ts" />
"use strict";
// import controllers
var monitorController_1 = require("./../controllers/monitorController");
var Monitor = (function () {
function Monitor(core, config) {
this.core = core;
// instantiate monitor controller class and pass the monitor object
// should change this for a signal and make a controller register in the core
this._controller = new monitorController_1.default(this.core);
// set the initial starting status
this.status = {
status: null,
paused: null,
isDown: null,
isUp: null,
lastNotification: null,
performance: null
};
this.settings = {
name: "google",
description: "A test monitor service",
category: "general",
tags: ["tag1", "tag2", "tag3"],
endpoint: "https://www.google.com",
interval: 5,
paused: false,
ping: false,
http: true,
httpHeaders: {
enabled: false,
up: "x-custom",
upVal: "ok",
down: "x-custom",
downVal: "down"
},
threshold: {
enabled: true,
ok: 300,
slow: 1000
},
onDown: {
remainderInterval: 15,
mailingList: [
"<EMAIL>",
"<EMAIL>"
],
slack: true,
hipchat: false,
email: true,
twilio: false,
whatsapp: false
},
onUp: {
remainderInterval: 20,
mailingList: [
"<EMAIL>",
"<EMAIL>"
],
slack: true,
hipchat: false,
email: true,
twilio: false,
whatsapp: false
},
reports: {
daily: true,
weekly: true,
monthly: true,
yearly: true,
mailingList: [
"<EMAIL>",
"<EMAIL>"
]
},
logLevel: 1,
priority: 1
};
// implement quiet mode //
        // this is where the monitor is still running and committing data to the database, but will not send notifications via any form if its status changes
        // initialise the monitor
this.init();
}
/**
     * Initialise the monitor.
*/
Monitor.prototype.init = function () {
this.start();
};
/**
* Start the monitor timer.
*/
Monitor.prototype.start = function () {
var _this = this;
        // change the paused status to false
        this.status.paused = false;
        // make a new process request immediately, then start a timer for the next one
this.handleRequestProcess();
// get the date and time now
var time = Date.now();
// create the setInterval handle
this._handle = setInterval(function () {
_this.handleRequestProcess();
}, this.settings.interval * 1000);
};
/**
* Stop the timer, removing the setInterval callback and changing the paused status to true
*/
Monitor.prototype.stop = function () {
clearInterval(this._handle);
this.status.paused = true;
};
/**
     * initialiser function to handle the requests
*/
Monitor.prototype.handleRequestProcess = function () {
console.log('#handle process request');
if (this.settings.ping) {
this._controller.pingRequest(this);
}
if (this.settings.http) {
this._controller.httpRequest(this);
}
};
/**
* Return true or false if this monitor contains the tag
*
* @param {string} tag tag string to search for
*
* @returns {boolean}
*/
Monitor.prototype.checkTag = function (tag) {
for (var _i = 0, _a = this.settings.tags; _i < _a.length; _i++) {
var tempTag = _a[_i];
if (tempTag === tag) {
return true;
}
}
return false;
};
Object.defineProperty(Monitor.prototype, "id", {
get: function () {
return this._id;
},
set: function (val) {
this._id = val;
},
enumerable: true,
configurable: true
});
return Monitor;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = Monitor;
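// Example lifecycle (illustrative sketch added for documentation, not part of the original source;
// "core" stands for an initialised Core instance, and the config argument is currently unused by
// the constructor, which relies on the hard-coded settings above):
//
//   const m = new Monitor(core, {});   // init() -> start(): polls immediately, then every settings.interval seconds
//   m.checkTag('tag1');                // -> true for the hard-coded tags
//   m.stop();                          // clears the interval and sets status.paused = true
//   m.start();                         // resumes polling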
<file_sep>/lib/utils/logHandler.ts
import utils from './../utils/genericUtils';
import Core from './../index';
export default class LogHandler {
private core:Core;
/**
     * Log Handler class constructor
*
* @param {Object} server a reference to the servator main class
* @param {function} callback a function to callback once the class is instantiated
*/
constructor(core, callback) {
this.core = core;
callback();
}
/**
* Handle a servator log
*
* @param {string} string string to be logged
* @param {Array} args array of additional parameters to be logged
*/
log(string = '', ...args) {
if(this.core.config['consoleLog']) {
let v = utils.buildArgs(args);
console.log(string + v);
} else {
// do something else with the error!
}
}
/**
* Handle a servator info
*
* @param {string} string string to info
* @param {Array} args array of additional parameters to info
*/
info(string, ...args) {
if(this.core.config['consoleInfo']) {
let v = utils.buildArgs(args);
console.info(string + v);
} else {
// do something else with the error!
}
}
/**
* Handle a servator error
*
* @param {string} string string to error
* @param {Array} args array of additional parameters to error
*/
error(string, ...args) {
if(this.core.config['consoleError']) {
let v = utils.buildArgs(args);
console.error(string + v);
} else {
// do something else with the error!
}
}
}<file_sep>/lib/frontend/routes/api/main.ts
import { Router } from 'express';
import Core from './../../../index';
export default (core:Core) => {
let api = Router();
// implemented
api.get('/', (req, res) => {
res.json({health: 'OK', route: 'dashboard'});
});
// implemented
api.get('/monitors', (req, res) => {
res.json({data: core.monitors});
});
// implemented
api.get('/up', (req, res) => {
res.json({data: core.utils.totalUp});
});
// implemented
api.get('/down', (req, res) => {
res.json({data: core.utils.totalDown});
});
api.get('/uptime', (req, res) => {
res.json({data: '99.79'});
});
api.get('/downtime', (req, res) => {
res.json({data: '99.79'});
});
// implemented
api.get('/monitorCount', (req, res) => {
res.json({data: core.utils.totalMonitors});
});
// implemented
api.param('tags', (req, res, next, tags) => {
let array = tags.split(',');
req.monitors = core.utils.filterByTags(array);
next();
});
// implemented
api.get('/monitorsByTags/:tags', (req, res) => {
res.json({data: req.monitors});
});
// implemented
api.param('category', (req, res, next, category) => {
req.monitors = core.utils.filterByCategory(category);
next();
});
// implemented
api.get('/monitorsByCategory/:category', (req, res) => {
res.json({data: req.monitors});
});
return api;
}<file_sep>/lib/frontend/routes/dashboard/index.ts
import { Router } from 'express';
export default () => {
let api = Router();
api.get('/', (req, res) => {
res.json({health: 'OK', route: 'service'});
});
// api.get('/cpu', (req, res) => {
// res.json({avg: 4.3});
// });
return api;
}<file_sep>/bin/frontend/routes/dashboard/root.d.ts
declare var _default: (server: any) => any;
export default _default;
<file_sep>/lib/frontend/routes/api/monitor.ts
import { Router } from 'express';
export default (monitor) => {
let api = Router();
/**
* search function to find a monitor
*/
api.param('monitorName', function(req, res, next, monitorName) {
req.monitor = monitor.utils.constructData(monitor.utils.monitorByName(monitorName));
next();
});
/**
* get generic monitor status
*/
api.get('/:monitorName', (req, res) => {
res.json({data: req.monitor});
});
/**
     * get the monitor's configuration
*/
api.get('/:monitorName/config', (req, res) => {
res.json({monitors: [1,2,3,4,5]});
});
/**
* get monitor total uptime
*/
api.get('/:monitorName/uptime', (req, res) => {
res.json({uptime: '1234567'});
});
/**
* Get monitor total downtime
*/
api.get('/:monitorName/downtime', (req, res) => {
res.json({down: '3'});
});
/**
* Start Monitor
*/
api.get('/:monitorName/serviceStart', (req, res) => {
res.json({msg: 'Service Started'});
});
/**
* Stop Monitor
*/
api.get('/:monitorName/serviceStop', (req, res) => {
res.json({msg: 'Service Stopped'});
});
/**
* Restart Monitor
*/
api.get('/:monitorName/serviceRestart', (req, res) => {
});
return api;
}<file_sep>/lib/db/models/resourceLog.ts
import * as mongoose from 'mongoose';
export default () => {
let schema = new mongoose.Schema({
date: Date,
cpu: Number,
mem: Number,
bandwidth: Number,
monitorsLive: Number,
requests: Number
});
return schema;
}<file_sep>/bin/utils/genericUtils.d.ts
export default class utils {
/**
* Builds a string out of arguments
*
     * @param {Array} args array of arguments to concatenate into the string
*/
static buildArgs(args: any): string;
}
<file_sep>/notes.txt
Future Features:
- server cpu load
- server memory usage
- server up time
- Change one monitor log for multiple logs, allowing the file system to read however many log files there are with a custom extension<file_sep>/lib/db/idbSettings.ts
interface idbSettings {
url: String;
db: {
native_parser:Boolean;
}
server: {
poolSize:Number;
}
}<file_sep>/bin/third-party/email.d.ts
import Core from './../index';
export default class Email {
protected core: Core;
private _config;
private _transporter;
constructor(monitor: any, callback: any);
/**
* Send an email
*
     * @param {string} to single or multiple email recipients in single string format separated by commas.
* @param {string} subject of the html
* @param {Object} body containing a plain and html version of the email. Can be custom or pre-build with the builder tools.
*/
send(to: any, subject: any, body: any): void;
/**
* Builds a report email. Returns an object that the send function accepts
*/
buildReport(): {
plain: string;
html: string;
};
/**
* Builds an error email template. Returns an object that the send function accepts
*/
buildError(): {
plain: string;
html: string;
};
/**
* Builds a warning email template. Returns an object that the send function accepts
*/
buildWarning(): {
plain: string;
html: string;
};
}
<file_sep>/bin/utils/logHandler.d.ts
export default class LogHandler {
private core;
/**
     * Log Handler class constructor
*
* @param {Object} server a reference to the servator main class
* @param {function} callback a function to callback once the class is instantiated
*/
constructor(core: any, callback: any);
/**
* Handle a servator log
*
* @param {string} string string to be logged
* @param {Array} args array of additional parameters to be logged
*/
log(string?: string, ...args: any[]): void;
/**
* Handle a servator info
*
* @param {string} string string to info
* @param {Array} args array of additional parameters to info
*/
info(string: any, ...args: any[]): void;
/**
* Handle a servator error
*
* @param {string} string string to error
* @param {Array} args array of additional parameters to error
*/
error(string: any, ...args: any[]): void;
}
<file_sep>/bin/controllers/reportController.d.ts
export default class ReportController {
/**
* Report Controller constructor
*
* @param {Object} Monitor
*/
constructor(monitor: any);
}
<file_sep>/lib/db/iModels.ts
import * as mongoose from 'mongoose';
interface iModels {
    MONITORSTATUS: mongoose.Model<mongoose.Document>;
    RESOURCELOG: mongoose.Model<mongoose.Document>;
}<file_sep>/bin/index.d.ts
import Frontend from './frontend/Frontend';
import Email from './third-party/email';
import Log from './utils/log';
import Utils from './utils/utils';
import LogHandler from './utils/logHandler';
import Monitor from './monitor/monitor';
import DatabaseManager from './db/dbManager';
export default class Core {
/**
* Construct the monitor manager class
*/
protected _logger: Log;
protected _config: Object;
protected _console: LogHandler;
protected _database: DatabaseManager;
protected _utils: Utils;
protected _frontend: Frontend;
protected _mail: Email;
protected _monitors: Array<Monitor>;
constructor();
/**
* Sets up a new instance of Monitor for every entry in the monitoring.json
*/
initaliseMonitors(): void;
readonly config: Object;
readonly monitors: Array<Monitor>;
readonly logger: Log;
readonly utils: Utils;
readonly console: LogHandler;
readonly database: DatabaseManager;
}
<file_sep>/bin/frontend/routes/api/root.js
"use strict";
var express_1 = require("express");
// import routes
var main_1 = require("./main");
var service_1 = require("./service");
var monitor_1 = require("./monitor");
var live_1 = require("./live");
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = function (monitor) {
monitor = monitor;
var api = express_1.Router();
// perhaps expose some API metadata at the root
api.get('/', function (req, res) {
res.json({ health: 'OK', version: '1.0.7' });
});
api.use('/main', main_1.default(monitor));
api.use('/service', service_1.default(monitor));
api.use('/monitor', monitor_1.default(monitor));
api.use('/live', live_1.default(monitor));
return api;
};
<file_sep>/lib/controllers/monitorController.ts
import * as Request from 'request';
import * as Ping from 'net-ping';
import * as http from 'http';
import Core from './../index';
import Monitor from './../monitor/monitor';
export default class MonitorController {
private core:Core;
/**
* Monitor Controller constructor
*
* @param {Object} server reference to the main server class
*/
constructor(core) {
// hold a reference to the monitor class
this.core = core;
}
/**
* Handles the call to make a ping request
*
     * @param {Object} monitor an instance of a monitor
*/
pingRequest(monitor) {
console.info('request new ping session');
let session = Ping.createSession ();
        session.pingHost(monitor.endpoint, (error) => {
if (error)
console.log (monitor.endpoint + ": " + error.toString ());
else
console.log (monitor.endpoint + ": Alive");
});
}
/**
* Handles the call to make a http Request
*
* @param {Object} monitor an instance of a monitor
*/
httpRequest(monitor:Monitor) {
this.core.resource.request();
        // get the current time for ping measurement
let startTime = Date.now();
try {
var options = {
url: monitor.settings.endpoint,
method: 'HEAD',
time: true,
headers: [],
postData: {
mimeType: '',
params: []
},
};
// initalise request to the endpoint
let request = Request(options, (error, response, body) => {
let endTime = Date.now();
// check to see if response is not null or undefined
if(!error) {
this.httpResponse(error, response, body, monitor);
} else {
this.httpFail(error, response, body, monitor);
}
});
} catch (err) {
this.httpError(monitor, err);
}
}
httpResponse(error, response, body, monitor) {
// if http request returned an error
if(error) {
this.httpFail(error, response, body, monitor);
} else {
// if the monitor has custom headers set
if(monitor.settings.httpHeaders.enabled) {
// response header matched up
if(response.headers[monitor.settings.httpHeaders.up] === monitor.settings.httpHeaders.upVal) {
this.httpSuccess(monitor, response);
// response header matched down
} else if(response.headers[monitor.settings.httpHeaders.down] === monitor.settings.httpHeaders.downVal) {
this.httpFail(error, response, body, monitor);
// response header no match
} else {
this.httpFail(error, response, body, monitor);
}
} else {
// check standard OK 200 statusCode
if(response.statusCode === 200) {
this.httpSuccess(monitor, response);
} else {
this.httpFail(error, response, body, monitor);
}
}
}
}
/**
* Catch an error from the http request
*
* @param {Object} monitor an instance of a monitor
     * @param {Object} err error caught when trying to make an http request
*/
httpError(monitor, err) {
console.log(err)
}
/**
* Control the monitor status UP
*
* @param {Object} monitor an instance of a monitor
     * @param {Object} response the http response, used to read the elapsed request time
*/
httpSuccess(monitor, response) {
console.log(response.elapsedTime);
// check the response time incase it's to slow!
if(monitor.settings.threshold.enabled) {
// if total request time is less than the minimum threshold for ok
if(response.elapsedTime <= monitor.settings.threshold.ok) {
//console.log('monitor speed ok')
monitor.status.performance = 'ok';
this.up(monitor, response);
// if the total request time is greater than minimum threshold for okay and less than slow
} else if (response.elapsedTime > monitor.settings.threshold.ok && response.elapsedTime <= monitor.settings.threshold.slow) {
//console.log('monitor speed slow')
monitor.status.performance = 'slow';
this.up(monitor, response);
// otherwise, response time is taking far too long, log it as critical
} else {
monitor.status.performance = 'critical';
//console.log('monitor speed critical')
this.up(monitor, response);
}
} else {
this.up(monitor, response);
}
}
/**
* Control the monitor status DOWN
*
     * @param {Object} error error returned by the http request, if any
     * @param {Object} response the http response (may be undefined if the request failed)
     * @param {Object} body the response body
     * @param {Object} monitor an instance of a monitor
*/
httpFail(error, response, body, monitor:Monitor) {
// note to self - if domain can't resolve, then no response object.
//console.log(response);
//console.log(error)
monitor.status.status = 'DOWN';
monitor.status.isUp = false;
monitor.status.isDown = true;
// log the time it was reported down
let time = Date.now();
this.core.console.error('http-fail:', error, response)
// make a new intance of the monitorStatus model from the database schema
let status = this.core.database.models.MONITORSTATUS({
uid:monitor.id,
date: Date.now(),
name: monitor.name,
up: monitor.status.isUp,
down: monitor.status.isDown,
status: monitor.status.status,
statusValue: monitor.status.status,
statusData: error,
httpRequestTime: time,
performance: null
});
status.save((err, res) => {
if(err) {
                //console.log('error:', err);
} else {
console.log('saved http fail')
}
});
}
/**
* Controls the actions when a monitor goes down
*
* @param {Object} monitor instance of a monitor
* @param {Object} data JSON object containing any relevant to report
*/
reportDown(monitor, data) {
// if the monitor is not already logged as down
if(!monitor.status.isDown) {
// make a new push notification and set the last reported down time
} else if(monitor.status.lastNotification - Date.now() > monitor.settings.reportDownInterval) {
// if the monitor has not posted a notification within it's reported down interval'
// push new notifications to slack / email / twilio etc..
}
}
/**
* Controls the actions when a monitor goes up
*
* @param {Object} monitor instance of a monitor
*/
    up(monitor:Monitor, response) {
        // remember the previous state before overwriting it
        let wasUp = monitor.status.isUp;
        monitor.status.status = 'UP';
        monitor.status.isUp = true;
        monitor.status.isDown = false;
        // if the monitor was not already logged as up
        if(!wasUp) {
            // make a new push notification to say that the service is back online
        }
// make a new intance of the monitorStatus model from the database schema
let status = this.core.database.models.MONITORSTATUS({
uid:monitor.id,
date: Date.now(),
name: monitor.name,
up: monitor.status.isUp,
down: monitor.status.isDown,
status: monitor.status.status,
statusValue: monitor.status.status,
statusCode: response.statusCode,
            httpRequestTime: response.elapsedTime,
performance: response.elapsedTime
});
status.save((err, res) => {
if(err) {
console.log('error:', err);
} else {
console.log('saved http success')
}
});
}
/**
* Format the current date and time
*
* @param {number} time
*/
getFormattedDate(time) {
let currentDate:any = new Date(time);
currentDate = currentDate.toISOString();
currentDate = currentDate.replace(/T/, ' ');
currentDate = currentDate.replace(/\..+/, '');
return currentDate;
}
}
|
e6dd9f93f6bd3f6914e954b24d7ae120ab418d91
|
[
"JavaScript",
"TypeScript",
"Text",
"Markdown"
] | 68
|
TypeScript
|
megmut/servator
|
33bedc0a018c8e2a5a1c9b3fa9cb5e6fa5ace687
|
8262fa7e1f89e0aa368fa74e785524746dfe30fe
|
refs/heads/master
|
<repo_name>CostGranda/Api-rest-nodejs<file_sep>/config.js
/*global someFunction module:true*/
/*global someFunction process:true*/
/*eslint no-undef: "error"*/
module.exports= {
port: process.env.PORT || 3000,
db: process.env.MONGODB || 'mongodb://localhost:27017/shop',
SECRET_TOKEN: '<PASSWORD>'
};
|
ba0a9b4b1b3a8932b01e7d6121b9f78addac3513
|
[
"JavaScript"
] | 1
|
JavaScript
|
CostGranda/Api-rest-nodejs
|
b84e98cba298a2ba90d3874b1b179eb54a1f2a55
|
3d599f0a36a30e943cedc48854ef03dcc6f49226
|
refs/heads/master
|
<file_sep>package com.example.trivia.util;
import android.app.Activity;
import android.content.SharedPreferences;
public class Prefs {
private SharedPreferences preferences;
    // The activity is passed in as the context because SharedPreferences is used from a separate
    // class: we need the calling activity's context to create or fetch the preferences there.
    public Prefs(Activity activity) {
        this.preferences = activity.getPreferences(Activity.MODE_PRIVATE);
}
public void saveHighestScore(int score)
{
int currentScore = score;
int lastScore = preferences.getInt("highest_score", 0);
if(currentScore>lastScore)
{
//hence we have a new highest score
preferences.edit().putInt("highest_score", currentScore).apply();
}
}
public int getHighestScore()
{
return preferences.getInt("highest_score", 0);
}
}
<file_sep># Trivia
Interesting Questions are waiting for you!



|
f81b052d5bcb09c84f5c534cdbe2866262d318dd
|
[
"Markdown",
"Java"
] | 2
|
Java
|
ratiktiwari/Trivia
|
3601efd87790a646836571af2fed141cea551760
|
8b207408149e2b550f121c58dfa99ad7aa1c7d91
|
refs/heads/master
|
<file_sep>input = [4,9,7,5,8,9,3]
counter = 0
num = len(input)
for i in range(num):
for j in range(i+1, num):
if input[i] > input[j]:
temp = input[i]
input[i] = input[j]
input[j] = temp
counter = counter + 1
print("sort = ", input)
print("jumlah swap = ", counter)
<file_sep>import socket
import datetime
def server():
counter = 0
date = datetime.datetime.now()
host = socket.gethostname()
port = 5000
server = socket.socket()
server.bind((host,port))
server.listen(2)
conn, address = server.accept()
print("connect : ", str(address))
while True:
counter = counter + 1
data = conn.recv(1024).decode()
if not data:
break
print("client : "+ str(data))
log = "[" + str(date) + "] Success : POST " + str(address) + "counter : " + str(counter) + ", " + str(data) + "\n"
data = input(' -> ')
file = open("server.log", "a")
file.write(log)
conn.send(data.encode())
conn.close()
if __name__ == '__main__':
server()
<file_sep>import socket
def client():
host = socket.gethostname()
port = 5000
client = socket.socket()
client.connect((host, port))
message = input(" -> ")
while message.lower().strip() != 'bye':
client.send(message.encode())
data = client.recv(1024).decode()
print('server: ' + str(data))
message = input(" -> ")
client.close()
if __name__ == '__main__':
client()
|
382f951cabaf52b5594406b6935ca6692ad66ce0
|
[
"Python"
] | 3
|
Python
|
sulukr/tes_refactory_id
|
e2a1ad19304b0f932024c3fc758a8019635134bd
|
a8ee28f5eeb968322d8ae3eabbf3cab31990d6f4
|
refs/heads/master
|
<repo_name>hongshui3000/stm32mp1-rpmsg-adcsampler<file_sep>/source/src_hal/inc/main.h
/* USER CODE BEGIN Header */
/**
******************************************************************************
* @file OpenAMP/OpenAMP_TTY_echo/Inc/main.h
* @author MCD Application Team
* @brief Header for main.c module
******************************************************************************
* @attention
*
* <h2><center>© Copyright (c) 2019 STMicroelectronics.
* All rights reserved.</center></h2>
*
* This software component is licensed by ST under BSD 3-Clause license,
* the "License"; You may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
* opensource.org/licenses/BSD-3-Clause
*
******************************************************************************
*/
/* USER CODE END Header */
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef __MAIN_H
#define __MAIN_H
#ifdef __cplusplus
extern "C" {
#endif
/* Includes ------------------------------------------------------------------*/
#include "stm32mp1xx_hal.h"
#include "stm32mp15xx_disco_stpmic1.h"
#include "openamp.h"
#include "lock_resource.h"
/* Private includes ----------------------------------------------------------*/
/* USER CODE BEGIN Includes */
#include "openamp_log.h"
/* USER CODE END Includes */
/* Exported types ------------------------------------------------------------*/
/* USER CODE BEGIN ET */
/* USER CODE END ET */
/* Exported constants --------------------------------------------------------*/
/* USER CODE BEGIN EC */
/* USER CODE END EC */
/* Exported macro ------------------------------------------------------------*/
/* USER CODE BEGIN EM */
/* USER CODE END EM */
/* Exported functions prototypes ---------------------------------------------*/
void Error_Handler(char * file, int line);
/* USER CODE BEGIN EFP */
/* USER CODE END EFP */
/* Private defines -----------------------------------------------------------*/
#define DEFAULT_IRQ_PRIO 1U
/* ########################## Assert Selection ############################## */
/**
* @brief Uncomment the line below to expanse the "assert_param" macro in the
* HAL drivers code
*/
/* #define USE_FULL_ASSERT 1U */
/* USER CODE BEGIN Private defines */
/* User can use this section to tailor ADCx instance under use and associated
resources */
/* ## Definition of ADC related resources ################################### */
/* Definition of ADCx clock resources */
#define ADCx_CLK_ENABLE() __HAL_RCC_ADC12_CLK_ENABLE()
#define ADCx_FORCE_RESET() __HAL_RCC_ADC12_FORCE_RESET()
#define ADCx_RELEASE_RESET() __HAL_RCC_ADC12_RELEASE_RESET()
/* Definition of ADCx channels
* A0 : PF14 -> ADC2_IN6
* A1 : PF13 -> ADC2_IN2
* A2 : ANA0 -> ADC1_IN0
* A3 : ANA1 -> ADC1_IN1
* A4 : PC3 -> ADC1_IN13
*/
/* Definition of ADCx NVIC resources */
/* Definition of ADCx channels pins */
#define ADCx_CHANNEL_AC_GPIO_CLK_ENABLE() __HAL_RCC_GPIOF_CLK_ENABLE()
#define ADCx_CHANNEL_CT1_GPIO_CLK_ENABLE() __HAL_RCC_GPIOF_CLK_ENABLE()
#define ADCx_CHANNEL_CT2_GPIO_CLK_ENABLE() __HAL_RCC_GPIOA_CLK_ENABLE()
#define ADCx_CHANNEL_CT3_GPIO_CLK_ENABLE() __HAL_RCC_GPIOA_CLK_ENABLE()
#define ADCx_CHANNEL_CT4_GPIO_CLK_ENABLE() __HAL_RCC_GPIOC_CLK_ENABLE()
/* Definition of ADCx DMA resources */
#define ADCx_DMA_CLK_ENABLE() __HAL_RCC_DMA2_CLK_ENABLE()
#define ADCx_DMAMUX_CLK_ENABLE() __HAL_RCC_DMAMUX_CLK_ENABLE()
enum en_emontx_channel {
EMONTX_CH_AC,
EMONTX_CH_CT1,
EMONTX_CH_CT2,
EMONTX_CH_CT3,
EMONTX_CH_CT4,
EMONTX_CH_NUM = EMONTX_CH_CT4
};
#define NUMBER_OF_ADCS 2
struct adc_dev_t {
ADC_TypeDef *adc;
uint8_t adc_irqn;
void (*adc_irq_handler)(void);
DMA_Stream_TypeDef *dma_stream;
uint8_t dma_stream_irqn;
void (*stream_irq_handler)(void);
};
struct adc_channel_t {
ADC_TypeDef *adc;
uint32_t channel;
GPIO_TypeDef *port;
uint16_t pin;
};
extern struct adc_channel_t adc_channels[EMONTX_CH_NUM];
extern struct adc_dev_t adc_dev[NUMBER_OF_ADCS];
/* USER CODE BEGIN Private defines */
/* USER CODE END Private defines */
#ifdef __cplusplus
}
#endif
#endif /* __MAIN_H */
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
<file_sep>/README.md
STM32MP157C 4xADC RPMSG firmware
----
This code is based on the cmake template for STM32MP157C which is located [here](https://github.com/dimtass/stm32mp1-cmake-template).
This firmware implements a 4x ADC read using DMA on the CM4. Then it sends those
values to the CA7 using OpenAMP.
> Note: There is a blog post [here](https://www.stupid-projects.com/using-elastic-stack-elk-on-embedded-part-2/)
which explains how to use this firmware. This firmware implements the lower part of the
adcsampler elastic beat which is located [here](https://github.com/dimtass/adcsamplerbeat).
The firmware is able to read the 4x ADCs from the Arduino connector on the STM32MP157C
(A0 - A3) and then sends those values via OpenAMP on the application CPU (CA7). The CA7
is then able to read those values from the `/dev/ttyRPMSG0` serial port.
> Note: The pdf user manual UM2534 (document dm00591354) has wrong pin assignments.
I've raised this issue [here](https://community.st.com/s/question/0D73W000000Uby8/mistake-in-stm32mp1dk2-schematics)
and it seems it will take some time to resolve.
The correct pinmux for the ADC pins on the Arduino connector of the STM32MP157C is:
```
ARD_A0 : PF14 -> ADC2_IN6
ARD_A1 : PF13 -> ADC2_IN2
ARD_A2 : ANA0 -> ADC1_IN0
ARD_A3 : ANA1 -> ADC1_IN1
```
Or also
```
ARD_A0 : PF14 -> ADC2_IN6
ARD_A1 : PF13 -> ADC2_IN2
ARD_A2 : ANA0 -> ADC2_IN0
ARD_A3 : ANA1 -> ADC2_IN1
```
In this firmware I'm using the second one.
## Build the CM firmware
To build the firmware you need to clone the repo in any directory and then inside
that directory run the command:
```sh
./build.sh
```
The above command assumes that you have a toolchain in your `/opt` folder. In case
you want to point to a specific toolchain path, run:
```sh
TOOLCHAIN_DIR=/path/to/toolchain SRC=src_hal ./build.sh
```
Or you can edit the `build.sh` script and add your toolchain path.
It's better to use Docker to build the image. To do that run this command:
```sh
docker run --rm -it -v $(pwd):/tmp -w=/tmp dimtass/stm32-cde-image:latest -c "SRC=src_hal ./build.sh"
```
In order to remove any previous builds, then run:
```sh
docker run --rm -it -v $(pwd):/tmp -w=/tmp dimtass/stm32-cde-image:latest -c "CLEANBUILD=true SRC=src_hal ./build.sh"
```
## Loading the firmware to CM4
To load the firmware on the Cortex-M4 MCU you need to scp the firmware `.elf` file into the
`/lib/firmware` folder of the Linux instance on the STM32MP1. Then you also need to copy the
`fw_cortex_m4.sh` script to `/home/root` (or anywhere you like) and then run this command
as root.
```sh
./fw_cortex_m4.sh start
```
To stop the firmware run:
```sh
./fw_cortex_m4.sh stop
```
> Note: The console of the STM32MP1 is routed to the micro-USB connector `STLINK CN11`, which
in my case (Ubuntu) shows up as `/dev/ttyACMx`.
When you copy `fw_cortex_m4.sh` you also need to set its execute flag with:
```sh
chmod +x fw_cortex_m4.sh
```
If the firmware is loaded without problem you should see an output like this:
```sh
fw_cortex_m4.sh: fmw_name=stm32mp157c-rpmsg-test.elf
[70696.118168] remoteproc remoteproc0: powering up m4
[70696.124096] remoteproc remoteproc0: Booting fw image stm32mp157c-rpmsg-test.elf, size 1115364
[70696.184680] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:timer@40000000 (ops 0xc0cfbd7c)
[70696.208147] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:i2c@40015000 (ops 0xc0cfbd7c)
[70696.254391] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:dac@40017000 (ops 0xc0cfbd7c)
[70696.265830] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:serial@40018000 (ops 0xc0cfbd7c)
[70696.299249] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:timer@44000000 (ops 0xc0cfbd7c)
[70696.310675] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:spi@44005000 (ops 0xc0cfbd7c)
[70696.323216] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:dma@48001000 (ops 0xc0cfbd7c)
[70696.335975] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:adc@48003000 (ops 0xc0cfbd7c)
[70696.348405] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:hash@4c002000 (ops 0xc0cfbd7c)
[70696.361033] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:rng@4c003000 (ops 0xc0cfbd7c)
[70696.373738] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:crc@4c004000 (ops 0xc0cfbd7c)
[70696.386138] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:cryp@4c005000 (ops 0xc0cfbd7c)
[70696.398770] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:button (ops 0xc0cfbd7c)
[70696.410813] rproc-srm-core mlahb:m4@10000000:m4_system_resources: bound mlahb:m4@10000000:m4_system_resources:m4_led (ops 0xc0cfbd7c)
[70696.423010] mlahb:m4@10000000#vdev0buffer: assigned reserved memory node vdev0buffer@10042000
[70696.432182] virtio_rpmsg_bus virtio0: rpmsg host is online
[70696.437167] mlahb:m4@10000000#vdev0buffer: registered virtio0 (type 7)
[70696.443501] remoteproc remoteproc0: remote processor m4 is now up
[70696.449826] virtio_rpmsg_bus virtio0: creating channel rpmsg-tty-channel addr 0x0
```
This means that the firmware is loaded and the virtual tty port is mapped.
## Testing the firmware
When this example firmware loads, two new tty ports are created on the Linux side:
`/dev/ttyRPMSG0` and `/dev/ttyRPMSG1`. To test that the firmware is working
properly, run these commands in the Linux terminal (in this case we use only one port).
```sh
stty -onlcr -echo -F /dev/ttyRPMSG0
cat /dev/ttyRPMSG0 &
echo "start" >/dev/ttyRPMSG0
```
You should see something like this:
```
ADC[2.2]:4014,3706,4010,4006
ADC[2.1]:3986,3702,4022,4026
ADC[2.2]:3986,3702,4022,4026
ADC[2.1]:4018,3710,4006,4010
ADC[2.2]:4018,3710,4006,4010
```
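
If you want to consume these readings programmatically rather than with `cat`, each line can
be split into its tag and its four sample values. The snippet below is only a minimal Python
sketch (it is not part of this repository); it assumes the port has already been configured
with the `stty` command above and that the `"start"` string has been sent to the firmware.
```python
# Minimal sketch: parse "ADC[x.y]:v0,v1,v2,v3" lines coming from the rpmsg tty.
# Assumes /dev/ttyRPMSG0 has been set up with the stty command shown above.

def parse_adc_line(line):
    """Return (tag, [v0, v1, v2, v3]) for a line like 'ADC[2.1]:4018,3710,4006,4010'."""
    tag, _, values = line.strip().partition(':')
    return tag, [int(v) for v in values.split(',')]

if __name__ == '__main__':
    with open('/dev/ttyRPMSG0', 'r') as port:
        for raw in port:
            if raw.startswith('ADC['):
                print(parse_adc_line(raw))
```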
## Debug serial port
The firmware also supports a debug UART on the CM4. This port is mapped to UART7 and the
Arduino connector pins. The pinmap is the following:
pin | Function
-|-
D0 | Rx
D1 | Tx
You can connect a USB-to-UART module to those pins and the GND and then open the tty port
on your host. The port uses a 115200 baudrate. When the firmware loads on the CM4 you
should see these messages:
```sh
[00000.008][INFO ]Cortex-M4 boot successful with STM32Cube FW version: v1.2.0
[00000.015][INFO ]Virtual UART0 OpenAMP-rpmsg channel creation
[00000.021][INFO ]Virtual UART1 OpenAMP-rpmsg channel creation
```
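
Any 115200 8N1 serial terminal works for this. As a sketch only, the debug output could also
be captured from the host with pyserial (an assumption, not something shipped with this repo;
`/dev/ttyUSB0` below is a hypothetical device name for the USB-to-UART adapter wired to D0/D1):
```python
# Minimal sketch: dump the CM4 debug UART (115200 8N1) using pyserial.
# /dev/ttyUSB0 is a hypothetical name for the USB-to-UART adapter wired to D0/D1.
import serial

with serial.Serial('/dev/ttyUSB0', baudrate=115200, timeout=1) as port:
    while True:
        line = port.readline().decode(errors='replace')
        if line:
            print(line, end='')
```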
## Using the cmake template in Yocto
TBD
## License
Just MIT.
## Author
<NAME> <<EMAIL>><file_sep>/source/src_hal/main.c
/* USER CODE BEGIN Header */
/**
******************************************************************************
* @file OpenAMP/OpenAMP_TTY_echo/Inc/main.c
* @author MCD Application Team
* @brief Main program body.
******************************************************************************
* @attention
*
* <h2><center>© Copyright (c) 2019 STMicroelectronics.
* All rights reserved.</center></h2>
*
* This software component is licensed by ST under BSD 3-Clause license,
* the "License"; You may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
* opensource.org/licenses/BSD-3-Clause
*
******************************************************************************
*/
/* USER CODE END Header */
/* Includes ------------------------------------------------------------------*/
#include "main.h"
#include "stm32mp15xx_disco.h"
#include "stm32mp1xx_it.h"
/* Private includes ----------------------------------------------------------*/
/* USER CODE BEGIN Includes */
/* USER CODE END Includes */
/* Private typedef -----------------------------------------------------------*/
/* USER CODE BEGIN PTD */
/* USER CODE END PTD */
/* Private define ------------------------------------------------------------*/
/* USER CODE BEGIN PD */
#define MAX_BUFFER_SIZE RPMSG_BUFFER_SIZE
/* USER CODE END PD */
/* Private macro -------------------------------------------------------------*/
/* USER CODE BEGIN PM */
/* USER CODE END PM */
/* Private variables ---------------------------------------------------------*/
IPCC_HandleTypeDef hipcc;
TIM_HandleTypeDef htim2;
ADC_HandleTypeDef hadc1;
ADC_HandleTypeDef hadc2;
#define ADC_VALUE_BUFFER_SIZE 2
struct emontx_values_t {
__IO uint16_t AC[ADC_VALUE_BUFFER_SIZE];
__IO uint16_t CT1[ADC_VALUE_BUFFER_SIZE];
__IO uint16_t CT2[ADC_VALUE_BUFFER_SIZE];
__IO uint16_t CT3[ADC_VALUE_BUFFER_SIZE];
__IO uint16_t CT4[ADC_VALUE_BUFFER_SIZE];
};
struct emontx_values_t emontx_values;
#define ADC1_BUFFER_SIZE (ADC_VALUE_BUFFER_SIZE * 3)
#define ADC2_BUFFER_SIZE (ADC_VALUE_BUFFER_SIZE * 4)
uint16_t adc1_values[ADC1_BUFFER_SIZE]; // buffer for up to 3 channels on ADC1
uint16_t adc2_values[ADC2_BUFFER_SIZE]; // buffer for the 4 channels sampled on ADC2
struct adc_channel_t adc_channels[EMONTX_CH_NUM] = {
[EMONTX_CH_AC] = {
.adc = ADC2,
.channel = ADC_CHANNEL_6,
.port = GPIOF,
.pin = GPIO_PIN_14,
},
[EMONTX_CH_CT1] = {
.adc = ADC2,
.channel = ADC_CHANNEL_2,
.port = GPIOF,
.pin = GPIO_PIN_13,
},
[EMONTX_CH_CT2] = {
.adc = ADC2,
.channel = ADC_CHANNEL_0,
.port = GPIOA,
.pin = GPIO_PIN_0,
},
[EMONTX_CH_CT3] = {
.adc = ADC2,
.channel = ADC_CHANNEL_1,
.port = GPIOA,
.pin = GPIO_PIN_1,
},
// [EMONTX_CH_CT4] = {
// .adc = ADC1,
// .channel = ADC_CHANNEL_13,
// .port = GPIOC,
// .pin = GPIO_PIN_3,
// },
};
struct adc_dev_t adc_dev[NUMBER_OF_ADCS] = {
[0] = {
.adc = ADC2,
.adc_irqn = ADC2_IRQn,
.adc_irq_handler = &ADC2_IRQHandler,
.dma_stream = DMA2_Stream1,
.dma_stream_irqn = DMA2_Stream1_IRQn,
.stream_irq_handler = &DMA2_Stream1_IRQHandler,
},
[1] = {
.adc = ADC1,
.adc_irqn = ADC1_IRQn,
.adc_irq_handler = &ADC1_IRQHandler,
.dma_stream = DMA2_Stream2,
.dma_stream_irqn = DMA2_Stream2_IRQn,
.stream_irq_handler = &DMA2_Stream2_IRQHandler,
},
};
/* Variable to report status of DMA transfer of ADC group regular conversions */
/* 0: DMA transfer is not completed */
/* 1: DMA transfer is completed */
/* 2: DMA transfer has not yet been started yet (initial state) */
__IO uint8_t ubDmaTransferStatus = 2; /* Variable set into DMA interruption callback */
enum { PREAMBLE=0xABCD, };
#pragma pack(1)
struct packet {
uint16_t preamble;
uint16_t length;
uint16_t crc16;
};
typedef void (*VIRT_UART_RxCpltCallback)(VIRT_UART_HandleTypeDef *huart);
void VIRT_UART0_RxCpltCallback(VIRT_UART_HandleTypeDef *huart);
void VIRT_UART1_RxCpltCallback(VIRT_UART_HandleTypeDef *huart);
// uint8_t VirtUart0ChannelBuffRx[MAX_BUFFER_SIZE];
uint8_t VirtUart0ChannelBuffTx[MAX_BUFFER_SIZE];
// uint8_t VirtUart1ChannelBuffRx[MAX_BUFFER_SIZE];
uint8_t VirtUart1ChannelBuffTx[MAX_BUFFER_SIZE];
struct virt_uart {
VIRT_UART_HandleTypeDef huart;
__IO FlagStatus rx_status;
__IO uint8_t *rx_buffer;
__IO uint16_t rx_size;
__IO FlagStatus tx_status;
__IO uint8_t *tx_buffer;
__IO uint16_t tx_size;
VIRT_UART_RxCpltCallback cbk;
};
struct virt_uart virt_uart0 = {
.rx_status = RESET,
.rx_buffer = NULL,
.rx_size = 0,
.tx_status = RESET,
.tx_buffer = VirtUart0ChannelBuffTx,
.tx_size = 0,
.cbk = VIRT_UART0_RxCpltCallback,
};
__IO uint16_t virt_uart0_expected_nbytes = 0;
struct virt_uart virt_uart1 = {
.rx_status = RESET,
.rx_buffer = NULL,
.rx_size = 0,
.tx_status = RESET,
.tx_buffer = VirtUart1ChannelBuffTx,
.tx_size = 0,
.cbk = VIRT_UART1_RxCpltCallback,
};
__IO uint16_t virt_uart1_expected_nbytes = 0;
/* USER CODE BEGIN PV */
/* USER CODE END PV */
/* Private function prototypes -----------------------------------------------*/
void SystemClock_Config(void);
static void MX_IPCC_Init(void);
static void MX_GPIO_Init(void);
static void MX_TIM2_Init(void);
/* USER CODE BEGIN PFP */
static void Configure_ADC(void);
/* USER CODE END PFP */
/* Private user code ---------------------------------------------------------*/
/* USER CODE BEGIN 0 */
/* USER CODE END 0 */
/**
* @brief The application entry point.
* @retval int
*/
int main(void)
{
/* USER CODE BEGIN 1 */
/* USER CODE END 1 */
/* MCU Configuration--------------------------------------------------------*/
/* Reset of all peripherals, Initialize the Systick. */
HAL_Init();
/* USER CODE BEGIN Init */
if(IS_ENGINEERING_BOOT_MODE())
{
/* Configure the system clock */
SystemClock_Config();
}
/* USER CODE END Init */
/*HW semaphore Clock enable*/
__HAL_RCC_HSEM_CLK_ENABLE();
if(IS_ENGINEERING_BOOT_MODE())
{
/* Configure PMIC */
BSP_PMIC_Init();
BSP_PMIC_InitRegulators();
/* Configure VREFBUF */
__HAL_RCC_VREF_CLK_ENABLE();
HAL_SYSCFG_VREFBUF_HighImpedanceConfig(SYSCFG_VREFBUF_HIGH_IMPEDANCE_DISABLE);
HAL_SYSCFG_EnableVREFBUF();
}
/* IPCC initialisation */
MX_IPCC_Init();
/* OpenAmp initialisation ---------------------------------*/
MX_OPENAMP_Init(RPMSG_REMOTE, NULL);
MX_GPIO_Init();
MX_TIM2_Init();
COM_InitTypeDef uart_init;
uart_init.BaudRate = 115200;
uart_init.Parity = UART_PARITY_NONE;
uart_init.StopBits = UART_STOPBITS_1;
uart_init.WordLength = UART_WORDLENGTH_8B;
uart_init.HwFlowCtl = UART_HWCONTROL_NONE;
BSP_COM_Init(COM2, &uart_init);
BSP_COM_SelectLogPort(COM2);
log_info("Cortex-M4 boot successful with STM32Cube FW version: v%ld.%ld.%ld \r\n",
((HAL_GetHalVersion() >> 24) & 0x000000FF),
((HAL_GetHalVersion() >> 16) & 0x000000FF),
((HAL_GetHalVersion() >> 8) & 0x000000FF));
log_info("MAX_BUFFER_SIZE: %d\n", MAX_BUFFER_SIZE);
/* USER CODE BEGIN SysInit */
/* USER CODE END SysInit */
/* Initialize all configured peripherals */
/* USER CODE BEGIN 2 */
/*
* Create Virtual UART device
* defined by a rpmsg channel attached to the remote device
*/
log_info("Virtual UART0 OpenAMP-rpmsg channel creation\r\n");
if (VIRT_UART_Init(&virt_uart0.huart) != VIRT_UART_OK) {
log_err("VIRT_UART_Init UART0 failed.\r\n");
Error_Handler(__FILE__, __LINE__);
}
log_info("Virtual UART1 OpenAMP-rpmsg channel creation\r\n");
if (VIRT_UART_Init(&virt_uart1.huart) != VIRT_UART_OK) {
log_err("VIRT_UART_Init UART1 failed.\r\n");
Error_Handler(__FILE__, __LINE__);
}
/*Need to register callback for message reception by channels*/
if(VIRT_UART_RegisterCallback(&virt_uart0.huart, VIRT_UART_RXCPLT_CB_ID, virt_uart0.cbk) != VIRT_UART_OK)
{
Error_Handler(__FILE__, __LINE__);
}
if(VIRT_UART_RegisterCallback(&virt_uart1.huart, VIRT_UART_RXCPLT_CB_ID, virt_uart1.cbk) != VIRT_UART_OK)
{
Error_Handler(__FILE__, __LINE__);
}
for (int i = 0; i < ADC_VALUE_BUFFER_SIZE; i++)
{
emontx_values.AC[i] = 0;
emontx_values.CT1[i] = 0;
emontx_values.CT2[i] = 0;
emontx_values.CT3[i] = 0;
emontx_values.CT4[i] = 0;
}
  /* Enable GPIOH clock */
__HAL_RCC_GPIOH_CLK_ENABLE();
/* Configure PH.6 pin as output */
GPIO_InitTypeDef GPIO_InitStruct;
GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP;
GPIO_InitStruct.Pull = GPIO_NOPULL;
GPIO_InitStruct.Pin = GPIO_PIN_6;
PERIPH_LOCK(GPIOH);
HAL_GPIO_Init(GPIOH, &GPIO_InitStruct);
HAL_GPIO_WritePin(GPIOH, GPIO_PIN_6, GPIO_PIN_SET);
PERIPH_UNLOCK(GPIOH);
/* Configure ADC */
/* Note: This function configures the ADC but does not enable it. */
/* Only ADC internal voltage regulator is enabled by function */
/* "HAL_ADC_Init()". */
/* To activate ADC (ADC enable and ADC conversion start), use */
/* function "HAL_ADC_Start_xxx()". */
/* This is intended to optimize power consumption: */
/* 1. ADC configuration can be done once at the beginning */
/* (ADC disabled, minimal power consumption) */
/* 2. ADC enable (higher power consumption) can be done just before */
/* ADC conversions needed. */
/* Then, possible to perform successive ADC activation and */
/* deactivation without having to set again ADC configuration. */
Configure_ADC();
/* Run the ADC linear calibration in single-ended mode */
if (HAL_ADCEx_Calibration_Start(&hadc1,ADC_CALIB_OFFSET_LINEARITY, ADC_SINGLE_ENDED) != HAL_OK)
{
/* Calibration Error */
Error_Handler(__FILE__, __LINE__);
}
if (HAL_ADCEx_Calibration_Start(&hadc2,ADC_CALIB_OFFSET_LINEARITY, ADC_SINGLE_ENDED) != HAL_OK)
{
/* Calibration Error */
Error_Handler(__FILE__, __LINE__);
}
/* USER CODE END 2 */
/* Infinite loop */
/* USER CODE BEGIN WHILE */
/*## Enable Timer ########################################################*/
if (HAL_TIM_Base_Start(&htim2) != HAL_OK)
{
/* Counter enable error */
Error_Handler(__FILE__, __LINE__);
}
/*## Start ADC conversions ###############################################*/
/* Start ADC group regular conversion with DMA */
// if (HAL_ADC_Start_DMA(&hadc1,
// (uint32_t *)adc1_values,
// ADC1_BUFFER_SIZE
// ) != HAL_OK)
// {
// /* ADC conversion start error */
// Error_Handler(__FILE__, __LINE__);
// }
if (HAL_ADC_Start_DMA(&hadc2,
(uint32_t *)adc2_values,
ADC2_BUFFER_SIZE
) != HAL_OK)
{
/* ADC conversion start error */
Error_Handler(__FILE__, __LINE__);
}
log_info("Started ADC/DMA\n");
/* USER CODE END 2 */
/* Infinite loop */
/* USER CODE BEGIN WHILE */
while (1)
{
OPENAMP_check_for_message();
/* USER CODE END WHILE */
if (virt_uart0.tx_status) {
virt_uart0.tx_status = RESET;
VIRT_UART_Transmit(&virt_uart0.huart, (uint8_t*) virt_uart0.tx_buffer, virt_uart0.tx_size);
}
if (virt_uart1.tx_status) {
virt_uart1.tx_status = RESET;
VIRT_UART_Transmit(&virt_uart1.huart, (uint8_t*) virt_uart1.tx_buffer, virt_uart1.tx_size);
}
/* USER CODE BEGIN 3 */
}
/* USER CODE END 3 */
}
/**
* @brief System Clock Configuration
* @retval None
*/
void SystemClock_Config(void)
{
RCC_OscInitTypeDef RCC_OscInitStruct = {0};
RCC_ClkInitTypeDef RCC_ClkInitStruct = {0};
/**Configure LSE Drive Capability
*/
HAL_PWR_EnableBkUpAccess();
__HAL_RCC_LSEDRIVE_CONFIG(RCC_LSEDRIVE_MEDIUMHIGH);
/**Initializes the CPU, AHB and APB busses clocks
*/
RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_HSI|RCC_OSCILLATORTYPE_HSE
|RCC_OSCILLATORTYPE_LSE;
RCC_OscInitStruct.HSEState = RCC_HSE_BYPASS_DIG;
RCC_OscInitStruct.LSEState = RCC_LSE_ON;
RCC_OscInitStruct.HSIState = RCC_HSI_ON;
RCC_OscInitStruct.HSICalibrationValue = 16;
RCC_OscInitStruct.HSIDivValue = RCC_HSI_DIV1;
/**PLL1 Config
*/
RCC_OscInitStruct.PLL.PLLState = RCC_PLL_ON;
RCC_OscInitStruct.PLL.PLLSource = RCC_PLL12SOURCE_HSE;
RCC_OscInitStruct.PLL.PLLM = 3;
RCC_OscInitStruct.PLL.PLLN = 81;
RCC_OscInitStruct.PLL.PLLP = 1;
RCC_OscInitStruct.PLL.PLLQ = 1;
RCC_OscInitStruct.PLL.PLLR = 1;
RCC_OscInitStruct.PLL.PLLFRACV = 0x800;
RCC_OscInitStruct.PLL.PLLMODE = RCC_PLL_FRACTIONAL;
RCC_OscInitStruct.PLL.RPDFN_DIS = RCC_RPDFN_DIS_DISABLED;
RCC_OscInitStruct.PLL.TPDFN_DIS = RCC_TPDFN_DIS_DISABLED;
/**PLL2 Config
*/
RCC_OscInitStruct.PLL2.PLLState = RCC_PLL_ON;
RCC_OscInitStruct.PLL2.PLLSource = RCC_PLL12SOURCE_HSE;
RCC_OscInitStruct.PLL2.PLLM = 3;
RCC_OscInitStruct.PLL2.PLLN = 66;
RCC_OscInitStruct.PLL2.PLLP = 2;
RCC_OscInitStruct.PLL2.PLLQ = 1;
RCC_OscInitStruct.PLL2.PLLR = 1;
RCC_OscInitStruct.PLL2.PLLFRACV = 0x1400;
RCC_OscInitStruct.PLL2.PLLMODE = RCC_PLL_FRACTIONAL;
RCC_OscInitStruct.PLL2.RPDFN_DIS = RCC_RPDFN_DIS_DISABLED;
RCC_OscInitStruct.PLL2.TPDFN_DIS = RCC_TPDFN_DIS_DISABLED;
/**PLL3 Config
*/
RCC_OscInitStruct.PLL3.PLLState = RCC_PLL_ON;
RCC_OscInitStruct.PLL3.PLLSource = RCC_PLL3SOURCE_HSE;
RCC_OscInitStruct.PLL3.PLLM = 2;
RCC_OscInitStruct.PLL3.PLLN = 34;
RCC_OscInitStruct.PLL3.PLLP = 2;
RCC_OscInitStruct.PLL3.PLLQ = 17;
RCC_OscInitStruct.PLL3.PLLR = 37;
RCC_OscInitStruct.PLL3.PLLRGE = RCC_PLL3IFRANGE_1;
RCC_OscInitStruct.PLL3.PLLFRACV = 0x1A04;
RCC_OscInitStruct.PLL3.PLLMODE = RCC_PLL_FRACTIONAL;
RCC_OscInitStruct.PLL3.RPDFN_DIS = RCC_RPDFN_DIS_DISABLED;
RCC_OscInitStruct.PLL3.TPDFN_DIS = RCC_TPDFN_DIS_DISABLED;
/**PLL4 Config
*/
RCC_OscInitStruct.PLL4.PLLState = RCC_PLL_ON;
RCC_OscInitStruct.PLL4.PLLSource = RCC_PLL4SOURCE_HSE;
RCC_OscInitStruct.PLL4.PLLM = 4;
RCC_OscInitStruct.PLL4.PLLN = 99;
RCC_OscInitStruct.PLL4.PLLP = 6;
RCC_OscInitStruct.PLL4.PLLQ = 8;
RCC_OscInitStruct.PLL4.PLLR = 8;
RCC_OscInitStruct.PLL4.PLLRGE = RCC_PLL4IFRANGE_0;
RCC_OscInitStruct.PLL4.PLLFRACV = 0;
RCC_OscInitStruct.PLL4.PLLMODE = RCC_PLL_INTEGER;
RCC_OscInitStruct.PLL4.RPDFN_DIS = RCC_RPDFN_DIS_DISABLED;
RCC_OscInitStruct.PLL4.TPDFN_DIS = RCC_TPDFN_DIS_DISABLED;
if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK)
{
Error_Handler(__FILE__, __LINE__);
}
/**RCC Clock Config
*/
RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_ACLK
|RCC_CLOCKTYPE_PCLK1|RCC_CLOCKTYPE_PCLK2
|RCC_CLOCKTYPE_PCLK3|RCC_CLOCKTYPE_PCLK4
|RCC_CLOCKTYPE_PCLK5|RCC_CLOCKTYPE_MPU;
RCC_ClkInitStruct.MPUInit.MPU_Clock = RCC_MPUSOURCE_PLL1;
RCC_ClkInitStruct.MPUInit.MPU_Div = RCC_MPU_DIV2;
RCC_ClkInitStruct.AXISSInit.AXI_Clock = RCC_AXISSOURCE_PLL2;
RCC_ClkInitStruct.AXISSInit.AXI_Div = RCC_AXI_DIV1;
RCC_ClkInitStruct.MCUInit.MCU_Clock = RCC_MCUSSOURCE_PLL3;
RCC_ClkInitStruct.MCUInit.MCU_Div = RCC_MCU_DIV1;
RCC_ClkInitStruct.APB4_Div = RCC_APB4_DIV2;
RCC_ClkInitStruct.APB5_Div = RCC_APB5_DIV4;
RCC_ClkInitStruct.APB1_Div = RCC_APB1_DIV2;
RCC_ClkInitStruct.APB2_Div = RCC_APB2_DIV2;
RCC_ClkInitStruct.APB3_Div = RCC_APB3_DIV2;
if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct) != HAL_OK)
{
Error_Handler(__FILE__, __LINE__);
}
/**Set the HSE division factor for RTC clock
*/
__HAL_RCC_RTC_HSEDIV(24);
}
/**
* @brief IPPC Initialization Function
* @param None
* @retval None
*/
static void MX_IPCC_Init(void)
{
hipcc.Instance = IPCC;
if (HAL_IPCC_Init(&hipcc) != HAL_OK)
{
Error_Handler(__FILE__, __LINE__);
}
}
/* TIM2 init function */
static void MX_TIM2_Init(void)
{
TIM_ClockConfigTypeDef sClockSourceConfig;
TIM_MasterConfigTypeDef sMasterConfig;
htim2.Instance = TIM2;
htim2.Init.Prescaler = 100;
htim2.Init.CounterMode = TIM_COUNTERMODE_UP;
htim2.Init.Period = 97999;
htim2.Init.ClockDivision = TIM_CLOCKDIVISION_DIV4;
htim2.Init.AutoReloadPreload = TIM_AUTORELOAD_PRELOAD_DISABLE;
if (HAL_TIM_Base_Init(&htim2) != HAL_OK)
{
Error_Handler(__FILE__, __LINE__);
}
sClockSourceConfig.ClockSource = TIM_CLOCKSOURCE_INTERNAL;
if (HAL_TIM_ConfigClockSource(&htim2, &sClockSourceConfig) != HAL_OK)
{
Error_Handler(__FILE__, __LINE__);
}
sMasterConfig.MasterOutputTrigger = TIM_TRGO_UPDATE;
sMasterConfig.MasterSlaveMode = TIM_MASTERSLAVEMODE_DISABLE;
if (HAL_TIMEx_MasterConfigSynchronization(&htim2, &sMasterConfig) != HAL_OK)
{
Error_Handler(__FILE__, __LINE__);
}
}
static void MX_GPIO_Init(void)
{
/* GPIO Ports Clock Enable */
__HAL_RCC_GPIOF_CLK_ENABLE();
}
/* USER CODE BEGIN 4 */
void VIRT_UART0_RxCpltCallback(VIRT_UART_HandleTypeDef *huart)
{
/* copy received msg in a variable to sent it back to master processor in main infinite loop*/
uint16_t recv_size = huart->RxXferSize < MAX_BUFFER_SIZE? huart->RxXferSize : MAX_BUFFER_SIZE-1;
struct packet* in = (struct packet*) &huart->pRxBuffPtr[0];
if (in->preamble == PREAMBLE) {
in->preamble = 0;
virt_uart0_expected_nbytes = in->length;
log_info("length: %d\n", virt_uart0_expected_nbytes);
}
virt_uart0.rx_size += recv_size;
log_info("UART0: %d/%d\n", virt_uart0.rx_size, virt_uart0_expected_nbytes);
if (virt_uart0.rx_size >= virt_uart0_expected_nbytes) {
virt_uart0.rx_size = 0;
virt_uart0.tx_buffer[0] = virt_uart0_expected_nbytes & 0xff;
virt_uart0.tx_buffer[1] = (virt_uart0_expected_nbytes >> 8) & 0xff;
log_info("UART0 resp: %d\n", virt_uart0_expected_nbytes);
virt_uart0_expected_nbytes = 0;
virt_uart0.tx_size = 2;
virt_uart0.tx_status = SET;
// huart->RxXferSize = 0;
}
}
void VIRT_UART1_RxCpltCallback(VIRT_UART_HandleTypeDef *huart)
{
/* copy received msg in a variable to sent it back to master processor in main infinite loop*/
uint16_t recv_size = huart->RxXferSize < MAX_BUFFER_SIZE? huart->RxXferSize : MAX_BUFFER_SIZE-1;
struct packet* in = (struct packet*) &huart->pRxBuffPtr[0];
if (in->preamble == PREAMBLE) {
}
virt_uart1.rx_size += recv_size;
log_info("UART0: %d/%d\n", recv_size, virt_uart1.rx_size);
if (virt_uart1.rx_size >= 512) {
virt_uart1.tx_buffer[1] = (virt_uart1.rx_size >> 8) & 0xff;
virt_uart1.tx_buffer[0] = virt_uart1.rx_size & 0xff;
virt_uart1.rx_size = 0;
virt_uart1.tx_size = 2;
virt_uart1.tx_status = SET;
}
}
/**
* @brief Configure ADC (ADC instance: ADCx) and GPIO used by ADC channels.
* Configuration of GPIO:
* - Pin: PA.04 (on this STM32 device, ADC2 channel 16 is mapped on this GPIO)
* - Mode: analog
* Configuration of ADC:
* - Common to several ADC:
* - Conversion clock: Synchronous from PCLK
* - Internal path: None (default configuration from reset state)
* - Multimode
* Feature not used: all parameters let to default configuration from reset state
* - Mode Independent (default configuration from reset state)
* - DMA transfer: Disabled (default configuration from reset state)
* - Delay sampling phases 1 ADC clock cycle (default configuration from reset state)
* - ADC instance
* - Resolution: 12 bits (default configuration from reset state)
* - Data alignment: right aligned (default configuration from reset state)
* - Low power mode: disabled (default configuration from reset state)
* - Offset: none (default configuration from reset state)
* - Group regular
* - Trigger source: SW start
* - Trigger edge: not applicable with SW start
* - Continuous mode: single conversion (default configuration from reset state)
* - DMA transfer: enabled, unlimited requests
* - Overrun: data overwritten
* - Sequencer length: disabled: 1 rank (default configuration from reset state)
* - Sequencer discont: disabled: sequence done in 1 scan (default configuration from reset state)
* - Sequencer rank 1: ADCx ADCx_CHANNELa
* - Group injected
* Feature not used: all parameters let to default configuration from reset state
* - Trigger source: SW start (default configuration from reset state)
* - Trigger edge: not applicable with SW start
* - Auto injection: disabled (default configuration from reset state)
* - Contexts queue: disabled (default configuration from reset state)
* - Sequencer length: disabled: 1 rank (default configuration from reset state)
* - Sequencer discont: disabled: sequence done in 1 scan (default configuration from reset state)
* - Sequencer rank 1: first channel available (default configuration from reset state)
* - Channel
* - Sampling time: ADCx ADCx_CHANNELa set to sampling time 160.5 ADC clock cycles (on this STM32 serie, sampling time is channel wise)
* - Differential mode: single ended (default configuration from reset state)
* - Analog watchdog
* Feature not used: all parameters let to default configuration from reset state
* - AWD number: 1
* - Monitored channels: none (default configuration from reset state)
* - Threshold high: 0x000 (default configuration from reset state)
* - Threshold low: 0xFFF (default configuration from reset state)
* - Oversampling
* Feature not used: all parameters let to default configuration from reset state
* - Scope: none (default configuration from reset state)
* - Discontinuous mode: disabled (default configuration from reset state)
* - Ratio: 2 (default configuration from reset state)
* - Shift: none (default configuration from reset state)
* - Interruptions
* None: with HAL driver, ADC interruptions are set using
* function "HAL_ADC_start_xxx()".
* @note Using HAL driver, configuration of GPIO used by ADC channels,
* NVIC and clock source at top level (RCC)
* are not implemented into this function,
* must be implemented into function "HAL_ADC_MspInit()".
* @param None
* @retval None
*/
__STATIC_INLINE void Configure_ADC(void)
{
ADC_ChannelConfTypeDef sConfig;
/*## Configuration of ADC ##################################################*/
/*## Configuration of ADC hierarchical scope: ##############################*/
/*## common to several ADC, ADC instance, ADC group regular ###############*/
log_info("Starting ADC configuration...\n");
/* Set ADC instance of HAL ADC handle hadc */
hadc1.Instance = ADC1;
/* Configuration of HAL ADC handle init structure: */
/* parameters of scope ADC instance and ADC group regular. */
/* Note: On this STM32 serie, ADC group regular sequencer is */
/* fully configurable: sequencer length and each rank */
/* affectation to a channel are configurable. */
hadc1.Init.ClockPrescaler = ADC_CLOCK_SYNC_PCLK_DIV2;
hadc1.Init.Resolution = ADC_RESOLUTION_12B;
  hadc1.Init.ScanConvMode = ADC_SCAN_ENABLE;               /* Sequencer enabled: several ranked channels are converted per trigger */
  hadc1.Init.EOCSelection = ADC_EOC_SINGLE_CONV;
  hadc1.Init.LowPowerAutoWait = DISABLE;
  hadc1.Init.ContinuousConvMode = DISABLE;                  /* Continuous mode disabled: one sequence per conversion trigger */
  hadc1.Init.NbrOfConversion = 3;                           /* Number of ranks in the regular conversion sequence */
  hadc1.Init.DiscontinuousConvMode = ENABLE;                /* Convert the sequence in discontinuous sub-groups */
  hadc1.Init.NbrOfDiscConversion = 1;                       /* One conversion per discontinuous sub-group */
  hadc1.Init.ExternalTrigConv = ADC_EXTERNALTRIG_T2_TRGO;   /* Conversion start triggered by TIM2 TRGO */
  hadc1.Init.ExternalTrigConvEdge = ADC_EXTERNALTRIGCONVEDGE_RISING; /* Trigger on the rising edge of the external event */
hadc1.Init.ConversionDataManagement = ADC_CONVERSIONDATA_DMA_CIRCULAR;
hadc1.Init.Overrun = ADC_OVR_DATA_OVERWRITTEN;
hadc1.Init.OversamplingMode = DISABLE;
// if (HAL_ADC_DeInit(&hadc1) != HAL_OK)
// {
// /* ADC Deinitialization error */
// Error_Handler(__FILE__, __LINE__);
// }
// if (HAL_ADC_Init(&hadc1) != HAL_OK)
// {
// /* ADC initialization error */
// Error_Handler(__FILE__, __LINE__);
// }
memcpy(&hadc2, &hadc1, sizeof(ADC_HandleTypeDef));
hadc2.Instance = ADC2;
hadc2.Init.NbrOfConversion = 4;
if (HAL_ADC_DeInit(&hadc2) != HAL_OK)
{
/* ADC Deinitialization error */
Error_Handler(__FILE__, __LINE__);
}
if (HAL_ADC_Init(&hadc2) != HAL_OK)
{
/* ADC initialization error */
Error_Handler(__FILE__, __LINE__);
}
/*## Configuration of ADC hierarchical scope: ##############################*/
/*## ADC group injected and channels mapped on group injected ##############*/
/* Note: ADC group injected not used and not configured in this example. */
/* Refer to other ADC examples using this feature. */
/* Note: Call of the functions below are commented because they are */
/* useless in this example: */
/* setting corresponding to default configuration from reset state. */
/*## Configuration of ADC hierarchical scope: ##############################*/
/*## channels mapped on group regular ##############################*/
/* Configuration of channel on ADCx regular group on sequencer rank 1 */
/* Note: On this STM32 serie, ADC group regular sequencer is */
/* fully configurable: sequencer length and each rank */
/* affectation to a channel are configurable. */
/* Note: Considering IT occurring after each ADC conversion */
/* (IT by ADC group regular end of unitary conversion), */
/* select sampling time and ADC clock with sufficient */
/* duration to not create an overhead situation in IRQHandler. */
sConfig.SamplingTime = ADC_SAMPLETIME_810CYCLES_5; /* ADC channel sampling time */
sConfig.SingleDiff = ADC_SINGLE_ENDED; /* ADC channel differential mode */
sConfig.OffsetNumber = ADC_OFFSET_NONE; /* ADC channel affected to offset number */
sConfig.Offset = 0; /* Parameter discarded because offset correction is disabled */
sConfig.Rank = ADC_REGULAR_RANK_1; /* ADC group regular rank in which is mapped the selected ADC channel */
sConfig.Channel = adc_channels[EMONTX_CH_AC].channel; /* ADC channel selection */
if (HAL_ADC_ConfigChannel(&hadc2, &sConfig) != HAL_OK)
{
/* Channel Configuration Error */
Error_Handler(__FILE__, __LINE__);
}
sConfig.Rank = ADC_REGULAR_RANK_2; /* ADC group regular rank in which is mapped the selected ADC channel */
sConfig.Channel = adc_channels[EMONTX_CH_CT1].channel; /* ADC channel selection */
if (HAL_ADC_ConfigChannel(&hadc2, &sConfig) != HAL_OK)
{
/* Channel Configuration Error */
Error_Handler(__FILE__, __LINE__);
}
sConfig.Rank = ADC_REGULAR_RANK_3; /* ADC group regular rank in which is mapped the selected ADC channel */
sConfig.Channel = adc_channels[EMONTX_CH_CT2].channel; /* ADC channel selection */
if (HAL_ADC_ConfigChannel(&hadc2, &sConfig) != HAL_OK)
{
/* Channel Configuration Error */
Error_Handler(__FILE__, __LINE__);
}
sConfig.Rank = ADC_REGULAR_RANK_4; /* ADC group regular rank in which is mapped the selected ADC channel */
sConfig.Channel = adc_channels[EMONTX_CH_CT3].channel; /* ADC channel selection */
if (HAL_ADC_ConfigChannel(&hadc2, &sConfig) != HAL_OK)
{
/* Channel Configuration Error */
Error_Handler(__FILE__, __LINE__);
}
// sConfig.Rank = ADC_REGULAR_RANK_3; /* ADC group regular rank in which is mapped the selected ADC channel */
// sConfig.Channel = adc_channels[EMONTX_CH_CT4].channel; /* ADC channel selection */
// if (HAL_ADC_ConfigChannel(&hadc1, &sConfig) != HAL_OK)
// {
// /* Channel Configuration Error */
// Error_Handler(__FILE__, __LINE__);
// }
log_info("ADC configuration done...\n");
/*## Configuration of ADC hierarchical scope: multimode ####################*/
/* Note: ADC multimode not used and not configured in this example. */
/* Refer to other ADC examples using this feature. */
/*## Configuration of ADC transversal scope: analog watchdog ###############*/
/* Note: ADC analog watchdog not used and not configured in this example. */
/* Refer to other ADC examples using this feature. */
/*## Configuration of ADC transversal scope: oversampling ##################*/
/* Note: ADC oversampling not used and not configured in this example. */
/* Refer to other ADC examples using this feature. */
}
/**
* @brief Conversion complete callback in non blocking mode
* @param hadc: ADC handle
* @note This example shows a simple way to report end of conversion
* and get conversion result. You can add your own implementation.
* @retval None
*/
void HAL_ADC_ConvCpltCallback(ADC_HandleTypeDef *hadc)
{
/* Update status variable of DMA transfer */
ubDmaTransferStatus = 1;
/* Set LED depending on DMA transfer status */
/* - Turn-on if DMA transfer is completed */
/* - Turn-off if DMA transfer is not completed */
BSP_LED_On(LED7);
if (hadc->Instance == ADC1) {
sprintf((char*)virt_uart0.tx_buffer, "ADC[1.2]:%d,%d,%d,%d\n",
adc2_values[0], adc2_values[1], adc2_values[2], adc2_values[3]);
printf((char*)virt_uart0.tx_buffer);
}
else if (hadc->Instance == ADC2) {
sprintf((char*)virt_uart0.tx_buffer, "ADC[2.2]:%d,%d,%d,%d\n",
adc2_values[0], adc2_values[1], adc2_values[2], adc2_values[3]);
printf((char*)virt_uart0.tx_buffer);
}
virt_uart0.rx_size = 0;
virt_uart0_expected_nbytes = 0;
virt_uart0.tx_size = strlen((char*)virt_uart0.tx_buffer);
virt_uart0.tx_status = SET;
}
/**
* @brief Conversion DMA half-transfer callback in non blocking mode
* @note This example shows a simple way to report end of conversion
* and get conversion result. You can add your own implementation.
* @retval None
*/
void HAL_ADC_ConvHalfCpltCallback(ADC_HandleTypeDef *hadc)
{
/* Update status variable of DMA transfer */
ubDmaTransferStatus = 0;
/* Set LED depending on DMA transfer status */
/* - Turn-on if DMA transfer is completed */
/* - Turn-off if DMA transfer is not completed */
BSP_LED_Off(LED7);
if (hadc->Instance == ADC1) {
sprintf((char*)virt_uart0.tx_buffer, "ADC[1.1]:%d,%d,%d,%d\n",
adc2_values[0], adc2_values[1], adc2_values[2], adc2_values[3]);
printf((char*)virt_uart0.tx_buffer);
}
else if (hadc->Instance == ADC2) {
sprintf((char*)virt_uart0.tx_buffer, "ADC[2.1]:%d,%d,%d,%d\n",
adc2_values[0], adc2_values[1], adc2_values[2], adc2_values[3]);
printf((char*)virt_uart0.tx_buffer);
}
virt_uart0.rx_size = 0;
virt_uart0_expected_nbytes = 0;
virt_uart0.tx_size = strlen((char*)virt_uart0.tx_buffer);
virt_uart0.tx_status = SET;
}
/**
* @brief ADC error callback in non blocking mode
* (ADC conversion with interruption or transfer by DMA)
* @param hadc: ADC handle
* @retval None
*/
void HAL_ADC_ErrorCallback(ADC_HandleTypeDef *hadc)
{
/* In case of ADC error, call main error handler */
Error_Handler(__FILE__, __LINE__);
}
/* USER CODE END 4 */
/**
* @brief This function is executed in case of error occurrence.
* @param file: The file name as string.
* @param line: The line in file as a number.
* @retval None
*/
void Error_Handler(char * file, int line)
{
/* USER CODE BEGIN Error_Handler_Debug */
printf("Error_Handler: %s:%d", file, line);
while(1)
{
/* Toggle LED7 */
BSP_LED_Off(LED7);
HAL_Delay(800);
BSP_LED_On(LED7);
HAL_Delay(10);
BSP_LED_Off(LED7);
HAL_Delay(180);
BSP_LED_On(LED7);
HAL_Delay(10);
}
/* USER CODE END Error_Handler_Debug */
}
#ifdef USE_FULL_ASSERT
/**
* @brief Reports the name of the source file and the source line number
* where the assert_param error has occurred.
* @param file: pointer to the source file name
* @param line: assert_param error line source number
* @retval None
*/
void assert_failed(uint8_t* file, uint32_t line)
{
/* USER CODE BEGIN 6 */
log_err("OOOps: file %s, line %d\r\n", __FILE__, __LINE__);
/* USER CODE END 6 */
}
#endif /* USE_FULL_ASSERT */
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
|
7965daf9b61b3bfb11b44d419e1bc2cd4e6d8a91
|
[
"Markdown",
"C"
] | 3
|
C
|
hongshui3000/stm32mp1-rpmsg-adcsampler
|
16bba2e1f56e15e936638232a81bbeca6b1c260a
|
9ecfb680baa0d3f4e286d8e0e869d638a1806563
|
refs/heads/master
|
<file_sep>#/bin/python
print("This is a test line")
|
fae598147b44e3d47309518cf44cb40568678787
|
[
"Python"
] | 1
|
Python
|
Softlexicon/Hello-World
|
3ea567399927aa1f2c49a8b689a126616c1ca6c9
|
ae775e74dedf6a748eeda28d9e016f4245a174c4
|
refs/heads/main
|
<repo_name>jerryshadb/AoC2020<file_sep>/advent_of_code/T3/t3.py
#part one
#A for loop walks towards the end of the input, one step at a time (parameter "down").
#Check whether the current position in the input line holds a tree. If it does, increment the tree counter. Then add 3 to the position (parameter "right"), i.e. how many steps we move to the right on the "slope".
#Keep doing this until the whole list has been traversed, i.e. the "slope" has been ridden down. At the end the remaining position is checked as well.
#So: go one down and three right, one down and three right, until the whole slope has been ridden down = the list has been read.
def day3teht1(right=3, down=1):
sijainti = 0
puidenMaara = 0
with open('advent_of_code/T3/t3_input.txt') as f:
lines = f.readlines()
for i in range(0, len(lines), down):
line = lines[i].strip()
if '#' == line[sijainti]:
puidenMaara += 1
sijainti += right
sijainti = sijainti % len(line)
return puidenMaara
#part 2
#Probably fairly self-explanatory.
def day3teht2():
return day3teht1(1,1) * day3teht1(5,1) * day3teht1(7,1) * day3teht1(1,2) * day3teht1()
print(f"Part 1 solution: {day3teht1()}")
print(f"Part 2 solution: {day3teht2()}")
<file_sep>/advent_of_code/T4/t4.py
import re
#part1
def t4part1():
with open("advent_of_code/T4/t4_input.txt",'r') as f:
lista = f.read().split('\n\n') #muunnetaan tekstifilu listaksi, käsittelyn helpottamiseksi
lista = [x.replace('\n', ' ').split() for x in lista] #muunnetaan listan datat omiksi listoikseen
passit = list() #luodaan lista johon tiedot talletetaan hajautustauluina
for hlö in lista:
passit.append(dict(data.split(":") for data in hlö)) #lista hajautustauluja passien datoista.
passit = [x for x in passit if len(x.keys()) == 8 or len(x) == 7 and 'cid' not in x.keys()] #tehtävänannon kriteerien mukaan, passi on ok jos siellä on vaadittu data
# CID (Country ID) sai kuitenkin puuttua, joten jos passissa on vain 7 tietoa ja puuttuva on CID
# on passi edelleen ok. Muussa tapauksessa se ei ole ok.
return len(passit) #tehtävän vastaus eli listan passit jotka menee kriteereistä läpi
#part 2
def t4part2():
    #shamelessly copy part 1's code so we keep the already-accepted passports
    with open("advent_of_code/T4/t4_input.txt",'r') as f:
        lista = f.read().split('\n\n')
    lista = [x.replace('\n', ' ').split() for x in lista]
    passit = list()
    for hlö in lista:
        passit.append(dict(data.split(":") for data in hlö))
    passit = [x for x in passit if len(x.keys()) == 8 or len(x) == 7 and 'cid' not in x.keys()]
    #the shamelessness ends now.
    okPassit = list()
    for henkilö in passit:
        if (1920 <= int(henkilö['byr']) <= 2002
            and (2010 <= int(henkilö['iyr']) <= 2020)
            and (2020 <= int(henkilö['eyr']) <= 2030)
            and
            ((henkilö['hgt'][-2:] == 'cm' and 150 <= int(henkilö['hgt'][:-2]) <= 193)
            or (henkilö['hgt'][-2:] == 'in' and 59 <= int(henkilö['hgt'][:-2]) <= 76)) #re-check with the new parameters
            and (re.match(r'#[\da-f]{6}', henkilö['hcl']))
            and (henkilö['ecl'] in ['amb', 'blu', 'brn', 'gry', 'grn', 'hzl', 'oth'])
            and (re.match(r'\d{9}', henkilö['pid']))):
            okPassit.append(henkilö)
    return len(okPassit) - 1
print(f"Part 1 solution: {t4part1()}")
print(f"Part 2 solution: {t4part2()}")
<file_sep>/advent_of_code/T2/t2.py
a = list()
part1 = 0
part2 = 0
with open("advent_of_code/T2/t2_input.txt", 'rb') as f:
lines = [line.decode("utf-8") for line in f.readlines()]
for line in lines:
line = line.split(" ")
ranges = line[0]
low = int(ranges.split("-")[0])
        high = int(ranges.split("-")[1]) #parse the bounds needed for the solution from the puzzle data; yay string manipulation, my suffering is beyond words
letter = line[1].strip(":")
password = line[2].strip("")
counter = 0
for x in range (len(password)):
if password[x] == letter: ##Part 1
counter += 1
if counter >= low and counter <= high:
part1 += 1
if letter == password[low - 1] or letter == password[high-1]:
if password[low-1] != password[high-1]: ##Part 2
part2 += 1
print(f"Part1 solution: {part1}")
print(f"Part2 solution: {part2}")
<file_sep>/advent_of_code/T1/t1.py
def part1():
with open("advent_of_code/T1/t1_input.txt", 'r') as f:
syöte = [int(x) for x in f.readlines()]
for x in syöte:
for y in syöte:
if x + y == 2020:
return x * y
def part2():
with open("advent_of_code/T1/t1_input.txt", 'r') as f:
syöte = [int(x) for x in f.readlines()]
for x in syöte:
for y in syöte:
for j in syöte:
if x + y + j== 2020:
return x * y * j
print(f"Part 1 solution: {part1()}")
print(f"Part 2 solution: {part2()}")<file_sep>/advent_of_code/T5/t5.py
def t5():
with open("advent_of_code/T5/t5_input.txt", 'r') as f:
liput = [x.replace("\n", " ").strip() for x in f.readlines()]
paikkaNumerot = list()
for rivi in liput:
r = rivi[:7]
alku = 0
loppu = 127
row, col = 0, 0
for char in r:
if char == "F":
loppu = int((alku + loppu + 1)/2) - 1
elif char == "B":
alku = int((alku + loppu + 1)/2)
row = alku
r = rivi[7:]
alku = 0
loppu = 7
for char in r:
if char == "L":
loppu = ((alku + loppu + 1)/2) - 1
elif char == "R":
alku = ((alku + loppu +1 )/2)
col = alku
paikkaNro = row * 8 + col
paikkaNumerot.append(paikkaNro)
paikkaNrotJärjestyksessa = sorted(paikkaNumerot)
puuttuvaID = 0
for i in range(int(min(paikkaNumerot)), int(max(paikkaNumerot)+1)):
if i != paikkaNrotJärjestyksessa[int(i-min(paikkaNumerot))-1]:
puuttuvaID = i
return max(paikkaNumerot), puuttuvaID
print(t5())
<file_sep>/README.md
# AoC2020
Assignments that I had the capability/time to complete from the 2020 Advent of Code. Could've probably done more, but had exams 'n' things.
|
3abc64f44bd0eb6209914125ce63a15c606e70e8
|
[
"Markdown",
"Python"
] | 6
|
Python
|
jerryshadb/AoC2020
|
a981ca2e0a256643c62f7f41475b1c2325f3b629
|
a711ea0e49d55cfe9efa1ca9fc8afa99d5659889
|
refs/heads/master
|
<repo_name>baccigalupi/cucaroo<file_sep>/test/unit/support/sample-compiled-feature.js
module.exports = {
document: {
type:"GherkinDocument",
feature: {
type: "Feature",
tags: [],
location: {
line:1,
column:1
},
language: "en",
keyword: "Feature",
name: "Signing in",
description: " As a customer\n I want to sign in to see subscribed content\n So that I am informed",
children: [
{
type: "Scenario",
tags: [],
location: {
line: 6,
column: 3
},
keyword: "Scenario",
name: "Seeing the prompt to sign in",
steps: [
{
type: "Step",
location: {
line: 7,
column: 5
},
keyword: "Given ",
text: "I am a registered user"
},
{
type: "Step",
location: {
line: 8,
column: 5
},
keyword: "And ",
text: "I am logged out"
},
{
type: "Step",
location: {
line: 9,
column: 5
},
keyword: "When ",
text: "I visit the application"
},
{
type: "Step",
location: {
line: 10,
column: 5
},
keyword: "And ",
text: "I click to view protected content"
},
{
type: "Step",
location: {
line: 11,
column: 5
},
keyword: "Then ",
text:"I should be taken to the login page"
}
]
}
]
},
comments:[]
},
scenarios: [
{
tags: [],
name: "Seeing the prompt to sign in",
language: "en",
locations: [
{
line: 6,
column: 3
}
],
steps: [
{
text: "I am a registered user",
arguments: [],
locations: [
{
line: 7,
column: 11
}
]
},
{
text:"I am logged out",
arguments: [],
locations: [
{
line: 8,
column: 9
}
]
},
{
text: "I visit the application",
arguments: [],
locations: [
{
line: 9,
column: 10
}
]
},
{
text: "I click to view protected content",
arguments: [],
locations: [
{
line: 10,
column:9
}
]
},
{
text: "I should be taken to the login page",
arguments: [],
locations: [
{
line: 11,
column: 10
}
]
}
]
}
]
};
<file_sep>/lib/step-value-printer.js
'use strict';
const chalk = require('chalk');
class StepPrinter {
constructor(step, logger) {
this.step = step;
this.log = logger;
}
suggestedDefinition() {
var definition = ` world.${this.step.type.toLowerCase().trim()}('${this.step.text}', function(done) {\n`;
definition += ' done(world.pending());\n'
definition += ' });'
return definition;
}
print(event, indent='') {
this.log.write(`${indent}${this.body(event)}${this.comment(event)}\n`);
}
printSub(event) {
this.print(event, ' ');
}
comment(event) {
let comment = '';
if (this.step.ambiguous()) {
comment += `${this.step.definitionCount} definitions exist for this step.`;
}
if (this.step.notFound()) {
comment += 'no definition found for this step.';
}
if (event === 'pending') {
comment += 'step is pending; halting scenario';
}
if (event === 'fail') {
comment += 'step threw an error; halting scenario';
}
if (event === 'substep-not-run') {
comment += 'substep could not be run'
}
return comment.length ? chalk.red(` // ${comment}`) : comment;
}
body(event) {
let color = this.getColor(event);
return ` ${chalk.cyan(this.step.type)} ${chalk[color](this.step.text)}`;
}
getColor(event) {
let color = 'green';
if (event === 'fail') { color = 'red'; }
if (event === 'pending') { color = 'yellow'; }
if (event === 'not-run' ||
event === 'substep-not-run') { color = 'gray'; }
return color;
}
error(message) {
this.log.error(message);
}
}
module.exports = StepPrinter;
<file_sep>/lib/step-value.js
'use strict';
const inherits = require('util').inherits;
const EventEmitter = require('eventemitter2').EventEmitter2
class StepValue {
constructor(compiled, definitions) {
this.text = compiled.text;
this.type = compiled.keyword;
this.definitionCount = definitions.length;
this.implementation = definitions.length && definitions[0].implementation;
}
valid() {
return this.definitionCount === 1;
}
ambiguous() {
return this.definitionCount > 1;
}
notFound() {
return this.definitionCount == 0;
}
}
inherits(StepValue, EventEmitter);
module.exports = StepValue;
<file_sep>/test/unit/world-test.js
'use strict';
const assert = require('assert');
const World = require('../../lib/world');
describe('World', function() {
let world;
beforeEach(function() {
world = new World({
hello: 'yup, world!',
foo: 'bar',
baz: 'zardoz'
});
});
it('injects all the attributes from the initialization into the object', function() {
assert.equal(world.hello, 'yup, world!');
assert.equal(world.foo, 'bar');
assert.equal(world.baz, 'zardoz');
});
it('adds steps definitions via methods', function() {
world.given(function givenStep(done) { done(); });
world.when(function whenStep(done) { done(); });
world.then(function thenStep(done) { done(); });
world.and(function andStep(done) { done(); });
assert.equal(world.stepDefinitions.steps.length, 4);
});
});
<file_sep>/lib/step-runner.js
'use strict';
class StepRunner {
constructor(step, parent, world) {
this.step = step;
this.world = world;
this.timeout = world.timeout;
this.parent = parent;
}
run(done) {
this.step.emit('started', this.step);
if (this.parent.halted()) {
this.emit();
done();
} else {
this._run(done);
}
}
_run(next) {
this.timer = this.setupTimeout(next);
try {
this.step.implementation((err) => {
clearTimeout(this.timer);
this.emit(err);
next();
});
} catch(err) {
clearTimeout(this.timer);
this.emit(err);
next(err);
}
}
emit(err) {
if (this.parent.halted()) {
this.step.emit('not-run', this.step);
} else if (!err) {
this.step.emit('pass', this.step);
} else if (err.message && err.message.toLowerCase() === 'pending') {
this.step.emit('pending', this.step, err);
} else {
if (!err.message) {
err = new Error(`Step callback received unexpected, non-error value: ${err}`);
}
this.step.emit('fail', this.step, err);
}
this.step.emit('finished', this.step);
}
setupTimeout(callback) {
return setTimeout(() => {
let err = new Error(`Step timed out after ${this.timeout}ms.`);
this.emit(err);
callback();
}, this.timeout);
}
}
module.exports = StepRunner;
<file_sep>/lib/step-definitions.js
'use strict';
const StepDefinition = require('./step-definition');
class StepDefinitions {
constructor() {
this.steps = [];
}
add(matcher, implementation, type) {
type && (this.currentType = type);
this.steps.push(new StepDefinition(matcher, implementation, type || this.currentType));
}
given(matcher, implementation) {
this.add(matcher, implementation, 'Given');
}
when(matcher, implementation) {
this.add(matcher, implementation, 'When');
}
then(matcher, implementation) {
this.add(matcher, implementation, 'Then');
}
and(matcher, implementation) {
this.add(matcher, implementation);
}
size() {
return this.steps.length;
}
matches(text) {
return this.steps.filter((step) => { return step.match(text); });
}
}
module.exports = StepDefinitions;
<file_sep>/test/unit/config-test.js
'use strict';
const assert = require('assert');
const OutputStream = require('./support/output-stream');
const Config = require('../../lib/config');
const Logger = require('../../lib/logger');
const localConfig = require('../../.cucaroo.config');
describe('Config', function() {
let mockStream, outputStream, config;
beforeEach(function() {
mockStream = new OutputStream();
outputStream = mockStream.stream;
config = new Config(outputStream, []);
});
it('should create a logger based on the output stream', function() {
assert(config.logger instanceof Logger);
assert.equal(config.logger.outputStream, outputStream);
});
it('uses the `.cucaroo.config.js` file to locate important directories', function() {
config.load();
let paths = config.paths();
assert.equal(paths.features, localConfig.featuresDirectory);
assert.equal(paths.stepDefinitions, localConfig.stepDefinitions);
assert.equal(paths.suiteSetup, localConfig.suiteSetup);
assert.equal(paths.suiteTeardown, localConfig.suiteTeardown);
});
it('will resort to defaults if the config file is not found', function() {
config.load('not-here.yo');
let paths = config.paths();
let base = config.base;
assert.equal(paths.features, base + '/features');
assert.equal(paths.stepDefinitions, base + '/features/step_definitions');
assert.equal(paths.suiteSetup, base + '/features/support/setup');
assert.equal(paths.suiteTeardown, base + '/features/support/teardown');
});
it('logs a warning if the config file is not found', function() {
config.load('not-here.yo');
let expectedMessage = '`.cucaroo.config.js` config file not found.';
assert(mockStream.cleanOutput().includes(expectedMessage));
});
it('does not log anything if the config file is found', function() {
config.load();
assert.equal('', mockStream.cleanOutput());
});
it('should have a default timeout that is overridden by the file', function() {
config.load();
assert.equal(config.timeout, 200);
config.load('not-here.yo');
assert.equal(config.timeout, 3000);
});
it('should read all the features on load', function() {
config.load();
assert(config.features.length >= 5);
});
it('should require all the step definitions on load', function() {
config.load();
assert(config.stepExports.length >= 5);
});
it('requires the setup and teardown scripts', function() {
config.load();
let setup = require('../features/support/setup');
let teardown = require('../features/support/teardown');
assert.equal(config.setup, setup);
assert.equal(config.teardown, teardown);
});
describe('when passed filters', function() {
it('when one relative file path is passed, it reduces features to that file', function() {
config = new Config(outputStream, ['test/features/success.feature']);
config.load();
assert.equal(config.features.length, 1);
});
});
});
<file_sep>/test/acceptance/failing-feature-test.js
'use strict';
const assert = require('assert');
const OutputStream = require('../unit/support/output-stream');
const Config = require('../../lib/config');
const Runner = require('../../lib/suite-runner');
describe('pending.feature', function() {
let config, runner, mockStream, outputStream, exitCode;
beforeEach(function() {
mockStream = new OutputStream();
outputStream = mockStream.stream;
config = new Config(outputStream, ['test/features/pending.feature']);
config.load();
});
it('returns an error code', function(done) {
Runner.prototype.close = function() {
assert.equal(this.observer.exitCode(), 1);
done();
};
runner = new Runner(config);
runner.run();
});
it('Outputs the feature header', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Feature with pending steps'));
done();
};
runner = new Runner(config);
runner.run();
});
it('Prints comments about which step is pending', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Given The first step is pending by throwing a pending error // step is pending; halting scenario'));
assert(plainOutput.includes('Given The first step is pending by passing a pending error to the callback // step is pending; halting scenario'));
assert(plainOutput.includes('When I throw in a pending step // step is pending; halting scenario'));
done();
};
runner = new Runner(config);
runner.run();
});
it('Outputs each scenario info', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Scenario: Pending via thrown error'));
assert(plainOutput.includes('Scenario: First step is pending via passing an error to the callback'));
assert(plainOutput.includes('Scenario: Later step is pending'));
done();
};
runner = new Runner(config);
runner.run();
});
it('Spits out pending step summary', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Pending steps!!!'));
assert(plainOutput.includes('\'The first step is pending by throwing a pending error\''));
assert(plainOutput.includes('\'The first step is pending by passing a pending error to the callback\''));
assert(plainOutput.includes('\'I throw in a pending step\''));
done();
};
runner = new Runner(config);
runner.run();
});
it('Summarizes correctly', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Some features failed:'));
assert(plainOutput.includes('Features - Passing: 0, Failing: 1'));
assert(plainOutput.includes('Scenarios - Passing: 0, Failing: 3'));
assert(plainOutput.includes('Steps - Passing: 1, Failing: 0'));
assert(plainOutput.includes('Pending: 3'));
done();
};
runner = new Runner(config);
runner.run();
});
});
<file_sep>/lib/step-definition.js
'use strict';
class StepDefinition {
constructor(matcher, implementation, type) {
this.matcher = matcher;
this.implementation = implementation;
this.type = type;
}
match(text) {
return text.match(this.matcher)
}
run(done) {
this.implementation(done);
}
}
module.exports = StepDefinition;
<file_sep>/test/acceptance/success-feature-test.js
'use strict';
const assert = require('assert');
const OutputStream = require('../unit/support/output-stream');
const Config = require('../../lib/config');
const Runner = require('../../lib/suite-runner');
describe('success.feature', function() {
let config, runner, mockStream, outputStream, exitCode;
beforeEach(function() {
mockStream = new OutputStream();
outputStream = mockStream.stream;
config = new Config(outputStream, ['test/features/success.feature']);
config.load();
});
it('runs successfully', function(done) {
Runner.prototype.close = function() {
assert.equal(this.observer.exitCode(), 0);
done();
};
runner = new Runner(config);
runner.run();
});
it('Outputs the feature header', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Feature: Successful feature runs'));
assert(plainOutput.includes('As a BDD developer'));
assert(plainOutput.includes('I want to implement and run cucumber tests'));
assert(plainOutput.includes('So that I have regression testing, and a starting place for talking with product people'));
done();
};
runner = new Runner(config);
runner.run();
});
it('Outputs the scenario information', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Scenario: All is good'));
assert(plainOutput.includes('Given all step definitions are defined'));
assert(plainOutput.includes('When I run the feature'));
assert(plainOutput.includes('And the exit code should be 0'));
done();
};
runner = new Runner(config);
runner.run();
});
it('Spits out a success summary', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('All features passed!'));
assert(plainOutput.includes('Features - Passing: 1, Failing: 0'));
assert(plainOutput.includes('Scenarios - Passing: 1, Failing: 0'));
assert(plainOutput.includes('Steps - Passing: 4, Failing: 0'));
done();
};
runner = new Runner(config);
runner.run();
});
});
<file_sep>/lib/collection.js
'use strict';
class Collection {
constructor(items) {
this.collection = items.map((item) => {
return new this.constructor.ValueClass(item);
});
}
listen(listener) {
this.collection.forEach((item) => {
item.onAny((event, item, err) => {
listener(event, item, err);
});
});
}
map(iterator) {
return this.collection.map(iterator);
}
}
Collection.ValueClass = function() {
throw new Error('Not implemented. Attach the ValueClass to this Collection; k, thanks!');
}
module.exports = Collection;
<file_sep>/bin/cucaroo
#!/usr/bin/env node
'use strict';
const Config = require('../lib/config');
const Runner = require('../lib/suite-runner');
let filters = process.argv.slice(2);
let config = new Config(process.stdout, filters);
config.load();
new Runner(config).run();
<file_sep>/lib/config.js
'use strict';
const path = require('path');
const chalk = require('chalk');
const requireDirectory = require('require-directory');
const loadFeatures = require('./load-features');
const Logger = require('./logger');
class Config {
constructor(outputStream, filters) {
this.outputStream = outputStream;
this.logger = new Config.Logger(outputStream);
this.base = process.cwd();
this.filters = filters || [];
}
load(configPath) {
this.configPath = configPath || path.resolve(this.base, './.cucaroo.config.js');
this.loadFromConfigFile();
this.loadFeatureFiles();
this.loadDefinitions();
this.loadSetupTeardown();
this.timeout = this.fileAttributes.timeout || this.defaultTimeout();
}
loadFromConfigFile() {
this.fileAttributes = {};
let message = '`.cucaroo.config.js` config file not found. Using default values.'
this.tryOrWarn(() => {
this.fileAttributes = this.remap(require(this.configPath));
}, message);
}
loadFeatureFiles() {
this.features = [];
this.tryOrWarn(() => {
this.features = loadFeatures(this.paths().features);
});
if (this.filters.length) {
this.filterFeatures();
}
}
filterFeatures() {
let filters = this.filters.map((path) => {
return this.base + '/' + path;
});
this.features = this.features.filter((feature) => {
return filters.includes(feature.filename);
});
}
loadDefinitions() {
this.stepExports = [];
this.tryOrWarn(() => {
let fileMap = requireDirectory(module, this.paths().stepDefinitions);
this.stepExports = Object.keys(fileMap).map((filename) => {
return fileMap[filename];
});
});
}
loadSetupTeardown() {
this.tryOrWarn(() => {
this.setup = require(this.paths().suiteSetup);
this.teardown = require(this.paths().suiteTeardown);
});
}
remap(attributes) {
return Object.assign(attributes, {
features: attributes.featuresDirectory || path.resolve(this.base, 'features')
});
}
paths() {
return Object.assign(this.defaultPaths(), this.fileAttributes);
}
defaultPaths() {
let featuresDirectory = path.resolve(this.base, 'features');
return {
features: featuresDirectory,
suiteSetup: featuresDirectory + '/support/setup',
suiteTeardown: featuresDirectory + '/support/teardown',
stepDefinitions: featuresDirectory + '/step_definitions'
};
}
defaultTimeout() {
return 3000;
}
tryOrWarn(block, message) {
try {
block();
} catch(e) {
this.logger.warn(message || e.stack);
}
}
}
Config.Logger = Logger;
module.exports = Config;
<file_sep>/lib/feature-collection-runner.js
'use strict';
const inherits = require('util').inherits;
const EventEmitter = require('eventemitter2').EventEmitter2
const Collection = require('./collection');
const Runner = require('./runner');
const CollectionRunner = require('./collection-value-runner');
const ScenarioCollectionRunner = require('./scenario-collection-runner');
// temp
const Logger = require('./logger');
class FeatureValue {
constructor(compiled) {
let feature = compiled.document.feature || {};
this.name = feature.name;
this.description = feature.description;
this.scenarios = feature.children;
}
}
FeatureValue.prototype.eventPrefix = 'feature-';
inherits(FeatureValue, EventEmitter);
class FeatureCollection extends Collection {}
FeatureCollection.ValueClass = FeatureValue;
class FeatureRunner extends Runner {
runChildren(callback) {
let runner = new ScenarioCollectionRunner(this.item.scenarios, this.world);
runner.onAny((event, item, err) => {
this.parent.emit(event, item, err);
});
runner.run(callback);
}
}
FeatureRunner.prototype.eventPrefix = 'feature-';
class FeatureCollectionRunner extends CollectionRunner {}
FeatureCollectionRunner.eventPrefix = 'feature-';
FeatureCollectionRunner.ItemRunner = FeatureRunner;
FeatureCollectionRunner.CollectionClass = FeatureCollection;
module.exports = FeatureCollectionRunner;
<file_sep>/test/features/step_definitions/failing-steps.js
'use strict';
const assert = require('assert');
module.exports = function(world) {
world.given('Things are moving along just fine', function(done) {
done();
});
world.when('And then I make an assertion that aint true', function(done) {
assert(false, 'it isnt like we thought');
done();
});
world.then('I should see a helpful stack trace', function(done) {
done();
});
world.and('the summary at the end reflects those errors', function(done) {
done();
});
world.when('I make an error resulting in a runtime failure', function(done) {
failHard();
});
};
<file_sep>/test/unit/step-definitions-test.js
'use strict';
const assert = require('assert');
const StepDefinitions = require('../../lib/step-definitions');
describe('StepDefinitions', function() {
let definitions, callcount, implementation;
beforeEach(function() {
callcount = 0;
implementation = function(done) {
callcount += 1;
done();
};
definitions = new StepDefinitions();
});
it('allows adding step definitions via special names', function() {
definitions.given('I rise in the morning', implementation);
definitions.when('I make coffee', implementation);
definitions.and('I drink the coffee', implementation);
definitions.then('I will feel good', implementation);
assert.equal(definitions.size(), 4);
let types = definitions.steps.map((def) => {return def.type;});
assert.equal(types[0], 'Given');
assert.equal(types[1], 'When');
assert.equal(types[2], 'When');
assert.equal(types[3], 'Then');
});
it('returns matches', function() {
definitions.given('I go to the home page', implementation);
definitions.when('I wander away', implementation);
definitions.and(/I \w+ to the home page/, implementation);
definitions.then('I will feel good', implementation);
let matches = definitions.matches('I navigate to the home page');
assert.equal(matches.length, 1);
matches = definitions.matches('I go to the home page');
assert.equal(matches.length, 2);
matches = definitions.matches('oh not here!');
assert.equal(matches.length, 0);
});
});
<file_sep>/lib/collection-value-runner.js
'use strict';
const async = require('async');
const inherits = require('util').inherits;
const EventEmitter = require('eventemitter2').EventEmitter2
class CollectionRunner {
constructor(items, world) {
this.collection = this.wrap(items);
this.world = world;
this._halted = this.initialHaltCondition();
}
run(callback) {
this.listen();
async.series(this.runners(), callback);
}
runners() {
return this.collection.map((item) => {
return (next) => {
new this.constructor.ItemRunner(item, this, this.world).run(next);
}
});
}
listen() {
this.collection.listen((event, item, err) => {
if (event === 'fail' || event === 'pending' || event === 'not-run') {
this._halted = true;
}
this.emit(`${this.eventPrefix}${event}`, item, err);
});
}
halted() {
return this._halted;
}
initialHaltCondition() {
return false;
}
wrap(items) {
return new this.constructor.CollectionClass(items)
}
}
inherits(CollectionRunner, EventEmitter);
CollectionRunner.prototype.eventPrefix = '';
CollectionRunner.ItemRunner = function() {
throw new Error('Not implemented. Attach the ItemRunner to this CollectionRunner; k, thanks!');
};
CollectionRunner.CollectionClass = function() {
throw new Error('Not implemented. Attach the CollectionClass to this CollectionRunner; k, thanks!');
};
module.exports = CollectionRunner;
<file_sep>/lib/status-report.js
'use strict';
const chalk = require('chalk');
const StepPrinter = require('./step-value-printer');
class StatusReport {
constructor(status, logger) {
this.status = status;
this.logger = logger;
}
print() {
this.logger.warn('------------------------------------------------');
this.logger.addBreak();
this.ambiguous();
this.pending();
this.unimplemented();
this.logger.warn('------------------------------------------------');
this.logger.write(this.formatSummary());
}
// private methods below here
unimplemented() {
if (!this.status.notFound.length) { return; }
let header = 'Missing step definitions!!!';
let description = 'You can implement step definitions with these snippets:';
let body = this.status.notFound
.map((step) => {
return new StepPrinter(step, this.logger).suggestedDefinition();
})
.join('\n\n');
let wrapped = `module.exports = function(world) {\n${body}\n};`;
this.logger.error(header);
this.logger.write(chalk.gray(description) + '\n\n');
this.logger.warn(wrapped);
this.logger.addBreak();
}
pending() {
if (!this.status.pending.length) { return; }
let header = 'Pending steps!!!';
let body = this.status.pending
.map((step) => { return ` '${step.text}'\n`; })
.join('');
let wrapped = `${body}`;
this.logger.error(header);
this.logger.warn(wrapped);
}
ambiguous() {
if (!this.status.ambiguous.length) { return; }
let header = 'Ambiguous step definitions found for these steps:';
let body = this.status.ambiguous
.map((step) => { return ` '${step.text}'\n`; })
.join('');
let wrapped = `${body}`;
this.logger.error(header);
this.logger.warn(wrapped);
}
formatSummary() {
let header;
if (this.status.features.fail) {
header = chalk.red('Some features failed:');
} else {
header = chalk.green('All features passed!');
}
let body = `\n${this.formatHeaderType('Features - ', this.status.features)}`;
body += `\n${this.formatHeaderType('Scenarios - ', this.status.scenarios)}`;
body += `\n${this.formatHeaderType('Steps - ', this.status.steps)}`;
return `${header}\n${body}\n${this.formatErrorCounts()}`;
}
formatErrorCounts() {
let formatted = '';
if (this.status.features.fail) {
formatted += this.formatStep('Pending', this.status.pending.length);
formatted += this.formatStep('Unimplemented', this.status.notFound.length);
formatted += this.formatStep('Ambiguous', this.status.ambiguous.length);
}
return formatted;
}
formatHeaderType(type, stats) {
let prefix = this.formatWithColor(type);
let passingHeader = this.formatWithColor('Passing: ') + this.formatPassing(stats.pass);
let failingHeader = this.formatWithColor('Failing: ') + this.formatError(stats.fail);
return ` ${prefix} ${passingHeader}, ${failingHeader}`;
}
formatStep(title, count) {
return ` ${chalk.yellow(title + ':')} ${this.formatError(count)}\n`;
}
formatWithColor(message) {
let color = this.status.features.fail ? 'yellow' : 'green';
return chalk[color](message)
}
formatPassing(number) {
let color = number ? 'green' : 'grey';
return chalk[color](number)
}
formatError(number) {
let color = number ? 'red' : 'grey';
return chalk[color](number);
}
}
module.exports = StatusReport;
<file_sep>/lib/suite-runner.js
'use strict';
const World = require('./world');
const StatusReport = require('./status-report');
const compileFeatures = require('./compile-feature');
const SuiteObserver = require('./suite-observer');
const FeatureCollectionRunner = require('./feature-collection-runner');
function passThrough(callback) {
callback();
}
class SuiteRunner {
constructor(config) {
this.timeout = config.timeout || 3000;
this.setup = config.setup || passThrough;
this.teardown = config.teardown || passThrough;
this.rawFeatures = config.features;
this.stepExports = config.stepExports;
this.logger = config.logger;
this.observer = new SuiteObserver(config.logger);
}
run(){
this.setup((setupData) => {
setupData = setupData || {};
this.world = new SuiteRunner.World(setupData, this.logger, this.observer);
this.world.timeout = this.timeout;
this.injectDefinitionsIntoWorld();
this.compileFeatures();
this.runFeatures();
});
}
injectDefinitionsIntoWorld() {
Object.keys(this.stepExports).forEach((key) => {
this.stepExports[key](this.world);
});
}
compileFeatures() {
this.features = this.rawFeatures.map((feature) => {
let compiled = SuiteRunner.compile(feature.content, this.logger);
compiled.filename = feature.filename;
return compiled;
});
}
runFeatures() {
let featureRunner = new FeatureCollectionRunner(this.features, this.world);
featureRunner.onAny((event, item, err) => {
this.observer.handleEvent(event, item, err);
});
featureRunner.run(() => { this.teardownSuite(); });
}
teardownSuite() {
this.teardown(this.world, () => {
new SuiteRunner.StatusReport(this.observer.data, this.logger).print();
this.close();
});
}
close() {
process.exit(this.observer.exitCode());
}
}
SuiteRunner.World = World;
SuiteRunner.compile = compileFeatures;
SuiteRunner.StatusReport = StatusReport;
module.exports = SuiteRunner;
<file_sep>/lib/compile-feature.js
'use strict';
const Gherkin = require('gherkin');
function compileFeature(featureText, logger) {
let parser = new Gherkin.Parser();
let document = parser.parse(featureText, logger);
let scenarios = new Gherkin.Compiler().compile(document);
return {
document: document,
scenarios: scenarios
};
}
function compileFeatureWithCatch(featureText, logger) {
try {
return compileFeature(featureText);
} catch (e) {
logger.warn(e.stack);
process.emit('exit');
}
}
compileFeatureWithCatch.compile = compileFeature;
module.exports = compileFeatureWithCatch;
<file_sep>/README.md
cucaroo 🥒
=======
[](https://travis-ci.org/baccigalupi/cucaroo) [](https://codeclimate.com/github/baccigalupi/cucaroo)
A light Cucumber implementation for Node.js. Cucaroo allows you to keep product-defined `.feature` files and facilitates testing against the narrative of the feature.
## Usage
```
$> cucaroo
Feature: Successful feature runs
As a BDD developer
I want to implement and run cucumber tests
So that I have regression testing, and a starting place for talking with product people
Scenario: All is good
Given all step definitions are defined
When I run the feature
Then I should see all steps in green
And the exit code should be 0
$>
```
## Installation
```bash
npm install -g cucaroo
```
# Overview
Run tests against your feature steps.
## Successfully Passed Features
The good. Here is an example of a feature that is well defined and passes all of its scenarios' steps.
__Feature Definition__
```
Feature: Successful feature runs
As a BDD developer
I want to implement and run cucumber tests
So that I have regression testing, and a starting place for talking with product people
Scenario: All is good
Given all step definitions are defined
When I run the feature
Then I should see all steps in green
And the exit code should be 0
```
Here is our `step_definition` for this feature
```js
const expect = require('chai').expect;
module.exports = function(world) {
world.given('all step definitions are defined', function(done) {
expect(foo).to.be.ok;
done();
});
world.when('I run the feature', function(done) {
    expect(bar).to.be.ok;
done();
});
world.then('I should see all steps in green', function(done) {
expect(baz.success).to.equal(4);
done();
});
world.and('the exit code should be 0', function(done) {
    expect(world.errors).to.equal(0);
done();
});
};
```
Assuming your tests successfully passed, you will get an exit code of `0`.
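Since the exit status is non-zero whenever any feature fails, you can gate CI jobs or shell scripts on it directly. A minimal illustration (plain shell chaining, nothing cucaroo-specific):

```bash
# the echo only runs if every feature passed
cucaroo && echo "all features passed"
```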
## Failed Features
The bad. During development and other situations, tests will fail. In the event your `cucaroo` test fails, you will receive information on where and how a test failed. Here is an example of a feature failing.
__Feature Definition__
```
Feature: Feature with a whole lot of errors
As a BDD developer
I want to see my regression tests fail
So that I know my code works after modifications
Scenario: Assertion failure
Given Things are moving along just fine
When And then I make an assertion that aint true
Then I should see a helpful stack trace
And the summary at the end reflects those errors
Scenario: Runtime errors
Given Things are moving along just fine
When I make an error resulting in a runtime failure
Then I should see a helpful stack trace
And the summary at the end reflects those errors
```
And then the `step_definition` file that actually does the assertions.
```js
const assert = require('assert');
module.exports = function(world) {
world.given('Things are moving along just fine', function(done) {
done();
});
world.when('And then I make an assertion that aint true', function(done) {
    assert(false, 'it isnt like we thought');
done();
});
world.then('I should see a helpful stack trace', function(done) {
done();
});
world.and('the summary at the end reflects those errors', function(done) {
done();
});
world.when('I make an error resulting in a runtime failure', function(done) {
failHard();
});
};
```
And with these failing steps, you will receive the following output _(but with pretty colors)_.
```
Feature: Feature with a whole lot of errors
As a BDD developer
I want to see my regression tests fail
So that I know my code works after modifications
Scenario: Assertion failure
Given Things are moving along just fine
When And then I make an assertion that aint true // step threw an error; halting scenario
AssertionError: it isnt like we thought
at StepDefinition.implementation (/Users/user1/Projects/cucaroo/test/features/step_definitions/failing-steps.js:11:5)
at Step.run (/Users/user1/Projects/cucaroo/lib/step.js:25:23)
at StepCollection.runStepUnlessHalted (/Users/user1/Projects/cucaroo/lib/step-collection.js:53:10)
at /Users/user1/Projects/cucaroo/lib/step-collection.js:30:33
at /Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:3830:24
at replenish (/Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:946:17)
at iterateeCallback (/Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:931:17)
at /Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:906:16
at /Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:3835:13
at definition.implementation (/Users/user1/Projects/cucaroo/lib/step.js:33:9)
Then I should see a helpful stack trace
And the summary at the end reflects those errors
Scenario: Runtime errors
Given Things are moving along just fine
When I make an error resulting in a runtime failure // step threw an error; halting scenario
ReferenceError: failHard is not defined
at StepDefinition.implementation (/Users/user1/Projects/cucaroo/test/features/step_definitions/failing-steps.js:24:5)
at Step.run (/Users/user1/Projects/cucaroo/lib/step.js:25:23)
at StepCollection.runStepUnlessHalted (/Users/user1/Projects/cucaroo/lib/step-collection.js:53:10)
at /Users/user1/Projects/cucaroo/lib/step-collection.js:30:33
at /Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:3830:24
at replenish (/Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:946:17)
at iterateeCallback (/Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:931:17)
at /Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:906:16
at /Users/user1/Projects/cucaroo/node_modules/async/dist/async.js:3835:13
at definition.implementation (/Users/user1/Projects/cucaroo/lib/step.js:33:9)
Then I should see a helpful stack trace
And the summary at the end reflects those errors
Some features failed:
Features - Passing: 0, Failing: 1
Scenarios - Passing: 0, Failing: 2
Steps - Passing: 2, Failing: 2
```
# Config
`TODO`
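Until this section is fleshed out, here is a rough sketch of a `.cucaroo.config.js`, based on the keys the `Config` class reads (`featuresDirectory`, `stepDefinitions`, `suiteSetup`, `suiteTeardown`, `timeout`). The paths below are illustrative, not requirements; anything you omit falls back to the conventional locations under `features/`:

```js
// .cucaroo.config.js -- a sketch; adjust the paths to your project
module.exports = {
  featuresDirectory: __dirname + '/features',
  stepDefinitions:   __dirname + '/features/step_definitions',
  suiteSetup:        __dirname + '/features/support/setup',
  suiteTeardown:     __dirname + '/features/support/teardown',
  timeout: 3000 // per-step timeout in milliseconds (also the default when omitted)
};
```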
# Project Structure
`TODO`
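For now, the default layout that `Config` falls back to gives a reasonable picture of the expected structure (a sketch of the convention, not a requirement):

```
features/                     # Gherkin .feature files
features/step_definitions/    # modules exporting function(world) that register steps
features/support/setup        # suite setup, run once before any feature
features/support/teardown     # suite teardown, run once after all features
.cucaroo.config.js            # optional overrides for the paths above and the step timeout
```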
# Contributing
`TODO`
<file_sep>/lib/step-collection-runner.js
'use strict';
const async = require('async');
const StepRunner = require('./step-runner');
const CollectionRunner = require('./collection-value-runner');
class StepCollectionRunner extends CollectionRunner {
initialHaltCondition() {
return !this.collection.valid();
}
wrap(items) {
return items;
}
}
StepCollectionRunner.ItemRunner = StepRunner;
module.exports = StepCollectionRunner;
<file_sep>/lib/suite-observer.js
'use strict';
const StepPrinter = require('./step-value-printer');
class SuiteObserver {
constructor(logger) {
this.logger = logger;
this.data = {
features: {
pass: 0, fail: 0
},
scenarios: {
pass: 0, fail: 0
},
steps: {
pass: 0, fail: 0
},
pending: [],
notFound: [],
ambiguous: []
};
}
exitCode() {
return this.data.features.fail;
}
handleEvent(event, item, err) {
let method = event.replace(/-([a-z])/g, (match) => { return match[1].toUpperCase(); });
this[method] && this[method](item, err);
}
featureStarted(item) {
this.featurePassing = true;
this.logger.featureName(item.name);
this.logger.featureDescription(item.description);
}
featureFinished(item) {
if (this.featurePassing) {
this.data.features.pass += 1;
} else {
this.data.features.fail += 1;
}
}
scenarioStarted(item) {
this.scenarioPassing = true;
this.logger.scenarioName(item.name);
}
scenarioFinished(item) {
if (this.scenarioPassing) {
this.data.scenarios.pass += 1;
} else {
this.data.scenarios.fail += 1;
}
this.logger.addBreak();
}
stepStarted(item) {
this.currentStep = item;
}
stepPass(item) {
this.data.steps.pass += 1;
this.printStep(item);
}
stepFail(item, err) {
this.featurePassing = this.scenarioPassing = false;
this.data.steps.fail += 1;
this.printStep(item, 'fail');
this.logger.addBreak();
this.logger.error(err.stack);
this.logger.addBreak();
}
stepNotRun(item) {
this.featurePassing = this.scenarioPassing = false;
this.printStep(item, 'not-run');
}
stepPending(item) {
this.featurePassing = this.scenarioPassing = false;
this.printStep(item, 'pending');
}
substepNotRun(item, err) {
let errType;
if (item.ambiguous()) {
errType = `substep-ambiguous: '${item.text}'`;
} else if (item.notFound()) {
errType = `substep-not-found: '${item.text}'`;
}
if (errType) { throw new Error(errType); }
}
substepPending(item, err) {
throw new Error(`substep-pending: ${item.text}`);
}
printStep(step, event, sub=false) {
if (step.ambiguous()) { this.data.ambiguous.push(step); }
else if (step.notFound()) { this.data.notFound.push(step); }
else if (event === 'pending') { this.data.pending.push(step); }
let printMethod = sub ? 'printSub' : 'print';
new SuiteObserver.StepPrinter(step, this.logger)[printMethod](event);
}
}
SuiteObserver.StepPrinter = StepPrinter;
module.exports = SuiteObserver;
<file_sep>/lib/logger.js
'use strict';
const chalk = require('chalk');
class Logger {
constructor(outputStream) {
this.outputStream = outputStream;
}
warn(message) {
this.outputStream.write(`${chalk.yellow(message)}\n`);
}
error(message) {
this.outputStream.write(`${chalk.red(message)}\n`);
}
disabled(message) {
this.outputStream.write(`${chalk.gray(message)}\n`);
}
featureName(text) {
this.outputStream.write(`\nFeature: ${chalk.yellow(text)}\n`);
}
featureDescription(text) {
this.outputStream.write(`${text}\n\n`);
}
scenarioName(text) {
this.outputStream.write(` Scenario: ${chalk.yellow(text)}\n`);
}
addBreak() {
this.outputStream.write('\n');
}
stepsDefinitionsMissing() {
this.addBreak();
this.addBreak();
this.error('Missing step definitions!!!');
this.disabled('You can implement step definitions with these snippets:');
this.addBreak();
}
write(message) {
this.outputStream.write(message);
}
}
module.exports = Logger;
<file_sep>/test/unit/step-definition-test.js
'use strict';
const assert = require('assert');
const StepDefinition = require('../../lib/step-definition');
describe('StepDefinition', function() {
let definition, implementation, callcount;
beforeEach(function() {
callcount = 0;
implementation = function(done) {
callcount += 1;
done();
};
});
it('matching steps works when the matcher is a string', function() {
definition = new StepDefinition('I visit to the home page', implementation, 'When');
let matched = definition.match('I visit to the home page');
assert(matched);
matched = definition.match('I go to the home page');
assert(!matched);
});
it('matching steps works when the matcher is a regex', function() {
definition = new StepDefinition(/I \w+ to the home page/i, implementation, 'When');
let matched = definition.match('i visit to the home page');
assert(matched);
matched = definition.match('i go to the home page');
assert(matched);
matched = definition.match('I click something');
assert(!matched);
});
it('run() calls the implemetation', function(done) {
definition = new StepDefinition('I visit to the home page', implementation, 'When');
definition.run(function() {
assert.equal(callcount, 1);
done();
});
});
});
<file_sep>/test/unit/step-value-test.js
'use strict';
const assert = require('assert');
const sinon = require('sinon');
const StepValue = require('../../lib/step-value');
describe('StepValue', function() {
let stepValue, compiledStep, matchingDefinitions;
beforeEach(function() {
compiledStep = {text: 'I am the step text', keyword: 'Given'};
matchingDefinitions = [
{ implementation: sinon.spy() }
];
stepValue = new StepValue(compiledStep, matchingDefinitions);
});
it('makes available the most important parts of the step', function() {
assert.equal(stepValue.text, 'I am the step text');
assert.equal(stepValue.type, 'Given');
assert.equal(stepValue.implementation, matchingDefinitions[0].implementation);
});
it('is valid with one definition', function() {
assert(stepValue.valid());
assert(!stepValue.ambiguous());
assert(!stepValue.notFound());
});
it('is not found when there are no definitions', function() {
matchingDefinitions = [];
stepValue = new StepValue(compiledStep, matchingDefinitions);
assert(!stepValue.valid());
assert(!stepValue.ambiguous());
assert(stepValue.notFound());
});
it('is ambiguous when there are multiple definitions', function() {
matchingDefinitions.push({ implementation: sinon.spy() });
stepValue = new StepValue(compiledStep, matchingDefinitions);
assert(!stepValue.valid());
assert(stepValue.ambiguous());
assert(!stepValue.notFound());
});
it('is an event emitter', function() {
let listener = sinon.spy();
stepValue.on('error', listener);
stepValue.emit('error', 'Mayday!');
assert(listener.calledWith('Mayday!'));
});
});
<file_sep>/test/unit/marry-steps-test.js
'use strict';
const assert = require('assert');
const MarrySteps = require('../../lib/marry-steps');
describe('MarrySteps', function() {
let marry, compiledSteps, definitions;
class MockDefinition {
constructor(text) {
this.text = text;
}
match(matchText) {
return this.text === matchText;
}
}
beforeEach(function() {
compiledSteps = [
{text: 'I am the step text', keyword: 'Given'},
'I come from the land of substep',
{text: 'I live and breath', keyword: 'When'},
{text: 'I will be validated', keyword: 'Then'}
];
definitions = [
new MockDefinition('I am the step text'),
new MockDefinition('I come from the land of substep'),
new MockDefinition('I will be validated'),
new MockDefinition('I will be validated')
];
marry = new MarrySteps(compiledSteps, definitions);
});
it('generates the right step text values for each step', function() {
let stepCollection = marry.marry();
let textValues = stepCollection.map((item) => { return item.text; });
assert.equal(textValues[0], 'I am the step text');
assert.equal(textValues[1], 'I come from the land of substep');
assert.equal(textValues[2], 'I live and breath');
assert.equal(textValues[3], 'I will be validated');
});
it('generates the right types for each step', function() {
let stepCollection = marry.marry();
let value = stepCollection.map((item) => { return item.type; });
assert.equal(value[0], 'Given');
assert.equal(value[1], 'And');
assert.equal(value[2], 'When');
assert.equal(value[3], 'Then');
});
it('matches definitions with compiled steps', function() {
let stepCollection = marry.marry();
let value = stepCollection.map((item) => { return item.definitionCount; });
assert.equal(value[0], 1);
assert.equal(value[1], 1);
assert.equal(value[2], 0);
assert.equal(value[3], 2);
});
});
<file_sep>/test/features/step_definitions/ambiguous-steps.js
'use strict';
const assert = require('assert');
module.exports = function(world) {
world.given('there are two steps with the same text', function(done) {
done();
});
world.given('there are two steps with the same text', function(done) {
done();
});
world.when('I should see a message after the step', function(done) {
done();
});
};
<file_sep>/lib/sub-step-runner.js
'use strict';
const async = require('async');
const inherits = require('util').inherits;
const EventEmitter = require('eventemitter2').EventEmitter2;
const MarrySteps = require('./marry-steps');
const StepCollectionRunner = require('./step-collection-runner');
class SubStepRunner {
constructor(world) {
this.stepTexts = [];
this.world = world;
this.definitions = world.stepDefinitions;
}
add(text) {
this.stepTexts.push(text);
return this;
}
finish(callback) {
try {
this.runAll(callback);
} catch(e) {
callback(e);
}
}
runAll(callback) {
let steps = new MarrySteps(this.stepTexts, this.definitions.steps).marry();
let runner = new StepCollectionRunner(steps, this);
runner.onAny((event, obj, err) => {
this.emit(`substep-${event}`, obj, err);
});
runner.run(callback);
}
}
SubStepRunner.prototype.runStep = SubStepRunner.prototype.add;
inherits(SubStepRunner, EventEmitter);
SubStepRunner.MarrySteps = MarrySteps;
SubStepRunner.StepCollectionRunner = StepCollectionRunner;
module.exports = SubStepRunner;
<file_sep>/lib/runner.js
'use strict';
class Runner {
constructor(item, parent, world) {
this.item = item;
this.parent = parent;
this.world = world;
}
run(done) {
this.item.emit(`${this.eventPrefix}started`, this.item);
this.runChildren(() => {
this.item.emit(`${this.eventPrefix}finished`, this.item);
done();
});
}
runChildren(callback) {
callback();
}
}
Runner.prototype.eventPrefix = '';
module.exports = Runner;
<file_sep>/test/features/step_definitions/pending-steps.js
'use strict';
module.exports = function(world) {
world.given('The first step is pending by throwing a pending error', function(done) {
throw world.pending();
});
world.given('The first step is pending by passing a pending error to the callback', function(done) {
done(world.pending());
});
world.then('I throw in a pending step', function(done) {
done(world.pending());
});
world.then('I should see all steps are gray', function(done) {
done();
});
world.and('the exit code should be 1', function(done) {
done();
});
};
<file_sep>/test/unit/logger-test.js
'use strict';
const assert = require('assert');
const OutputStream = require('./support/output-stream');
const Logger = require('../../lib/logger');
describe('Logger', function() {
let mockStream, outputStream, logger;
beforeEach(function() {
mockStream = new OutputStream();
outputStream = mockStream.stream;
logger = new Logger(outputStream);
});
it('.warn(message) ends the message in a line break', function() {
logger.warn('Oh no!');
assert(mockStream.output.match(/\n$/));
});
it('.warn(message) formats the message in color', function() {
logger.warn('Oh no!');
assert(mockStream.output.match(/\x1b[[0-9;]+m/));
});
it('.warn(message) includes the passed in message', function() {
logger.warn('Oh no!');
assert(mockStream.output.match(/Oh no!/));
});
it('prints out a good feature name and description', function() {
let name = 'Signing in';
let description = ' As a customer\n' +
' I want to sign in to see subscribed content\n' +
' So that I am informed\n';
logger.featureName(name);
logger.featureDescription(description);
let expectedText = `\nFeature: ${name}\n${description}\n\n`;
assert.equal(mockStream.cleanOutput(), expectedText);
});
it('prints out scenario names', function() {
let name = 'Doing something great!';
logger.scenarioName(name);
let expectedText = ' Scenario: Doing something great!\n';
assert.equal(mockStream.cleanOutput(), expectedText);
});
it('prints a break', function() {
logger.addBreak();
assert.equal(mockStream.cleanOutput(), '\n');
});
it('prints missing step declaration', function() {
logger.stepsDefinitionsMissing();
let expected = `\n\nMissing step definitions!!!\nYou can implement step definitions with these snippets:\n\n`;
assert.equal(mockStream.cleanOutput(), expected);
});
});
<file_sep>/test/unit/status-report-test.js
'use strict';
const assert = require('assert');
const Status = require('../../lib/status');
const StatusReport = require('../../lib/status-report');
const Logger = require('../../lib/logger');
const OutputStream = require('./support/output-stream');
describe('StatusReport', function() {
let status, statusReport;
let mockStream, outputStream, logger;
beforeEach(function() {
mockStream = new OutputStream();
outputStream = mockStream.stream;
logger = new Logger(outputStream);
status = new Status();
statusReport = new StatusReport(status, logger);
});
// eyeballing it for now, because lots of change, and tedium
xit('prints', function() {
status.add('unimplemented', 'where is it?');
status.add('unimplemented', 'and more question about how');
status.add('pending', 'not yet yo!');
status.failStep();
status.pass('step');
statusReport.print();
console.log(mockStream.output);
});
});
<file_sep>/test/acceptance/ambiguous-feature-test.js
'use strict';
const assert = require('assert');
const OutputStream = require('../unit/support/output-stream');
const Config = require('../../lib/config');
const Runner = require('../../lib/suite-runner');
describe('failure.feature', function() {
let config, runner, mockStream, outputStream, exitCode;
beforeEach(function() {
mockStream = new OutputStream();
outputStream = mockStream.stream;
config = new Config(outputStream, ['test/features/failure.feature']);
config.load();
});
it('returns an error code', function(done) {
Runner.prototype.close = function() {
assert.equal(this.observer.exitCode(), 1);
done();
};
runner = new Runner(config);
runner.run();
});
it('Outputs the feature header', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Feature with a whole lot of errors'));
done();
};
runner = new Runner(config);
runner.run();
});
it('Outputs scenario info', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Scenario: Assertion failure'));
assert(plainOutput.includes('Scenario: Runtime errors'));
done();
};
runner = new Runner(config);
runner.run();
});
it('Prints comments about which steps are unimplemented', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('When And then I make an assertion that aint true // step threw an error; halting scenario'));
assert(plainOutput.includes('When I make an error resulting in a runtime failure // step threw an error; halting scenario'));
done();
};
runner = new Runner(config);
runner.run();
});
it('Spits out stacktraces', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('AssertionError: it isnt like we thought'));
assert(plainOutput.includes('ReferenceError: failHard is not defined'));
assert(plainOutput.includes('at StepValue.implementation'));
assert(plainOutput.includes('test/features/step_definitions/failing-steps.js:24:5'));
done();
};
runner = new Runner(config);
runner.run();
});
it('Summarizes correctly', function(done) {
Runner.prototype.close = function() {
let plainOutput = mockStream.cleanOutput();
assert(plainOutput.includes('Some features failed:'));
assert(plainOutput.includes('Features - Passing: 0, Failing: 1'));
assert(plainOutput.includes('Scenarios - Passing: 0, Failing: 2'));
assert(plainOutput.includes('Steps - Passing: 2, Failing: 2'));
done();
};
runner = new Runner(config);
runner.run();
});
});
<file_sep>/test/features/step_definitions/substeps-steps.js
'use strict';
const assert = require('assert');
module.exports = function(world) {
world.given('I wrap a number of substeps into one', function(done) {
world
.runStep('all step definitions are defined')
.runStep('I run the feature')
.finish(done);
});
world.then('I should see everything is green', function(done) {
world.run('I should see all steps in green', done);
});
world.given('I wrap an unimplemented substep into another step set', function(done) {
world
.runStep('all step definitions are defined')
.runStep('not hear mo-fo!')
.finish(done);
});
world.when('I run that scenario', function(done) {
done();
});
world.then('I will see that the step containing the substep will be unimplemented', function(done) {
done();
});
world.then('the scenario will fail', function(done) {
done();
});
world.given('I wrap a pending substep into another step set', function(done) {
world
.runStep('all step definitions are defined')
.runStep('I throw in a pending step')
.finish(done);
});
world.given('I wrap a error prone substep into another step set', function(done) {
world
.runStep('all step definitions are defined')
.runStep('I make an error resulting in a runtime failure')
.finish(done);
});
world.then('I will see that the step containing the substep will be pending', function(done) {
done();
});
world.then('I will see that the step containing the substep will have failed', function(done) {
done();
});
world.given('I wrap an ambiguous substep into another step set', function(done) {
world
.runStep('all step definitions are defined')
.runStep('there are two steps with the same text')
.finish(done);
});
};
<file_sep>/test/features/support/teardown.js
'use strict';
module.exports = function teardown(world, callback) {
callback();
};
<file_sep>/test/unit/sub-step-runner-test.js
'use strict';
const assert = require('assert');
const World = require('../../lib/world');
const SubStepRunner = require('../../lib/sub-step-runner');
describe('SubStepRunner', function() {
let subStep, world;
beforeEach(function() {
world = new World({}, {});
subStep = new SubStepRunner(world);
});
it('add("step text") adds the step text to an array', function() {
subStep.add('Hello, world');
assert.equal(subStep.stepTexts[0], 'Hello, world');
});
it('add("step text") returns the subStep for chaining', function() {
assert.equal(subStep.add('whatever'), subStep);
});
it('finish(done) marries and runs the substeps capturing "not-run" when step not found', function(done) {
subStep.add('more o that!');
subStep.on('substep-not-run', function(step) {
assert(step.notFound());
done();
});
subStep.finish(function() {});
});
  it('finish(done) captures "pass" events', function(done) {
world.given('more o that!', function(callback) {
callback();
});
    subStep.on('substep-pass', function(step) {
assert.equal(step.text, 'more o that!');
done();
});
subStep.add('more o that!');
subStep.finish(function() {});
});
  it('finish(done) captures "not-run" events when ambiguous steps', function(done) {
world.given('more o that!', function(callback) {
callback();
});
world.given('more o that!', function(callback) {
callback();
});
    subStep.on('substep-not-run', function(step) {
      assert(step.ambiguous());
done();
});
subStep.add('more o that!');
subStep.finish(function() {});
});
  it('finish(done) captures "pending" events when that happens', function(done) {
world.given('more o that!', function(callback) {
throw new Error('pending');
callback();
});
    subStep.on('substep-pending', function(step) {
assert.equal(step.text, 'more o that!');
done();
});
subStep.add('more o that!');
subStep.finish(function() {});
});
  it('finish(done) captures "fail" events when that happens', function(done) {
world.given('more o that!', function(callback) {
throw new Error('bang!');
callback();
});
    subStep.on('substep-fail', function(step) {
assert.equal(step.text, 'more o that!');
done();
});
subStep.add('more o that!');
subStep.finish(function() {});
});
});
<file_sep>/test/unit/support/output-stream.js
'use strict';
const Stream = require('stream');
class MockOutputStream {
constructor() {
this.output = '';
this.stream = new Stream.Writable();
this.stream._write = (chunk, encoding, next) => {
this.write(chunk, encoding, next);
}
}
write(chunk, encoding, next) {
this.output += chunk.toString();
next();
}
cleanOutput() {
return this.output.replace(/\x1b[[0-9;]+m/g, '');
}
}
module.exports = MockOutputStream;
<file_sep>/test/unit/compile-feature-test.js
'use strict';
const assert = require('assert');
const sinon = require('sinon');
const path = require('path');
const loadFeatures = require('../../lib/load-features');
const compileFeature = require('../../lib/compile-feature');
describe('compileFeatures', function() {
let featureTexts, logger;
beforeEach(function() {
featureTexts = loadFeatures(path.resolve(__dirname, '../features'));
logger = {
warn: sinon.spy()
};
});
it('uses gherkins to compile the features', function() {
let features = featureTexts.map((featureText) => {
return compileFeature(featureText, logger);
});
assert(features.length);
});
it('logs and exits when the gherkins is unable to compile', function() {
let sandbox = sinon.sandbox.create();
let feature = 'huh\nwhat?';
process.emit = sinon.spy();
compileFeature(feature, logger);
assert(process.emit.calledWith('exit'));
assert(logger.warn.called);
sandbox.restore();
});
});
<file_sep>/lib/scenario-collection-runner.js
'use strict';
const inherits = require('util').inherits;
const EventEmitter = require('eventemitter2').EventEmitter2
const Collection = require('./collection');
const Runner = require('./runner');
const CollectionRunner = require('./collection-value-runner');
const MarrySteps = require('./marry-steps');
const StepCollectionRunner = require('./step-collection-runner');
let eventPrefix = 'scenario-'
class ScenarioValue {
constructor(compiled) {
this.name = compiled.name;
this.steps = compiled.steps;
}
}
inherits(ScenarioValue, EventEmitter);
class ScenarioCollection extends Collection {}
ScenarioCollection.ValueClass = ScenarioValue;
class ScenarioRunner extends Runner {
runChildren(callback) {
let steps = new MarrySteps(this.item.steps, this.world.stepDefinitions.steps).marry();
let runner = new StepCollectionRunner(steps, this.world);
runner.onAny((event, item, err) => {
this.parent.emit(`step-${event}`, item, err);
});
runner.run(callback);
}
}
ScenarioRunner.prototype.eventPrefix = eventPrefix;
class ScenarioCollectionRunner extends CollectionRunner {}
ScenarioCollectionRunner.ItemRunner = ScenarioRunner;
ScenarioCollectionRunner.CollectionClass = ScenarioCollection;
module.exports = ScenarioCollectionRunner;
<file_sep>/test/unit/step-collection-runner-test.js
'use strict';
const assert = require('assert');
const StepValue = require('../../lib/step-value');
const StepValueCollection = require('../../lib/step-value-collection');
const StepCollectionRunner = require('../../lib/step-collection-runner');
describe('StepCollectionRunner', function() {
let stepValue, compiledStep, matchingDefinitions,
steps, runner;
beforeEach(function() {
compiledStep = {text: 'I am the step text', keyword: 'Given'};
matchingDefinitions = [
{
implementation: (callback) => {
callback();
}
}
];
stepValue = new StepValue(compiledStep, matchingDefinitions);
steps = new StepValueCollection([stepValue]);
runner = new StepCollectionRunner(steps, 200);
});
it('when a step fails', function(done) {
let err = new Error('Hot dang! Something went wrong');
stepValue.implementation = function(callback) {
throw err;
};
runner.run(function() {
assert.equal(runner.halted(), true);
done();
});
});
it('when a step is pending', function(done) {
let err = new Error('Pending');
stepValue.implementation = function(callback) {
throw err;
};
runner.run(function() {
assert.equal(runner.halted(), true);
done();
});
});
it('when a step is not run', function(done) {
stepValue.implementation = function(callback) {
stepValue.emit('not-run');
callback();
};
runner.run(function() {
assert.equal(runner.halted(), true);
done();
});
});
it('when a step passes', function(done) {
runner.run(function() {
assert.equal(runner.halted(), false);
done();
});
});
});
|
9dd631d5e0949bc0325fe823c870e6302f296436
|
[
"JavaScript",
"Markdown"
] | 41
|
JavaScript
|
baccigalupi/cucaroo
|
64d5e715a8652e7d90bc5554046a34503bf0dea2
|
10f579c7fe5a74cd364c5bc47c6e69761894e46d
|
refs/heads/master
|
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PlayerView : MonoBehaviour
{
public float maxRotationDegreeSecond = 75f;
public float mouseRotationSpeed = 100f;
[Range(0, 45)]
public float maxPitchUpAngle = 45f;
[Range(0, 45)]
public float maxPitchDownAngle = 5f;
void Start()
{
Cursor.visible = false;
}
void Update()
{
MouseInput();
}
private void MouseInput()
{
Vector3 rotation = new Vector3(-Input.GetAxis("Mouse Y"), Input.GetAxis("Mouse X"), 0);
RotateView(rotation * mouseRotationSpeed);
}
private void RotateView(Vector3 rotation)
{
//Rotate Player
transform.Rotate(rotation * Time.deltaTime);
//Limit Player rotation pitch
float playerPitch = LimitPitch();
//Apply clamped pitch and clear roll
transform.rotation = Quaternion.Euler(playerPitch, transform.eulerAngles.y, 0);
}
private float LimitPitch()
{
float playerPitch = transform.eulerAngles.x;
float maxPitchUp = 360 - maxPitchUpAngle;
float maxPitchDown = maxPitchDownAngle;
if (playerPitch > 180 && playerPitch < maxPitchUp)
{
//Limit pitch up
playerPitch = maxPitchUp;
}
else if (playerPitch < 180 && playerPitch > maxPitchDown)
{
//limit pitch down
playerPitch = maxPitchDown;
}
return playerPitch;
}
}
|
5a51b594055539bc3cffc0f908f8f36187493637
|
[
"C#"
] | 1
|
C#
|
mariem-mah/TargetShooter
|
eee0d9a44f91c0f9d60c68e6b1f9d4892a0c9e51
|
220a67b15b7887759127bb7110a1322ed339bed4
|
refs/heads/master
|
<file_sep>package main;
import commands.CommandHandler;
import sx.blah.discord.api.IDiscordClient;
import sx.blah.discord.util.DiscordException;
import utils.BotUtils;
import utils.ConfigReader;
import java.io.FileNotFoundException;
/**
* Main Class
*/
public class Main {
/**
* Indicates that this class should not be instantiated
*/
private Main() {
}
/**
* Main method
* @param args passed arguments
*/
public static void main(String[] args) {
// Sets start time for uptime command
BotUtils.setStartTime(System.currentTimeMillis());
try {
// Reads config file
new ConfigReader(BotUtils.DEFAULT_CONFIG_PATH);
startClient();
} catch (FileNotFoundException e) { // Config file doesn't exist
System.out.println(BotUtils.ERROR_MESSAGE);
System.out.println(BotUtils.CONFIG_NOT_FOUND_ERROR);
System.out.println(BotUtils.TERMINATING_MESSAGE);
} catch (ArrayIndexOutOfBoundsException e1) { // Bad config
System.out.println(BotUtils.ERROR_MESSAGE);
System.out.println(e1.getMessage());
System.out.println(BotUtils.TERMINATING_MESSAGE);
} catch (DiscordException e2) { // Invalid token
System.out.println(BotUtils.ERROR_MESSAGE);
System.out.println("Unexpected error while loading RexCord."
+ "\nDid you insert a valid token?");
System.out.println(BotUtils.TERMINATING_MESSAGE);
}
}
/**
* Starts Bot Client
*/
private static void startClient() {
// Creates a new Client
IDiscordClient client = BotUtils.createDiscordClient();
// Registers a new listener
client.getDispatcher().registerListener(new CommandHandler());
// Logs in
client.login();
}
}
|
219641ebe9ca2423c77943d0c7cd0664f6fa6e93
|
[
"Java"
] | 1
|
Java
|
tminussi/RexCord
|
867a3943d6ce387d9efcc248735e91cea6ec2333
|
3ecd915a850f6829e32ea7d704b3a9e4ccdf5945
|
refs/heads/master
|
<file_sep>package jp.supership.nativesample.helper;
import android.content.Context;
import android.graphics.Color;
import android.graphics.Typeface;
import android.graphics.drawable.GradientDrawable;
import android.view.Gravity;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.facebook.ads.AdChoicesView;
import com.facebook.ads.MediaView;
import com.facebook.ads.NativeAd;
import jp.supership.nativesample.utilities.Utilities;
public class FANHelper {
private static final int LMP = LinearLayout.LayoutParams.MATCH_PARENT;
private static final int LWC = LinearLayout.LayoutParams.WRAP_CONTENT;
private static final int VGLMP = ViewGroup.LayoutParams.MATCH_PARENT;
private static final int VGLWC = ViewGroup.LayoutParams.WRAP_CONTENT;
public static void setIcon(NativeAd nativeAd, LinearLayout layout, Context context) {
ImageView nativeIcon = new ImageView(context);
ViewGroup.LayoutParams nativeIconLayoutParams = new LinearLayout.LayoutParams(Utilities.convertedPixelToDp(context, 30), VGLWC);
NativeAd.Image icon = nativeAd.getAdIcon();
NativeAd.downloadAndDisplayImage(icon, nativeIcon);
layout.addView(nativeIcon, nativeIconLayoutParams);
}
public static void setTitle(NativeAd nativeAd, LinearLayout layout, Context context) {
TextView titleView = new TextView(context);
titleView.setTextColor(Color.BLACK);
titleView.setTextSize(10);
titleView.setLayoutParams(new LinearLayout.LayoutParams(LMP, LWC));
titleView.setPadding(0, 0, 0, Utilities.convertedPixelToDp(context, 3));
titleView.setTypeface(Typeface.DEFAULT_BOLD);
Utilities.setOneLineAndEllipsisForTextView(titleView);
String title = nativeAd.getAdTitle();
titleView.setText(title);
layout.addView(titleView);
}
public static void setDescription(NativeAd nativeAd, LinearLayout layout, Context context) {
TextView nativeAdBody = new TextView(context);
nativeAdBody.setLayoutParams(new LinearLayout.LayoutParams(LMP, Utilities.convertedPixelToDp(context, 24)));
nativeAdBody.setTextSize(8);
nativeAdBody.setTextColor(Color.rgb(218, 218, 218));
nativeAdBody.setPadding(0, Utilities.convertedPixelToDp(context, 6), 0, Utilities.convertedPixelToDp(context, 6));
Utilities.setOneLineAndEllipsisForTextView(nativeAdBody);
layout.addView(nativeAdBody);
String description = nativeAd.getAdBody();
nativeAdBody.setText(description);
}
public static void setAdMark(LinearLayout layout, Context context) {
TextView prTextView = new TextView(context);
prTextView.setText("広告");
prTextView.setTextColor(Color.rgb(218, 218, 218));
prTextView.setTextSize(9);
prTextView.setLayoutParams(new ViewGroup.LayoutParams(LWC, LWC));
layout.addView(prTextView);
}
public static void setMediaView(NativeAd nativeAd, FrameLayout layout, Context context) {
MediaView mediaView = new MediaView(context);
// Used for both video and still-image creatives
mediaView.setLayoutParams(new ViewGroup.LayoutParams(VGLMP, VGLMP));
mediaView.setNativeAd(nativeAd);
layout.addView(mediaView, new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, Utilities.convertedPixelToDp(context, 157)));
}
public static void setCta(NativeAd nativeAd, LinearLayout layout, Context context) {
LinearLayout nativeAdButtonArea = new LinearLayout(context);
nativeAdButtonArea.setLayoutParams(new LinearLayout.LayoutParams(LMP, LMP));
nativeAdButtonArea.setGravity(Gravity.CENTER);
TextView nativeAdButton = new TextView(context);
LinearLayout.LayoutParams nativeAdButtonParam = new LinearLayout.LayoutParams(Utilities.convertedPixelToDp(context, 130), Utilities.convertedPixelToDp(context, 25));
nativeAdButton.setLayoutParams(nativeAdButtonParam);
nativeAdButton.setTextColor(Color.rgb(11, 144, 255));
nativeAdButton.setTextSize(12);
nativeAdButton.setBackgroundColor(Color.WHITE);
nativeAdButton.setGravity(Gravity.CENTER);
// Set the border and rounded corners
GradientDrawable borders = new GradientDrawable();
borders.setColor(Color.WHITE);
borders.setCornerRadius(10);
borders.setStroke(3, Color.rgb(11, 144, 255));
// setBackgroundDrawable is deprecated, but it is used here to support older OS versions
nativeAdButton.setBackgroundDrawable(borders);
Utilities.setOneLineAndEllipsisForTextView(nativeAdButton);
nativeAdButton.setText(nativeAd.getAdCallToAction());
nativeAdButtonArea.addView(nativeAdButton);
layout.addView(nativeAdButtonArea);
}
public static void setNativeSocialContext(NativeAd nativeAd, LinearLayout layout, Context context) {
TextView nativeSocialContext = new TextView(context);
nativeSocialContext.setTextColor(Color.rgb(218, 218, 218));
nativeSocialContext.setWidth(Utilities.convertedPixelToDp(context, 130));
nativeSocialContext.setTextSize(8);
Utilities.setOneLineAndEllipsisForTextView(nativeSocialContext);
// Shows either "N people are using this" or the advertiser's domain
String socialText = nativeAd.getAdSocialContext() != null ? nativeAd.getAdSocialContext() : "";
nativeSocialContext.setText(socialText);
layout.addView(nativeSocialContext);
}
public static void setAdChoicesView(NativeAd nativeAd, FrameLayout innerLayout, LinearLayout adChoiceContainer, Context context) {
AdChoicesView adChoicesView = new AdChoicesView(context, nativeAd, true);
adChoiceContainer.addView(adChoicesView);
nativeAd.registerViewForInteraction(innerLayout);
}
}
<file_sep># Ad Generation Android Samples
## Android Studio
- NativeSample
Samples for native ads and for Facebook Audience Network native ads.
Note: the code that loads the native ad icon and main image is not included in these samples (an illustrative sketch is given at the end of this README).
### Requirements
- Android Studio - Version 2.1.2
- Android - Version 4.0.0 or later
## Eclipse
**Note: Google ended support for Android development on Eclipse in December 2015.**
- SampleApp
Samples for banner ads and interstitial ads.
### Requirements
- ADT - Revision 17
- Android - Version 2.1.0 or later
## Notes
- Download the latest version of each SDK and add it to your project references.
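### Image-loading sketch (illustrative only)
- The `AdImageLoader` class below does **not** ship with the samples or the SDKs; the class name, method signature, and threading approach are assumptions, included only to show roughly what the omitted icon/main-image loading step could look like.
```java
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Handler;
import android.os.Looper;
import android.widget.ImageView;

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public final class AdImageLoader {

    private AdImageLoader() {
    }

    // Downloads the image at imageUrl on a background thread and applies it to
    // the given ImageView on the main thread. Caching and error handling are omitted.
    public static void load(final String imageUrl, final ImageView target) {
        final Handler mainHandler = new Handler(Looper.getMainLooper());
        new Thread(new Runnable() {
            @Override
            public void run() {
                HttpURLConnection connection = null;
                try {
                    connection = (HttpURLConnection) new URL(imageUrl).openConnection();
                    InputStream stream = connection.getInputStream();
                    final Bitmap bitmap = BitmapFactory.decodeStream(stream);
                    stream.close();
                    if (bitmap != null) {
                        mainHandler.post(new Runnable() {
                            @Override
                            public void run() {
                                target.setImageBitmap(bitmap);
                            }
                        });
                    }
                } catch (Exception ignored) {
                    // Swallowed for brevity; a real loader would log and fall back.
                } finally {
                    if (connection != null) {
                        connection.disconnect();
                    }
                }
            }
        }).start();
    }
}
```
- A production loader would normally add in-memory/disk caching and reuse a shared thread pool instead of spawning a thread per request.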
<file_sep>//
// NativeAdView.swift
// ADGNativeSampleForSwift
//
// Created on 2016/06/10.
// Copyright © 2016 Supership. All rights reserved.
//
import UIKit
class NativeAdView: UIView {
required init(coder aDecoder: NSCoder) {
fatalError("not implemented")
}
init (adgManagerViewController: ADGManagerViewController, nativeAd: ADGNativeAd) {
// Create the view that the ad will be attached to
super.init(frame: CGRect(x: 0, y: 0, width: 300, height: 250))
// Icon
if let adIcon = nativeAd.iconImage {
if !adIcon.url.isEmpty {
let iconImageView = UIImageView(frame: CGRect(x: 4, y: 6, width: 30, height: 30))
let iconImageData = NSData(contentsOfURL: NSURL(string: adIcon.url)!)!
iconImageView.image = UIImage(data: iconImageData)
self.addSubview(iconImageView)
}
}
// Title
if let adTitle = nativeAd.title {
if !adTitle.text.isEmpty {
let titleLabel = UILabel(frame: CGRect(x: 38, y: 4, width: 258, height: 15))
titleLabel.text = adTitle.text;
titleLabel.numberOfLines = 1
titleLabel.font = titleLabel.font.fontWithSize(13)
self.addSubview(titleLabel)
}
}
// Ad mark
let adMarkLabel = UILabel(frame: CGRect(x: 38, y: 22, width: 28, height: 14))
adMarkLabel.text = "広告"
adMarkLabel.font = adMarkLabel.font.fontWithSize(11)
adMarkLabel.textColor = UIColor.lightGrayColor()
self.addSubview(adMarkLabel)
// Body text
if let adDesc = nativeAd.desc {
if !adDesc.value.isEmpty {
let descLabel = UILabel(frame: CGRect(x: 4, y: 30, width: 296, height: 40))
descLabel.text = adDesc.value
descLabel.numberOfLines = 2
descLabel.font = descLabel.font.fontWithSize(11)
descLabel.textColor = UIColor.lightGrayColor()
self.addSubview(descLabel)
}
}
// Ad image
if let adImage = nativeAd.mainImage {
if !adImage.url.isEmpty {
let imageView = UIImageView(frame: CGRect(x: 0, y: 65, width: 300, height: 156))
let imageData = NSData(contentsOfURL: NSURL(string: adImage.url)!)!
imageView.image = UIImage(data: imageData)
imageView.contentMode = UIViewContentMode.ScaleAspectFit
imageView.clipsToBounds = true
self.addSubview(imageView)
}
}
// Sponsored-by label
if let adSponsored = nativeAd.sponsored {
let sponsoredLabel = UILabel(frame: CGRect(x: 4, y: 226, width: 150, height: 20))
if !adSponsored.value.isEmpty {
sponsoredLabel.text = "sponsored by " + adSponsored.value
} else {
sponsoredLabel.text = "sponsored"
}
sponsoredLabel.numberOfLines = 1
sponsoredLabel.font = sponsoredLabel.font.fontWithSize(10)
sponsoredLabel.textColor = UIColor(red: 0.7, green: 0.7, blue: 0.7, alpha: 1.0)
self.addSubview(sponsoredLabel)
}
// CTA
if let adCTA = nativeAd.ctatext {
if !adCTA.value.isEmpty {
let actionButton = UIButton(frame: CGRect(x: 178, y: 223, width: 114, height: 25))
actionButton.setTitle(adCTA.value, forState: UIControlState.Normal)
actionButton.setTitleColor(UIColor(red: 0.12, green: 0.56, blue: 1.00, alpha: 1.0), forState: UIControlState.Normal)
actionButton.titleLabel?.font = UIFont.boldSystemFontOfSize(14)
actionButton.titleLabel?.adjustsFontSizeToFitWidth = true
actionButton.titleEdgeInsets = UIEdgeInsets(top: 1.0, left: 1.0, bottom: 1.0, right: 1.0)
actionButton.backgroundColor = UIColor.whiteColor()
actionButton.layer.borderWidth = 1.0
actionButton.layer.borderColor = UIColor(red: 0.12, green: 0.56, blue: 1.00, alpha: 1.0).CGColor
actionButton.layer.cornerRadius = 5.0
// Add tap handling to the button
nativeAd.setTapEvent(actionButton)
self.addSubview(actionButton)
}
}
// Tap handling and rotation control for the view
// This call is required
adgManagerViewController.delegateViewManagement(self, nativeAd: nativeAd)
}
}
|
525a3a598a3041849e043eb835efa3cfdb86c16c
|
[
"Markdown",
"Java",
"Swift"
] | 3
|
Java
|
kawasaki2013/sdk
|
72c2a60b1f25a2ed66b5611f110189f88a1a32b7
|
1660b4076d286bf63a6363e002b600eb5b5a05e6
|
refs/heads/master
|
<file_sep>#ifndef PARSER_H
#define PARSER_H
int yywrap( );
int yylex( );
void yyerror(const char* str);
int yyparse (void);
#endif
<file_sep>#ifndef STRING_H
#define STRING_H
char* substring(char* str, int start, int end);
#endif
<file_sep>#include <iostream>
#include <string>
#include <algorithm>
#include "environment.hpp"
#include "variable.hpp"
Variable* find_variable(Environment* env, std::string id) {
auto result = std::find_if(env->vars.begin(), env->vars.end(),
[id](const Variable* element) {
return element->id == id;
});
if (result != env->vars.end()) {
return *result;
}
return nullptr;
}
void add_variable(Environment* env, Variable* var) {
// If variable exists, replace it
Variable* temp_var = find_variable(env, var->id);
if (temp_var != nullptr) {
temp_var->value = std::unique_ptr<Value>(new Value(*var->value));
free(var);
return;
}
// If not, add variable to environment
env->vars.push_back(var);
}
Environment::~Environment() {
for (uint i = 0; i < size(vars); i++) {
delete vars[i];
}
}
<file_sep>#include <iostream>
#include <stdlib.h>
#include "sloth.hpp"
#include "shell.hpp"
void interpret_file(char* fileName);
/* the result variable */
Node* result;
int main(int argc, char* argv[]) {
if (argc == 1) {
start_shell();
} else if (argc == 2) {
interpret_file(argv[1]);
} else {
std::cout << "Incorrect number of arguments passed. Expected " << 0 << " or " << 1 << ", got " << argc - 1 << std::endl;
std::cout << "Usage: sloth [program_name]" << std::endl;
exit(-1);
}
}
void interpret_file(char* fileName) {
/* save stdin */
FILE* orig_stdin = stdin;
stdin = fopen(fileName, "r");
yyparse( );
/* restore stdin */
fclose(stdin);
stdin = orig_stdin;
// Interpret the AST
// print_tree(result, 0); // For debugging
Environment* env = new Environment();
eval_statement(result, env);
delete env;
delete result;
}<file_sep>#ifndef SHELL_H
#define SHELL_H
FILE* stringToFile(char* str);
void start_shell();
#endif
<file_sep>GPP = g++ -Wall -std=c++17 -O3
DESTDIR ?= /usr
sloth: src/main.cpp src/parser/lex.yy.o src/parser/parser.tab.o src/variables/environment.o src/variables/variable.o src/variables/value.o src/operations/node.o src/operations/operators.o src/string.o src/shell.o
$(GPP) src/main.cpp src/parser/lex.yy.o src/parser/parser.tab.o src/variables/environment.o src/variables/variable.o src/variables/value.o src/operations/node.o src/operations/operators.o src/string.o src/shell.o -ledit -o sloth
src/parser/lex.yy.o: src/parser/lex.yy.c src/parser/parser.tab.h
$(GPP) -c src/parser/lex.yy.c -o src/parser/lex.yy.o
src/parser/parser.tab.o: src/parser/parser.tab.c
$(GPP) -c src/parser/parser.tab.c -o src/parser/parser.tab.o
src/parser/parser.tab.h: src/parser/parser.y
bison -d -o src/parser/parser.tab.c src/parser/parser.y
src/parser/parser.tab.c: src/parser/parser.y
bison -d -o src/parser/parser.tab.c src/parser/parser.y
src/parser/lex.yy.c: src/parser/lexer.l
flex -o src/parser/lex.yy.c src/parser/lexer.l
src/variables/environment.o: src/variables/environment.hpp src/variables/environment.cpp
$(GPP) -c src/variables/environment.cpp -o src/variables/environment.o
src/variables/variable.o: src/variables/variable.hpp src/variables/variable.cpp
$(GPP) -c src/variables/variable.cpp -o src/variables/variable.o
src/variables/value.o: src/variables/value.hpp src/variables/value.cpp
$(GPP) -c src/variables/value.cpp -o src/variables/value.o
src/operations/operators.o: src/operations/operators.hpp src/operations/operators.cpp
$(GPP) -c src/operations/operators.cpp -o src/operations/operators.o
src/operations/node.o: src/operations/node.hpp src/operations/node.cpp
$(GPP) -c src/operations/node.cpp -o src/operations/node.o
src/shell.o: src/shell.hpp src/shell.cpp
$(GPP) -c src/shell.cpp -o src/shell.o
src/string.o: src/string.h src/string.c
$(GPP) -c src/string.c -o src/string.o
install:
install -Dt $(DESTDIR)/bin/ sloth
clean:
rm src/parser/lex.yy.c src/parser/parser.tab.c src/parser/parser.tab.h src/parser/lex.yy.o src/parser/parser.tab.o src/variables/environment.o src/variables/variable.o src/variables/value.o src/operations/node.o src/operations/operators.o src/string.o src/shell.o sloth
<file_sep>#include <string.h>
#include <stdlib.h>
#include <stdio.h>
#include "string.h"
char* substring(char* str, int start, int end) {
// Add conditions that substring is valid, probably using strlen
int stringLength = strlen(str);
if (start >= stringLength || end > stringLength) { fprintf(stderr, "Requesting substring outside of valid range.\n"); return NULL; }
char* substr;
int c;
int length = end - start;
substr = (char*) malloc(length + 1);
if (substr == nullptr) {
fprintf(stderr, "Unable to allocate memory for substring.\n");
exit(1);
}
for (c = 0; c < length; c++) {
substr[c] = str[start + c];
}
substr[c] = '\0';
return substr;
}
<file_sep>#ifndef VALUE_H
#define VALUE_H
#include "../operations/node.hpp"
#include <iostream>
#include <string>
#include <variant>
#include <vector>
class Node;
enum TypeTag { DOUBLE, LONG, BOOLEAN, STRING, LAMBDA };
class Value {
public:
enum TypeTag type;
std::variant<std::vector<long>, std::vector<double>, Node*, std::string> val;
// << Overload
friend std::ostream & operator << (std::ostream &out, const Value* val);
std::string toString() const;
// Constructor
Value(TypeTag t, std::vector<long> ns, std::vector<double> ds, Node* e, std::string s) {
/* set properties */
type = t;
if (type == LONG || type == BOOLEAN) {
val = ns;
} else if (type == DOUBLE){ // Assume DOUBLE
val = ds;
} else if (type == STRING) {
val = s;
} else { // Assume lambda expression
val = e;
}
}
// Copy constructor
Value(const Value &v) {
type = v.type;
val = v.val;
}
// // Copy when assigned
// Value operator = (const Value &v) {
// std::cout << "COPIED" << std::endl;
// type = v.type;
// val = v.val;
// return *this;
// }
// Destructor
~Value() {
// std::cout << "VALUE DESTROYED" << std::endl;
}
};
// Constructors
Value* make_long(std::vector<long> num);
Value* make_double(std::vector<double> dec);
Value* make_true();
Value* make_false();
Value* make_boolean(int x);
Value* make_booleans(std::vector<long> x);
Value* make_expression(Node* expr);
Value* make_string(std::string str);
// Destructor
void delete_value(Value* val);
// Getters
std::vector<long> get_long(const Value* val);
std::vector<double> get_double(const Value* val);
Node* get_expression(const Value* val);
std::string get_string(const Value* val);
// Setters
void set_long(Value* val, std::vector<long> num);
void set_boolean(Value* val, int x);
void set_booleans(Value* val, std::vector<long> num);
void set_double(Value* val, std::vector<double> dec);
void set_expression(Value* val, Node* node);
void set_string(Value* val, std::string str);
#endif
<file_sep>#ifndef SLOTH_H
#define SLOTH_H
#include "constants.hpp"
#include "operations/node.hpp"
#include "variables/value.hpp"
#include "variables/variable.hpp"
#include "variables/environment.hpp"
#include "parser/parser.hpp"
#include "parser/parser.tab.h"
#endif
<file_sep>#include <stdio.h>
#include <string.h>
#include "shell.hpp"
#include "parser/parser.hpp"
#include "variables/environment.hpp"
#include "operations/node.hpp"
#include "constants.hpp"
// For keeping track of command history
#ifdef _WIN32 // If we're compiling on windows
#include <string.h>
static char buffer[2048];
// Fake readline function
char* readline(char* prompt) {
fputs(prompt, stdout);
fgets(buffer, 2048, stdin);
char* cpy = (char*) malloc(strlen(buffer) + 1); // cast needed since this is compiled as C++
strcpy(cpy, buffer);
cpy[strlen(cpy) - 1] = '\0';
return cpy;
}
void add_history(char* unused) {}
#else // Otherwise we'll just use the handy readline library
#include <editline/readline.h>
#endif
FILE* stringToFile(char* str) { // Creates a temporary file with the given string as its contents
int i = 0;
FILE* tmp = tmpfile();
if (tmp == nullptr) {
fprintf(stderr, "Unable to create temp file");
return nullptr;
}
while (str[i] != '\0') {
fputc(str[i], tmp); i++;
}
// Set the file pointer to the beginning
rewind(tmp);
return tmp;
}
void start_shell() {
printf("Welcome to SLOTH Version 0.0.1\n");
printf("Press CTRL+C to Exit\n");
Environment* env = new Environment();
while (1) {
// Read line from user and input it into the history
char* input = readline("sloth> ");
add_history(input);
// If user enters "exit" quit the loop
if (strcmp(input, "exit") == 0) { break; }
FILE* inputAsFile = stringToFile(input);
/* save stdin */
FILE* orig_stdin = stdin;
stdin = inputAsFile;
yyparse( );
/* restore stdin */
fclose(stdin);
stdin = orig_stdin;
eval_statement(result, env);
}
delete env;
delete result;
}<file_sep>#ifndef VARIABLE_H
#define VARIABLE_H
#include <string>
#include <memory>
#include "../variables/value.hpp"
class Value;
class Node;
class Variable {
public:
std::string id;
std::unique_ptr<Value> value;
Variable(std::string s, Value* val) : value(std::move(val)) {
id = s;
}
~Variable() {
}
};
// Variable Functions
Value* get_value(const Variable* var);
#endif
<file_sep>#include <iostream>
#include <string>
#include "node.hpp"
#include "operators.hpp"
#include "../constants.hpp"
#include "../parser/parser.tab.h"
#include "../variables/value.hpp"
#include "../variables/variable.hpp"
/* attach an existing node onto a parent */
void attach_node(Node* parent, Node* child) {
/* refuse to attach beyond the fixed-size child array */
if (parent->num_children >= MAX_CHILDREN) { std::cerr << "Error, max children attached to a node" << std::endl; return; }
/* connect it */
parent->children[parent->num_children] = child;
parent->num_children++;
}
void check_num_nodes(Node* node, uint num_children, std::string error) {
if (node && node->num_children != num_children) {
std::cerr << "Error, " << error << std::endl;
}
}
std::ostream & operator << (std::ostream &out, const Node* n) {
out << n->toString();
return out;
}
std::string Node::toString() const {
return tree_string(this, 0);
}
std::string tree_string(const Node* node, uint tabs) {
std::string result = "";
uint i;
/* base case */
if(!node) {
result += "NO TREE STRUCTURE\n";
return result;
}
/* print leading tabs */
for(i = 0; i < tabs; i++) {
result += " ";
}
switch(node->type) {
case IDENTIFIER: result += "IDENTIFIER: " + node->id + "\n"; break;
case VECTOR: result += "VECTOR:\n"; break;
case PLUS: result += "PLUS:\n"; break;
case MINUS: result += "MINUS:\n"; break;
case DIVIDE: result += "DIVIDE:\n"; break;
case TIMES: result += "TIMES:\n"; break;
case LESS: result += "LESS THAN:\n"; break;
case GREATER: result += "GREATER:\n"; break;
case LESSEQ: result += "LESS EQUAL:\n"; break;
case GREATEREQ: result += "GREATER EQUAL:\n"; break;
case EQUALS: result += "EQUAL:\n"; break;
case NEQUALS: result += "NOT EQUAL:\n"; break;
case AND: result += "AND:\n"; break;
case OR: result += "OR:\n"; break;
case NOT: result += "NOT:\n"; break;
case ASSIGN: result += "ASSIGN:\n"; break;
case IF: result += "IF:\n"; break;
case WHILE: result += "WHILE:\n"; break;
case PRINT: result += "PRINT:\n"; break;
case INPUT: result += "INPUT:\n"; break;
case LAMBDATAG: result += "LAMBDA:\n"; break;
case CALLFUNC: result += "FUNCCALL:\n"; break;
case STATEMENT: result += "STATEMENT:\n"; break;
case VALUE: result += "VALUE: " + node->value->toString() + "\n"; break;
default:
std::cerr << "Error, " << node->type << " is not a valid node type." << std::endl;
exit(1);
}
/* print all children nodes underneath */
for(i = 0; i < node->num_children; i++) {
result += tree_string(node->children[i], tabs + 1);
}
return result;
}
Value* parse_vector(Node* node) {
// We will only support homogeneous vectors
// Get the first data type from the first node
Value* tempValue = new Value(*node->value);
TypeTag type = tempValue->type;
delete tempValue;
// Only numeric types and booleans are supported currently
if (type != LONG && type != BOOLEAN && type != DOUBLE) {
std::cerr << "Error, only numeric types and booleans are supported in vector form." << std::endl;
return make_false();
}
std::vector<long> longResult;
std::vector<double> doubleResult;
bool lastNode = false;
Node* currentNode = node;
do {
if (currentNode->value->type == type) {
tempValue = new Value(*currentNode->value);
if (type == LONG || type == BOOLEAN) {
std::vector<long> tempVec = get_long(tempValue);
delete tempValue;
longResult.insert(std::end(longResult), std::begin(tempVec), std::end(tempVec));
} else { // Assume Double
std::vector<double> tempVec = get_double(tempValue);
delete tempValue;
doubleResult.insert(std::end(doubleResult), std::begin(tempVec), std::end(tempVec));
}
} else { std::cerr << "Error, only homogenous arrays are supported." << std::endl; return make_false(); }
if (currentNode->num_children == 0) {
lastNode = true;
} else {
currentNode = currentNode->children[0];
}
} while (currentNode->num_children == 1 || !lastNode);
if (type == LONG || type == BOOLEAN) {
return make_long(longResult);
} else { // Assume double
return make_double(doubleResult);
}
}
Value* eval_expression(Node* node, Environment* env) {
/* base case */
if(!node) {
fprintf(stderr, "Error: No tree structure to evaluate\n");
return 0;
}
// Needed if we are going to take input from the user
double temp;
std::vector<double> tempDecs;
Variable* var = nullptr;
Environment* local_env = nullptr;
Node* tempNode = nullptr;
Value* tempVal = nullptr;
// Evaluate subexpressions if existent and node is not a lambda expression
Value* val1 = nullptr;
Value* val2 = nullptr;
// struct Value* val3 = nullptr;
if (node->num_children > 0 && node->type != LAMBDATAG) {
val1 = eval_expression(node->children[0], env);
if (node->num_children > 1) {
val2 = eval_expression(node->children[1], env);
// if (node->num_children > 2) {
// val3 = eval_expression(node->children[2], env);
// }
}
}
switch(node->type) {
case LAMBDATAG: return make_expression(node); break;
case VECTOR: return parse_vector(node->children[0]); break;
case CALLFUNC:
check_num_nodes(node, 2, "cannot have more than two nodes for a function call.");
tempVal = get_value(find_variable(env, node->children[0]->id));
tempNode = get_expression(tempVal);
delete tempVal;
local_env = new Environment();
add_variable(local_env,
new Variable(tempNode->children[0]->id, // Get the name of the variable needed for the lambda expression
eval_expression(node->children[1], env)));
tempVal = eval_expression(tempNode->children[1], local_env);
delete local_env;
return tempVal;
break;
case PLUS:
check_num_nodes(node, 2, "cannot add more than two expressions.");
return add(val1, val2);
// return val1 + val2;
break;
//----------
case MINUS:
check_num_nodes(node, 2, "cannot subtract more than two expressions.");
return subtract(val1, val2);
// return val1 - val2;
break;
//----------
case DIVIDE:
check_num_nodes(node, 2, "cannot divide more than two expressions.");
return division(val1, val2);
// return val1 / val2;
break;
//----------
case TIMES:
check_num_nodes(node, 2, "cannot multiply more than two expressions.");
return multiplication(val1, val2);
// return val1 * val2;
break;
//----------
case LESS:
check_num_nodes(node, 2, "cannot compare more than two expressions.");
return less(val1, val2);
// return val1 < val2;
break;
//----------
case GREATER:
check_num_nodes(node, 2, "cannot compare more than two expressions.");
return greater(val1, val2);
// return val1 > val2;
break;
//----------
case LESSEQ:
check_num_nodes(node, 2, "cannot compare more than two expressions.");
return less_equal(val1, val2);
// return val1 <= val2;
break;
//----------
case GREATEREQ:
check_num_nodes(node, 2, "cannot compare more than two expressions.");
return greater_equal(val1, val2);
// return val1 >= val2;
break;
//----------
case EQUALS:
check_num_nodes(node, 2, "cannot compare more than two expressions.");
return equals(val1, val2);
// return val1 == val2;
break;
//----------
case NEQUALS:
check_num_nodes(node, 2, "cannot compare more than two expressions.");
return not_equals(val1, val2);
// return val1 != val2;
break;
//----------
case AND:
check_num_nodes(node, 2, "cannot perform logical operators on more than two expressions.");
return and_value(val1, val2);
// return val1 && val2;
break;
//----------
case OR:
check_num_nodes(node, 2, "cannot perform logical operators on more than two expressions.");
return or_value(val1, val2);
// return val1 || val2;
break;
//----------
case NOT:
check_num_nodes(node, 1, "cannot negate more than one expressions.");
return not_value(val1);
// return !val1;
break;
//----------
case INPUT: // We're only going to support reading in doubles
scanf("%lf", &temp);
tempDecs.push_back(temp);
return make_double(tempDecs);
break;
//----------
case IDENTIFIER:
var = find_variable(env, node->id);
if (var == nullptr) {
std::cerr << "Error: Symbol " << node->id << " not found." << std::endl;
return 0;
}
return get_value(var);
break;
//----------
case VALUE:
return new Value(*node->value);
break;
//----------
default:
fprintf(stderr,"Error, %d not a valid expression type.\n", node->type);
return 0;
}
}
void eval_statement(Node* node, Environment* env) {
/* base case */
if(!node) {
fprintf(stderr, "Error: No tree structure to evaluate\n");
return;
}
Value* tempVal;
std::vector<long> tempLong;
switch(node->type) {
case ASSIGN:
check_num_nodes(node, 2, "cannot make an assignment without an identifier and a value.");
add_variable(env,
new Variable(node->children[0]->id,
eval_expression(node->children[1], env)));
break;
//------------
case IF:
if (node->num_children != 2 && node->num_children != 3) {
fprintf(stderr, "Error: The format of an if-statement is if expression statement with an optional else.\n");
}
tempVal = eval_expression(node->children[0], env);
if (tempVal->type == BOOLEAN) {
tempLong = get_long(tempVal);
delete tempVal;
if (tempLong.size() > 1) { std::cerr << "Cannot have a vector of booleans for your if expression" << std::endl; break;}
if (tempLong[0]) {
eval_statement(node->children[1], env);
} else if (node->num_children == 3) {
eval_statement(node->children[2], env);
}
} else {
fprintf(stderr, "Error, a non-boolean was in the condition of an if statement.\n");
}
break;
//------------
case WHILE:
check_num_nodes(node, 2, "the format of a while statement is: while expression statement(s)");
tempVal = eval_expression(node->children[0], env);
if (tempVal->type == BOOLEAN) {
tempLong = get_long(tempVal);
delete tempVal;
if (tempLong.size() > 1) { std::cerr << "Cannot have a vector of booleans for your while expression" << std::endl; break;}
while (tempLong[0]) {
eval_statement(node->children[1], env);
tempVal = eval_expression(node->children[0], env);
tempLong = get_long(tempVal);
delete tempVal;
if (tempLong.size() > 1) { std::cerr << "Cannot have a vector of booleans for your while expression" << std::endl; break;}
}
} else {
fprintf(stderr, "Error, a non-boolean was in the condition of the while loop.\n");
}
break;
//------------
case PRINT:
check_num_nodes(node, 1, "can only print out one expression at a time.");
tempVal = eval_expression(node->children[0], env);
std::cout << tempVal << std::endl;
delete tempVal;
break;
//------------
case STATEMENT: // Can have a maximum of two children statement nodes
if (node->num_children > 0) {
eval_statement(node->children[0], env);
}
if (node->num_children > 1) {
eval_statement(node->children[1], env);
}
break;
//------------
default:
printf("Error, %d not a valid statement type.\n", node->type);
}
return;
}
<file_sep>#include <iostream>
#include <string>
#include "variable.hpp"
Value* get_value(const Variable* var) {
if (!var) { std::cerr << "Error: Invalid Variable" << std::endl; return 0; }
return new Value(*var->value);
}<file_sep>#ifndef OPERATORS_H
#define OPERATORS_H
#include "../variables/value.hpp"
Value* add(Value* x, Value* y);
Value* subtract(Value* x, Value* y);
Value* division(Value* x, Value* y);
Value* multiplication(Value* x, Value* y);
Value* less(Value* x, Value* y);
Value* greater(Value* x, Value* y);
Value* less_equal(Value* x, Value* y);
Value* greater_equal(Value* x, Value* y);
Value* equals(Value* x, Value* y);
Value* not_equals(Value* x, Value* y);
Value* and_value(Value* x, Value* y);
Value* or_value(Value* x, Value* y);
Value* not_value(Value* x);
#endif
<file_sep>#ifndef CONSTANTS_H
#define CONSTANTS_H
#define STATEMENT 200
#define CALLFUNC 201
#define VECTOR 202
class Node;
// Share the line number between files
extern int linenum;
extern Node* result;
#endif<file_sep>#include <iostream>
#include <algorithm>
#include "operators.hpp"
#include "../variables/value.hpp"
// NOTE: Value* x is what is going to be returned for operations, so that we avoid the need for allocating more memory
Value* add(Value* x,Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in add." << std::endl; }
if (x->type == BOOLEAN || y->type == BOOLEAN) { std::cerr << "Error, cannot add a boolean." << std::endl; }
if ((x->type == STRING || y->type == STRING) && (x->type != STRING || y->type != STRING)) {
std::cerr << "Error, cannot add a string with another data type." << std::endl;
}
std::vector<long> longResult;
std::vector<double> doubleResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::plus<long>());
set_long(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
doubleResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(doubleResult), std::plus<>());
set_double(x, doubleResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
doubleResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(doubleResult), std::plus<>());
set_double(x, doubleResult);
} else if (x->type == STRING && y->type == STRING) {
set_string(x, get_string(x) + get_string(y));
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
doubleResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(doubleResult), std::plus<double>());
set_double(x, doubleResult);
}
delete y;
return x;
}
Value* subtract(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in subtract." << std::endl; }
if (x->type == BOOLEAN || y->type == BOOLEAN) { std::cerr << "Error, cannot subtract a boolean." << std::endl; }
if (x->type == STRING || y->type == STRING) { std::cerr << "Error, cannot subtract a string." << std::endl; }
std::vector<long> longResult;
std::vector<double> doubleResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::minus<long>());
set_long(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
doubleResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(doubleResult), std::minus<>());
set_double(x, doubleResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
doubleResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(doubleResult), std::minus<>());
set_double(x, doubleResult);
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
doubleResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(doubleResult), std::minus<double>());
set_double(x, doubleResult);
}
delete y;
return x;
}
Value* division(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in divide." << std::endl; }
if (x->type == BOOLEAN || y->type == BOOLEAN) { std::cerr << "Error, cannot divide a boolean." << std::endl; }
if (x->type == STRING || y->type == STRING) { std::cerr << "Error, cannot division a string." << std::endl; }
std::vector<long> longResult;
std::vector<double> doubleResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::divides<long>());
set_long(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
doubleResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(doubleResult), std::divides<>());
set_double(x, doubleResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
doubleResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(doubleResult), std::divides<>());
set_double(x, doubleResult);
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
doubleResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(doubleResult), std::divides<double>());
set_double(x, doubleResult);
}
delete y;
return x;
}
Value* multiplication(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in multiply." << std::endl; }
if (x->type == BOOLEAN || y->type == BOOLEAN) { std::cerr << "Error, cannot multiply a boolean." << std::endl; }
if (x->type == STRING || y->type == STRING) { std::cerr << "Error, cannot multiply a string." << std::endl; }
std::vector<long> longResult;
std::vector<double> doubleResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::multiplies<long>());
set_long(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
doubleResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(doubleResult), std::multiplies<>());
set_double(x, doubleResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
doubleResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(doubleResult), std::multiplies<>());
set_double(x, doubleResult);
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
doubleResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(doubleResult), std::multiplies<double>());
set_double(x, doubleResult);
}
delete y;
return x;
}
Value* less(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in <." << std::endl; }
if (x->type == BOOLEAN || y->type == BOOLEAN) { std::cerr << "Error, cannot numerically compare a boolean." << std::endl; }
if ((x->type == STRING || y->type == STRING) && (x->type != STRING || y->type != STRING)) {
std::cerr << "Error, cannot compare a string with another data type." << std::endl;
}
std::vector<long> longResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::less<long>());
set_booleans(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(longResult), std::less<>());
set_booleans(x, longResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(longResult), std::less<>());
set_booleans(x, longResult);
} else if (x->type == STRING && y->type == STRING) {
set_boolean(x, get_string(x).compare(get_string(y)) < 0);
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(longResult), std::less<double>());
set_booleans(x, longResult);
}
delete y;
return x;
}
// CONTINUE REPLACING FROM HERE
Value* greater(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in greater." << std::endl; }
if (x->type == BOOLEAN || y->type == BOOLEAN) { std::cerr << "Error, cannot numerically compare a boolean." << std::endl; }
if ((x->type == STRING || y->type == STRING) && (x->type != STRING || y->type != STRING)) {
std::cerr << "Error, cannot compare a string with another data type." << std::endl;
}
std::vector<long> longResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::greater<long>());
set_booleans(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(longResult), std::greater<>());
set_booleans(x, longResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(longResult), std::greater<>());
set_booleans(x, longResult);
} else if (x->type == STRING && y->type == STRING) {
set_boolean(x, get_string(x).compare(get_string(y)) > 0);
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(longResult), std::greater<double>());
set_booleans(x, longResult);
}
delete y;
return x;
}
Value* less_equal(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in <=." << std::endl; }
if (x->type == BOOLEAN || y->type == BOOLEAN) { std::cerr << "Error, cannot numerically compare a boolean." << std::endl; }
if ((x->type == STRING || y->type == STRING) && (x->type != STRING || y->type != STRING)) {
std::cerr << "Error, cannot compare a string with another data type." << std::endl;
}
std::vector<long> longResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::less_equal<long>());
set_booleans(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(longResult), std::less_equal<>());
set_booleans(x, longResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(longResult), std::less_equal<>());
set_booleans(x, longResult);
} else if (x->type == STRING && y->type == STRING) {
set_boolean(x, get_string(x).compare(get_string(y)) <= 0);
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(longResult), std::less_equal<double>());
set_booleans(x, longResult);
}
delete y;
return x;
}
Value* greater_equal(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in >=." << std::endl; }
if (x->type == BOOLEAN || y->type == BOOLEAN) { std::cerr << "Error, cannot numerically compare a boolean." << std::endl; }
if ((x->type == STRING || y->type == STRING) && (x->type != STRING || y->type != STRING)) {
std::cerr << "Error, cannot compare a string with another data type." << std::endl;
}
std::vector<long> longResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::greater_equal<long>());
set_booleans(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(longResult), std::greater_equal<>());
set_booleans(x, longResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(longResult), std::greater_equal<>());
set_booleans(x, longResult);
} else if (x->type == STRING && y->type == STRING) {
set_boolean(x, get_string(x).compare(get_string(y)) >= 0);
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(longResult), std::greater_equal<double>());
set_booleans(x, longResult);
}
delete y;
return x;
}
Value* equals(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in ==." << std::endl; }
if ((x->type == STRING || y->type == STRING) && (x->type != STRING || y->type != STRING)) {
std::cerr << "Error, cannot compare a string with another data type." << std::endl;
}
std::vector<long> longResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::equal_to<long>());
set_booleans(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(longResult), std::equal_to<>());
set_booleans(x, longResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(longResult), std::equal_to<>());
set_booleans(x, longResult);
} else if (x->type == STRING && y->type == STRING) {
set_boolean(x, get_string(x).compare(get_string(y)) == 0);
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(longResult), std::equal_to<double>());
set_booleans(x, longResult);
}
delete y;
return x;
}
Value* not_equals(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in !=." << std::endl; }
if ((x->type == STRING || y->type == STRING) && (x->type != STRING || y->type != STRING)) {
std::cerr << "Error, cannot compare a string with another data type." << std::endl;
}
std::vector<long> longResult;
// Destruct all four cases
if (x->type == LONG && y->type == LONG) {
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::not_equal_to<long>());
set_booleans(x, longResult);
} else if (x->type == LONG && y->type == DOUBLE) {
std::vector<long> x_long = get_long(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_long.size());
std::transform(x_long.begin(), x_long.end(), y_double.begin(),
std::back_inserter(longResult), std::not_equal_to<>());
set_booleans(x, longResult);
} else if (x->type == DOUBLE && y->type == LONG) {
std::vector<double> x_double = get_double(x);
std::vector<long> y_long = get_long(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_long.begin(),
std::back_inserter(longResult), std::not_equal_to<>());
set_booleans(x, longResult);
} else if (x->type == STRING && y->type == STRING) {
set_boolean(x, get_string(x).compare(get_string(y)) != 0);
} else { // Both are DOUBLE
std::vector<double> x_double = get_double(x);
std::vector<double> y_double = get_double(y);
longResult.reserve(x_double.size());
std::transform(x_double.begin(), x_double.end(), y_double.begin(),
std::back_inserter(longResult), std::not_equal_to<double>());
set_booleans(x, longResult);
}
delete y;
return x;
}
Value* and_value(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in &&." << std::endl; }
if (x->type != BOOLEAN || y->type != BOOLEAN) { std::cerr << "Error, cannot use and AND operation with a non-boolean." << std::endl; }
std::vector<long> longResult;
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::logical_and<>());
set_booleans(x, longResult);
delete y;
return x;
}
Value* or_value(Value* x, Value* y) {
if (!x || !y) { std::cerr << "Error, uninitialized values being used in ||." << std::endl; }
if (x->type != BOOLEAN || y->type != BOOLEAN) { std::cerr << "Error, cannot use and OR operation with a non-boolean." << std::endl; }
std::vector<long> longResult;
std::vector<long> x_long = get_long(x);
std::vector<long> y_long = get_long(y);
std::transform(x_long.begin(), x_long.end(), y_long.begin(),
std::back_inserter(longResult), std::logical_or<>());
set_booleans(x, longResult);
delete y;
return x;
}
Value* not_value(Value* x) {
if (!x) { std::cerr << "Error, uninitialized values being used in !." << std::endl; }
if (x->type != BOOLEAN) { std::cerr << "Error, cannot NOT a non-boolean." << std::endl; }
std::vector<long> longResult;
std::vector<long> x_long = get_long(x);
std::transform(x_long.begin(), x_long.end(), std::back_inserter(longResult), std::logical_not<>());
set_booleans(x, longResult);
return x;
}
<file_sep>#include <stdlib.h>
#include "value.hpp"
#include <string>
#include <iostream>
#include <variant>
#include <algorithm>
#include "../parser/parser.tab.h"
Value* make_long(std::vector<long> nums) {
std::vector<double> decs;
return new Value(LONG, nums, decs, nullptr, "");
}
Value* make_double(std::vector<double> decs) {
std::vector<long> nums;
return new Value(DOUBLE, nums, decs, nullptr, "");
}
Value* make_true() {
std::vector<long> nums = {1};
std::vector<double> decs;
return new Value(BOOLEAN, nums, decs, nullptr, "");
}
Value* make_false() {
std::vector<long> nums = {0};
std::vector<double> decs;
return new Value(BOOLEAN, nums, decs, nullptr, "");
}
Value* make_boolean(int x) {
return (x) ? make_true() : make_false();
}
Value* make_booleans(std::vector<long> x) {
std::vector<double> decs;
std::vector<long> result;
std::transform(x.begin(), x.end(), std::back_inserter(result),
[] (long n) {
return n != 0;
});
return new Value(BOOLEAN, result, decs, nullptr, "");
}
Value* make_expression(Node* expr) {
std::vector<long> nums;
std::vector<double> decs;
return new Value(LAMBDA, nums, decs, expr, "");
}
Value* make_string(std::string str) {
std::vector<long> nums;
std::vector<double> decs;
return new Value(STRING, nums, decs, nullptr, str);
}
void delete_value(Value* val) {
    // Values are allocated with new, so release them with delete; free() would skip
    // the destructor of the std::variant payload and leak its contents.
    delete val;
}
std::vector<long> get_long(const Value* val) {
return std::get<std::vector<long>>(val->val);
}
std::vector<double> get_double(const Value* val) {
return std::get<std::vector<double>>(val->val);
}
Node* get_expression(const Value* val) {
return std::get<Node*>(val->val);
}
std::string get_string(const Value* val) {
return std::get<std::string>(val->val);
}
void set_long(Value* val, std::vector<long> num) {
val->type = LONG;
val->val = num;
}
void set_boolean(Value* val, int x) {
val->type = BOOLEAN;
std::vector<long> result;
if (x) { result.push_back(1); } else { result.push_back(0); }
val->val = result;
}
void set_booleans(Value* val, std::vector<long> num) {
val->type = BOOLEAN;
val->val = num;
}
void set_double(Value* val, std::vector<double> dec) {
val->type = DOUBLE;
// val->value.dec = dec;
val->val = dec;
}
void set_expression(Value* val, Node* expr) {
val->type = LAMBDA;
val->val = expr;
}
void set_string(Value* val, std::string str) {
val->type = STRING;
val->val = str;
}
std::string Value::toString() const {
std::string result = "";
if (this->type == BOOLEAN) {
std::vector<long> longVec = get_long(this);
if (longVec.size() == 1) {
result += (longVec[0]) ? "true" : "false";
} else {
result += "[";
for (uint i = 0; i < longVec.size() - 1; i++) {
result += (longVec[i]) ? "true" : "false";
result += ", ";
}
result += (longVec[longVec.size() - 1]) ? "true" : "false";
result += "]";
}
} else if (this->type == LONG) {
std::vector<long> longVec = get_long(this);
if (longVec.size() == 1) {
result += std::to_string(longVec[0]);
} else {
result += "[";
for (uint i = 0; i < longVec.size() - 1; i++) {
result += std::to_string(longVec[i]);
result += ", ";
}
result += std::to_string(longVec[longVec.size() - 1]);
result += "]";
}
} else if (this->type == STRING) {
result += get_string(this);
} else if (this->type == DOUBLE) {
std::vector<double> longVec = get_double(this);
if (longVec.size() == 1) {
result += std::to_string(longVec[0]);
} else {
result += "[";
for (uint i = 0; i < longVec.size() - 1; i++) {
result += std::to_string(longVec[i]);
result += ", ";
}
result += std::to_string(longVec[longVec.size() - 1]);
result += "]";
}
} else { // Assume lambda expression
result += "<LambdaExpression>";
}
return result;
}
std::ostream & operator << (std::ostream &out, const Value* val) {
out << val->toString();
return out;
}
<file_sep>#ifndef NODE_H
#define NODE_H
#include <string>
#include <array>
#include <iostream>
#include <memory>
#include "../variables/value.hpp"
#include "../variables/environment.hpp"
#define MAX_CHILDREN 3
class Value;
class Environment;
/* a tree node definition */
class Node {
public:
int type;
std::unique_ptr<Value> value;
/* the id of the node (used for identifiers only) */
std::string id;
/* at most three children nodes */
uint num_children;
std::array<Node*, MAX_CHILDREN> children;
friend std::ostream & operator << (std::ostream &out, const Node* n);
std::string toString(void) const;
Node(int t, std::unique_ptr<Value> v, std::string s) : value(std::move(v)) {
type = t;
id = s;
num_children = 0;
std::fill(children.begin(), children.end(), nullptr);
}
~Node() {
for (uint i = 0; i < num_children; i++) {
delete children[i];
}
}
};
// Abstract Syntax Tree Functions
void attach_node(Node* parent, Node* child);
std::string tree_string(const Node* node, uint tabs);
// Interpreting AST
void eval_statement(Node* node, Environment* env);
Value* eval_expression(Node* node, Environment* env);
Value* parse_vector(Node* node);
#endif
<file_sep>#ifndef ENVIRONMENT_H
#define ENVIRONMENT_H
#include "variable.hpp"
#include <string>
#include <vector>
class Variable;
class Environment {
public:
std::vector<Variable*> vars;
Environment() { }
~Environment();
};
// Variable Lookup Functions
Variable* find_variable(Environment* env, std::string id);
void add_variable(Environment* env, Variable* var);
#endif
|
7c4e3601799659dbbff8c6995c42b2d8a37bfa57
|
[
"C",
"Makefile",
"C++"
] | 19
|
C++
|
Brandon-Rozek/SLOTH
|
6c8d3ef1c45404c8be8d670ddb06632c0c7a9fe7
|
7ded54cf13b6c6bb797875e2abe322dfa56dc6ac
|
refs/heads/master
|
<file_sep>require "rspec"
require "tdd"
describe "first tdd project" do
describe "#my_uniq" do
it "returns only unique values of an array" do
expect(my_uniq([1, 2, 1, 3, 3])).to eq([1, 2, 3])
end
end
describe "#two_sum" do
it "returns pairs of indices whose element's pairs sum zero" do
expect([-1, 0, 2, -2, 1].two_sum).to eq([[0, 4], [2, 3]])
end
end
describe "#my_transpose" do
it "coverts between row oriented and column oriented representations" do
expect(my_transpose([
[0, 1, 2],
[3, 4, 5],
[6, 7, 8]
])).to eq([[0, 3, 6],
[1, 4, 7],
[2, 5, 8]])
end
end
describe "#stock_picker" do
# it "takes in an array" do
# subject(:stock_picker) {stock_picker(arg)}
# expect(arg).to be_a(Array)
# end
stocks = stock_picker([200,300,400,300,400])
it "returns pair of indices" do
expect(stocks.length).to be(2)
end
it "the first value being less than the second value" do
expect(stocks.first).to be < stocks.last
end
it "returns indices of elements that have the greatest difference" do
expect(stocks).to eq([0,2])
end
end
end<file_sep>
def my_uniq(array)
arr = []
array.each do |ele|
arr << ele unless arr.include?(ele)
end
arr
end
class Array
def two_sum
pairs = []
(0...self.length).each do |i1|
(i1+1...self.length).each do |i2|
pairs << [i1, i2] if self[i1] + self[i2] == 0
end
end
pairs
end
end
def my_transpose(matrix)
transposed = Array.new(matrix.length) {Array.new()}
(0...matrix.length).each do |i|
matrix.each do |sub_arr|
transposed[i] << sub_arr[i]
end
end
transposed
end
def stock_picker(prices)
pairs = []
highest_profit = 0
(0...prices.length).each do |i1|
(i1 +1...prices.length).each do |i2|
profit = prices[i2] - prices[i1]
if profit > highest_profit
pairs = [i1, i2]
highest_profit = profit
end
end
end
pairs
end
<file_sep>class TowersOfHanoi
attr_reader :piles
def initialize
@piles = [[1, 2, 3, 4],[],[]]
end
def move(arr)
selection1 = arr.first.to_i - 1
piece = piles[selection1].shift
selection2 = arr.last.to_i - 1
piles[selection2].unshift(piece)
end
def play
end
def get_player_move
move = []
puts "enter the pile you want to take a piece from"
player_input = gets.chomp
move << player_input
puts "enter the pile you want to add your piece to"
player_input = gets.chomp
move << player_input
move
end
def won?
return true if piles[1] == [1,2,3,4] || piles[2] == [1,2,3,4]
false
end
end<file_sep># three piles (arrays)
# move method, that moves the last element.arrays
# last in, first out. (stack)
# won? method: all the numbers are in one pile and they are in order
# order is [1, 2, 3, 4]
# need to look at each pile to determine if won? == true
require "rspec"
require "towers_of_hanoi"
describe TowersOfHanoi do
subject(:game) {TowersOfHanoi.new}
describe "#initialize" do
it "creates three arrays" do
expect(game.piles).to be_a(Array)
end
it "pile1 should be stacked in ascending order" do
expect(game.piles[0]).to eq([1, 2, 3, 4])
end
it "pile2 and pile3 should start empty" do
expect(game.piles[1]).to be_empty
expect(game.piles[2]).to be_empty
end
end
describe "#move" do
# context "makes the proper move" do
# a = game.piles
# game.move([1,2])
# b = game.piles
# it "piles array is mutated" do
# a = game.piles
# game.move([1,2])
# b = game.piles
# # expect(a).to_not eq(b)
# end
it "removes the first element from the first array within piles" do
game.move([1, 2])
expect(game.piles[0]).to eq([2,3,4])
end
it "adds the element to the correct pile" do
game.move([1, 2])
expect(game.piles[1]).to eq([1])
end
# end
end
describe "#won?" do
it "returns false if the game is not won" do
expect(game.won?).to be false
end
it "returns true if second pile has all the pieces in ascending order" do
game.piles[1] = [1,2,3,4]
      expect(game.won?).to be true
end
it "returns true if third pile has all the pieces in ascending order" do
game.piles[2] = [1,2,3,4]
      expect(game.won?).to be true
end
end
describe "play" do
it "should repeat game until game.won? equals true" do
end
end
# describe "get_player_move" do
# it "returns an array of two numbers" do
# expect(game.get_player_move.length).to be(2)
# end
# it "the two numbers cannot be the same" do
# expect(game.get_player_move.first).not_to eq(game.get_player_move.last)
# end
# end
# describe "#valid_moves?" do
# it "should return true if it is valid" do
# expect(valid_moves([1, 2])).to
# end
# it "should return false if it is not valid" do
# end
# end
end
# allow(get_player_move).to receive(:gets).and_return(1)
# name = get_player_move.gets
|
b070411434f778b31b9291d76d7034e4c4e08eca
|
[
"Ruby"
] | 4
|
Ruby
|
lifesscholar/W4D4-Classwork
|
b2cff5b9f97666ed0869ed2db9f2eb7bac31b38f
|
0e0be691141d21c84a8e6547c2f7184063868a80
|
refs/heads/main
|
<repo_name>voxifx/voxinewreg<file_sep>/komutlar/isim.js
module.exports = {
kod: "isim",
async run (client, message, args) {
const Discord = require('discord.js')
const voxi = require('../_BOOT/config.json')
if (!message.member.roles.cache.has(voxi.kayıtsrm) && !message.member.hasPermission("ADMINISTRATOR")) return message.channel.send("Hata: `Bu komutunu kullanabilmek için herhangi bir yetkiye sahip değilsin.`").then(x => x.delete({timeout: 10000}));
let voximember = message.mentions.members.first() || message.guild.members.cache.get(args[0]) || message.member;
if(!voximember) return message.channel.send("Lütfen birini etiketleyiniz.")
let nick = args[1];
let age = args[2];
if(!nick) return message.channel.send("Lütfen bir isim belirleyiniz.")
if(!age) return message.channel.send("Lütfen bir yaş belirleyiniz.")
await voximember.setNickname(`${voxi.tag} ${nick.charAt(0).toUpperCase() + nick.slice(1).toLowerCase()}${age ? ` | ${age}` : ``}`).catch();
const embed = new Discord.MessageEmbed()
.setFooter(`${message.author.tag} Tarafından Kullanıldı! | Developed By Voxi ^^`)
.setColor("RANDOM")
.setDescription(`${voximember} adlı kullanıcının ismi ${voxi.tag} ${nick.charAt(0).toUpperCase() + nick.slice(1).toLowerCase()}${age ? ` | ${age}` : ``} olarak değiştirildi.`)
message.channel.send(embed)
}
}<file_sep>/komutlar/neredebumal.js
module.exports = {
kod: "nerede",
async run (client, message, args) {
const Discord = require('discord.js')
const voxi = require('../_BOOT/config.json')
const mongoose = require('mongoose')
const Register = require("../Semalar/Register.js");
let voxiEmb = new Discord.MessageEmbed().setAuthor(message.author.tag,message.author.displayAvatarURL({ dynamic: true }))
if(!message.member.hasPermission('ADMINISTRATOR')) return message.channel.send(voxiEmb.setDescription(`**Bu komutu kullanmaya yetkin yetmiyor.**`)).then(x => x.delete({ timeout: 5000 }));
let member = message.mentions.members.first() || message.guild.members.cache.get(args[0])
if(!member) return message.channel.send(voxiEmb.setDescription(`Bir kullanıcı belirtmelisin.`)).then(x => x.delete({ timeout: 5000 }));
let voiceChannel = member.voice.channel
if(!voiceChannel) return message.channel.send(voxiEmb.setDescription(`Belirttiğin kişi ses kanalında bulunmuyor.`)).then(x => x.delete({ timeout: 5000 }));
let microphone = member.voice.selfMute ? "Kapalı" : "Açık";
let headphones = member.voice.selfDeaf ? "Kapalı" : "Açık";
let sestekiler = message.guild.channels.cache.get(voiceChannel.id).members.map(x => `${x.user} - \`${x.user.id}\``).join("\n")
message.channel.send(voxiEmb.setDescription(`
${member} kişisi <#${voiceChannel.id}> kanalında. **Mikrofonu ${microphone}**, **Kulaklığı ${headphones}**
`).setColor("RANDOM"))
}
}<file_sep>/komutlar/asdasd.js
module.exports = {
kod: "qweqwek",
async run (client, message, args) {
var tags = args.slice(0).join(" ");
message.channel.send(`yasaklı tag ${tags}`)
}
}<file_sep>/komutlar/vip.js
module.exports = {
kod: "vip",
async run (client, message, args) {
const Discord = require('discord.js')
const voxi = require('../_BOOT/config.json')
const mongoose = require('mongoose')
if(!message.member.hasPermission('ADMINISTRATOR')) return message.reply("Hata: Bunu kullanabilmek için yetkin yok.")
let voximember = message.mentions.members.first() || message.guild.members.cache.get(args[0]) || message.member;
if(!voximember) return message.channel.send("Lütfen birini etiketleyiniz.")
        message.channel.send(`${voximember.user.tag} adlı kişiye vip rolü verilmiştir.`)
await voximember.roles.add(voxi.viprol)
}
}<file_sep>/komutlar/erkek.js
module.exports = {
kod: "e",
async run (client, message, args) {
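    // Male-register command: gives the member the male role, removes the unregistered
    // role, renames them to "<tag> Name | Age", announces them in the chat channel and
    // updates the Mongo register counters/name history for the staff member and the user.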
const Discord = require('discord.js')
const voxi = require('../_BOOT/config.json')
const mongoose = require('mongoose')
const Register = require("../Semalar/Register.js");
//<a:voxi_uk:860090952387657739>
let voximember = message.mentions.members.first() || message.guild.members.cache.get(args[0]) || message.member;
if (!message.member.roles.cache.has(voxi.kayıtsrm) && !message.member.hasPermission("ADMINISTRATOR")) return message.channel.send("Hata: `Bu komutunu kullanabilmek için herhangi bir yetkiye sahip değilsin.`").then(x => x.delete({timeout: 10000}));
let registerData = await Register.findOne({ guildId: message.guild.id, userId: voximember.id });
let staffData = await Register.findOne({ guildId: message.guild.id, userId: message.author.id });
if(!voximember) return message.channel.send("Hata: Lütfen bir kullanıcı belirleyiniz @voxi/ID")
let nick = args[1];
let age = args[2];
if(!nick) return message.channel.send("Hata: Lütfen bir isim belirleyiniz.")
if(!age) return message.channel.send("Hata: Lütfen bir yaş belirleyiniz.")
var erkekrol = voxi.erkek
var family = voxi.tagrol
var unregrol = voxi.unregister
await voximember.roles.add(erkekrol)
await voximember.roles.remove(unregrol)
await voximember.setNickname(`${voxi.tag} ${nick.charAt(0).toUpperCase() + nick.slice(1).toLowerCase()}${age ? ` | ${age}` : ``}`).catch();
var chat = voxi.chatId
client.channels.cache.get(chat).send(`${voximember} adlı kişi sunucumuza giriş yaptı hoşgeldiniz.`).then(x => x.delete({timeout: 10000}));
if(!staffData) {
let newStaffData = new Register({
_id: new mongoose.Types.ObjectId(),
guildId: message.guild.id,
userId: message.author.id,
totalRegister: 1,
womanRegister: 0,
manRegister: 1,
userNames: []
}).save();
} else {
staffData.totalRegister++
staffData.manRegister++
staffData.save();
}
if(!registerData) {
let newRegisterData = new Register({
_id: new mongoose.Types.ObjectId(),
guildId: message.guild.id,
userId: voximember.id,
totalRegister: 0,
womanRegister: 0,
manRegister: 0,
userNames: [{ nick: `${voxi.tag} ${nick.charAt(0).toUpperCase() + nick.slice(1).toLowerCase()} `, type: `İsim Değiştirme`}]
}).save();
} else {
registerData.userNames.push({ nick: `${voxi.tag} ${nick.charAt(0).toUpperCase() + nick.slice(1).toLowerCase()} `, type: `İsim Değiştirme`})
registerData.save();
}
message.react(voxi.tikemojiid)
}
}<file_sep>/index.js
const Discord = require('discord.js') // load the discord.js module
const client = new Discord.Client() // create the client
const { readdirSync } = require('fs'); // helpers
const { join } = require('path'); // helpers
const voxi = require('./_BOOT/config.json')
const mongoose = require('mongoose')
const moment = require('moment');
const fs = require('fs');
require('moment-duration-format')
const data = require('quick.db')
client.commands = new Discord.Collection(); // command collection
const aliases = client.aliases = new Discord.Collection();
const prefix = voxi.prefix
const commandFiles = readdirSync(join(__dirname, "komutlar")).filter(file => file.endsWith(".js")); // collect the .js command files from the komutlar folder
for (const file of commandFiles) {
const command = require(join(__dirname, "komutlar", `${file}`));
    client.commands.set(command.kod, command); // register each command under its "kod" key
}
client.on("error", console.error);
client.on('ready', () => {
client.user.setActivity(voxi.botdurum)
console.log('[VOXİ] Bot Aktif.')
});
Date.prototype.toTurkishFormatDate = function (format) {
let date = this,
day = date.getDate(),
weekDay = date.getDay(),
month = date.getMonth(),
year = date.getFullYear(),
hours = date.getHours(),
minutes = date.getMinutes(),
seconds = date.getSeconds();
let monthNames = new Array("Ocak", "Şubat", "Mart", "Nisan", "Mayıs", "Haziran", "Temmuz", "Ağustos", "Eylül", "Ekim", "Kasım", "Aralık");
let dayNames = new Array("Pazar", "Pazartesi", "Salı", "Çarşamba", "Perşembe", "Cuma", "Cumartesi");
if (!format) {
format = "dd MM yyyy | hh:ii:ss";
};
format = format.replace("mm", month.toString().padStart(2, "0"));
format = format.replace("MM", monthNames[month]);
//" <NAME> Beni Nur İçinde Daldır Beni Can Bedenden Ayrılırken İmanımla Gönder Beni " gece patlamamak için
// "Allahümme ente rabbi la ilahe illa ente aleyke tevekkeltü ve ente rabb'ül-arş'il-azim, maşallahü kane ve ma lem yeşe lem yekün ve la havle ve la kuvvete illa billah'il Aliyy'il Azim." <NAME>
//اللهم احرص على ألا تنفجر جزمة الثأر وأحذية الغار الأخرى يا إلهي هذه الجزمة هي الأفضل إن شاء الله آمين.
if (format.indexOf("yyyy") > -1) {
format = format.replace("yyyy", year.toString());
} else if (format.indexOf("yy") > -1) {
format = format.replace("yy", year.toString().substr(2, 2));
};
format = format.replace("dd", day.toString().padStart(2, "0"));
format = format.replace("DD", dayNames[weekDay]);
//" <NAME> Beni Nur İçinde Daldır Beni Can Bedenden Ayrılırken İmanımla Gönder Beni " gece patlamamak için
// "Allahümme ente rabbi la ilahe illa ente aleyke tevekkeltü ve ente rabb'ül-arş'il-azim, maşallahü kane ve ma lem yeşe lem yekün ve la havle ve la kuvvete illa billah'il Aliyy'il Azim." Tam Koruma
//اللهم احرص على ألا تنفجر جزمة الثأر وأحذية الغار الأخرى يا إلهي هذه الجزمة هي الأفضل إن شاء الله آمين.
if (format.indexOf("HH") > -1) format = format.replace("HH", hours.toString().replace(/^(\d)$/, '0$1'));
if (format.indexOf("hh") > -1) {
if (hours > 24) hours -= 24;
if (hours === 0) hours = 24;
format = format.replace("hh", hours.toString().replace(/^(\d)$/, '0$1'));
};
if (format.indexOf("ii") > -1) format = format.replace("ii", minutes.toString().replace(/^(\d)$/, '0$1'));
if (format.indexOf("ss") > -1) format = format.replace("ss", seconds.toString().replace(/^(\d)$/, '0$1'));
return format;
};
//" <NAME> Beni Nur İçinde Daldır Beni Can Bedenden Ayrılırken İmanımla Gönder Beni " gece patlamamak için
// "Allahümme ente rabbi la ilahe illa ente aleyke tevekkeltü ve ente rabb'ül-arş'il-azim, maşallahü kane ve ma lem yeşe lem yekün ve la havle ve la kuvvete illa billah'il Aliyy'il Azim." <NAME>oruma
//اللهم احرص على ألا تنفجر جزمة الثأر وأحذية الغار الأخرى يا إلهي هذه الجزمة هي الأفضل إن شاء الله آمين.
client.on("message", async message => {
if(message.author.bot) return;
if(message.content.startsWith(prefix)) {
const args = message.content.slice(prefix.length).trim().split(/ +/);
const command = args.shift().toLowerCase();
if(!client.commands.has(command)) return message.channel.send(`Komut dosyamda **${command}** adlı bir komut bulamadım.`);
try {
client.commands.get(command).run(client, message, args);
} catch (error){
console.error(error);
}
}
})
//" <NAME> Beni Nur İçinde Daldır Beni Can Bedenden Ayrılırken İmanımla Gönder Beni " gece patlamamak için
// "Allahümme ente rabbi la ilahe illa ente aleyke tevekkeltü ve ente rabb'ül-arş'il-azim, maşallahü kane ve ma lem yeşe lem yekün ve la havle ve la kuvvete illa billah'il Aliyy'il Azim." Tam Koruma
//اللهم احرص على ألا تنفجر جزمة الثأر وأحذية الغار الأخرى يا إلهي هذه الجزمة هي الأفضل إن شاء الله آمين.
client.on("guildMemberAdd", async (member) => {
await member.roles.add(voxi.unregister)
})
//the welcome message handler is below, change it here (note to Kerem)
//" <NAME> Beni Nur İçinde Daldır Beni Can Bedenden Ayrılırken İmanımla Gönder Beni " gece patlamamak için
// "Allahümme ente rabbi la ilahe illa ente aleyke tevekkeltü ve ente rabb'ül-arş'il-azim, maşallahü kane ve ma lem yeşe lem yekün ve la havle ve la kuvvete illa billah'il Aliyy'il Azim." <NAME>
//اللهم احرص على ألا تنفجر جزمة الثأر وأحذية الغار الأخرى يا إلهي هذه الجزمة هي الأفضل إن شاء الله آمين.
client.on("guildMemberAdd", async (member) => {
var ytags = data.fetch(`ytag_${member.guild.id}`)
var controltag = member.user.username.includes(ytags)
let guvenilirlik = Date.now()-member.user.createdTimestamp < 1000*60*60*24*7;
member.setNickname(`${voxi.tag} İsim | Yaş`)
let memberGün = moment(member.user.createdAt).format("DD");
let memberTarih = moment(member.user.createdAt).format("YYYY HH:mm:ss");
let memberAylar = moment(member.user.createdAt).format("MM").replace("01", "Ocak").replace("02", "Şubat").replace("03", "Mart").replace("04", "Nisan").replace("05", "Mayıs").replace("06", "Haziran").replace("07", "Temmuz").replace("08", "Ağustos").replace("09", "Eylül").replace("10", "Ekim").replace("11", "Kasım").replace("12", "Aralık");
let üyesayısı = member.guild.members.cache.size.toString().replace(/ /g, " ")
üyesayısı = üyesayısı.replace(/([a-zA-Z])/g, "bilinmiyor").toLowerCase()
if(!guvenilirlik) {
client.channels.cache.get(voxi.registerchat).send(`${voxi.tag} ${member} \`(${member.id})\` sunucumuza hoşgeldin! Seninle birlikte ${üyesayısı} kişi olduk!\n\n${voxi.tag} Hesabını açılış süresi ${memberGün} ${memberAylar} ${memberTarih}!\n\n${voxi.tag} yetkililerimiz seninle ilgilenecektir!\n\nTagımıza ulaşmak için herhangi bir kanala \`.tag\` yazman yeterli! \`(${voxi.tag} - ${voxi.etiketTag})\``)
} else {
member.roles.set([voxi.jailrol])
client.channels.cache.get(voxi.registerchat).send(`${member} \`(${member.id})\` kişisi sunucuya katıldı. Fakat hesabı 7 gün içerisinde açıldığı için jaile atıldı!`)
client.channels.cache.get(voxi.jailchat).send(`${member} \`(${member.id})\` sunucumuza hoşgeldin! Fakat hesabın 7 gün içerisinde açıldığı için jaile düştün. Yasaklı tagı sildikten sonra kayıt olmak için yetkililere ulaşabilirsin!`)
}
})
//" <NAME> <NAME> İçinde Daldır <NAME> Ayrılırken <NAME> " gece patlamamak için
// "Allahümme ente rabbi la ilahe illa ente aleyke tevekkeltü ve ente rabb'ül-arş'il-azim, maşallahü kane ve ma lem yeşe lem yekün ve la havle ve la kuvvete illa billah'il Aliyy'il Azim." Tam Koruma
//اللهم احرص على ألا تنفجر جزمة الثأر وأحذية الغار الأخرى يا إلهي هذه الجزمة هي الأفضل إن شاء الله آمين.
mongoose.connect(``).then(console.log("[Voxi] Mongo bağlandı"))
client.login(voxi.token)
// client.channels.cache.get(voxi.registerchat).send(`${voxi.tag} ${member} \`(${member.id})\` sunucumuza hoşgeldin! Seninle birlikte ${üyesayısı} kişi olduk!\n\n${voxi.tag} Hesabını açılış süresi ${memberGün} ${memberAylar} ${memberTarih}!\n\n${voxi.tag} yetkililerimiz seninle ilgilenecektir!\n\nTagımıza ulaşmak için herhangi bir kanala \`.tag\` yazman yeterli! \`(${voxi.tag} - ${voxi.etiketTag})\``)<file_sep>/komutlar/ytses.js
const Discord = require('discord.js');
const voxi = require('../_BOOT/config.json')
module.exports = {
kod: "ysay",
async run (client, message, args) {
if(!message.member.hasPermission('ADMINISTRATOR')) {
message.channel.send("Hata: `Bu komutunu kullanabilmek için herhangi bir yetkiye sahip değilsin.`").then(x => x.delete({timeout: 10000}));
return }
let embed = new Discord.MessageEmbed().setColor('RANDOM')
let sesdedeğil = message.guild.members.cache.filter(x => x.roles.cache.has(voxi.kayıtsrm)).filter(y => !y.voice.channel&& y.presence.status!="offline")
message.channel.send(embed.setDescription(`
Aktif olup seste olmayan yetkililer aşağıda belirtimiştir dm atmak için \`.yetkilidm\` yazmanız yeterli\n────────────────────────────────────────────────\n
${sesdedeğil.map(s => `${s} <@!${s.user.id}>`).join(' , ')}`))
}
}
<file_sep>/komutlar/kayıttop.js
module.exports = {
kod: "topteyit",
async run (client, message, args) {
const Discord = require('discord.js')
const mongoose = require("mongoose");
const Register = require('../Semalar/Register.js');
let embed = new Discord.MessageEmbed().setAuthor(message.author.username, message.author.avatarURL({ dynamic: true }));
let registerTop = await Register.find({ guildId: message.guild.id }).sort([["totalRegister", "descending"]]).exec();
if(!registerTop.length) return message.channel.send(embed.setDescription(`Herhangi bir kayıt verisi bulunamadı!`))
registerTop = registerTop.filter(x => message.guild.members.cache.has(x.userId)).splice(0, 10)
message.channel.send(embed.setDescription(registerTop.map((x, i) => `\`${i+1}.\` <@${x.userId}> Toplam **${x.totalRegister}** teyit (**${x.manRegister}** Erkek, **${x.womanRegister}** Kız)`)))
}
}
<file_sep>/komutlar/info.js
module.exports = {
kod: "me",
async run (client, message, args) {
const Discord = require('discord.js')
const mongoose = require("mongoose");
const Register = require('../Semalar/Register.js');
const moment = require('moment')
moment.locale('tr')
let user = message.mentions.members.first() || message.guild.members.cache.get(args[0]) || message.member;
let registerData = await Register.findOne({ guildId: message.guild.id, userId: user.id });
let embed = new Discord.MessageEmbed().setAuthor(user.user.username, user.user.avatarURL({ dynamic: true }));
// return message.channel.send(embed.addField(`❯ Kullanıcı Bilgisi`, `\`•\` Hesap: ${user} \n\`•\` Sunucu İsmi: ${user.displayName} \n\`•\` Kullanıcı ID: ${user.id}`).addField(`❯ Kayıt Bilgisi`, `\`•\` Toplam: ${x.totalRegister} \n\`•\` Erkek: ${x.manRegister} \n\`•\` Kız: ${x.womanRegister}`))
if(!registerData) {
            return new Register({ // create and save a fresh record, then stop (registerData is null below)
_id: new mongoose.Types.ObjectId(),
guildId: message.guild.id,
userId: user.id,
totalRegister: 0,
womanRegister: 0,
manRegister: 0,
userNames: []
}).save().then(x => {
return message.channel.send(new Discord.MessageEmbed()
.setColor("BLACK")
.addFields(
{ name: '<a:voxi_elmas:875018309177851954> Erkek Kayıt', value: `\`\`\`${x.manRegister}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> Kız Kayıt', value: `\`\`\`${x.womanRegister}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> Total Kayıt', value: `\`\`\`${x.totalRegister}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> Kullanıcı ID', value: `\`\`\`${user.id}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> Kullanıcı Tag', value: `\`\`\`${user.user.tag}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> Önceki İsimler', value: `\`\`\`${x.userNames.length}\`\`\``, inline: true },
))
});
}
var kadın = registerData.womanRegister
message.channel.send(new Discord.MessageEmbed()
.setColor("BLACK")
.addFields(
{ name: '<a:voxi_elmas:875018309177851954> Erkek Kayıt', value: `\`\`\`${registerData.manRegister}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> Kız Kayıt', value: `\`\`\`${kadın}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> Total Kayıt', value: `\`\`\`${registerData.totalRegister}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> Kullanıcı ID', value: `\`\`\`${user.id}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> <NAME>', value: `\`\`\`${user.user.tag}\`\`\``, inline: true },
{ name: '<a:voxi_elmas:875018309177851954> <NAME>', value: `\`\`\`${registerData.userNames.length}\`\`\``, inline: true },
))
}
}
<file_sep>/README.md
Voxi reg
Hey, today I'm sharing a registration bot base I put together in about an hour.
How to set it up: there's a config in the _BOOT folder, fill in the values there.
Then, at the very bottom of index.js, the MongoDB URL goes in; this one matters. Go to https://www.mongodb.com/cloud/atlas and grab a connection URL.
If you don't know how to get one, look up a MongoDB URL tutorial on YouTube.
You're probably on Heroku, which wants a Procfile and so on; add one with worker: node index.js.
Then start the bot and that's it. By the way, if you want the base, message Voxi.#6573 on Discord.
See you around.
Oh, and I added a prayer comment inside so it doesn't blow up; don't worry about it.
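
For reference, here is a rough sketch of the keys that index.js and the commands read from `_BOOT/config.json`. This is only inferred from the code in this repo; every value below is a placeholder, and files not shown here may use additional keys.

```json
{
  "prefix": ".",
  "token": "YOUR_BOT_TOKEN",
  "botdurum": "status text for the bot",
  "tag": "YOUR_TAG",
  "etiketTag": "your tag / invite text",
  "tagrol": "TAG_ROLE_ID",
  "unregister": "UNREGISTERED_ROLE_ID",
  "erkek": "MALE_ROLE_ID",
  "viprol": "VIP_ROLE_ID",
  "kayıtsrm": "REGISTER_STAFF_ROLE_ID",
  "jailrol": "JAIL_ROLE_ID",
  "jailchat": "JAIL_CHANNEL_ID",
  "registerchat": "REGISTER_CHANNEL_ID",
  "chatId": "GENERAL_CHAT_CHANNEL_ID",
  "tikemojiid": "CHECK_EMOJI_ID"
}
```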
<file_sep>/komutlar/yasaklıtaglar.js
module.exports = {
kod: "yasaklıtags",
async run (client, message, args) {
const Discord = require('discord.js')
const voxi = require('../_BOOT/config.json')
const mongoose = require('mongoose')
const data = require('quick.db')
var ytags = data.fetch(`ytag_${message.guild.id}`)
let voximember = message.mentions.members.first() || message.guild.members.cache.get(args[0]) || message.member;
if (!message.member.hasPermission("ADMINISTRATOR")) return message.channel.send("Hata: `Bu komutunu kullanabilmek için herhangi bir yetkiye sahip değilsin.`").then(x => x.delete({timeout: 10000}));
//VOXİİ EZZZZ
    if(!ytags || !ytags.length) return message.channel.send(`Herhangi bir yasaklı tag verisi bulunamadı!`)
message.channel.send(`Yasaklı taglar; ${ytags}`)
}
}
|
1ed764f4992156869fc7beda058083bac63cf5b8
|
[
"JavaScript",
"Markdown"
] | 11
|
JavaScript
|
voxifx/voxinewreg
|
127b2d51b00efbeaf945d1299a07e70de58a8e8d
|
e706955a4d032889bb4422677db078e05cbd0960
|
refs/heads/master
|
<file_sep><h1>
  Second module loaded - interno.component.html!
</h1>
<p>This one fails to load the JS at all, and even if it loaded it would still fail the CrossOrigin check</p>
<div style="text-align:center">
<!--
<button
data-cleeng-trigger
data-action="my-account"
data-display-type="inline"
data-container-id="my-account-container"
data-publisher-id="429296420_ES"
data-completed-callback="cleengCallbackHandler(result)">
My Account inline
</button>
-->
<button (click)="quefuncione()">Click Me!</button>
<div id="my-container"></div>
</div>
<file_sep>import { Injectable } from '@angular/core';
import { HttpClient, HttpParams } from '@angular/common/http';
import { Observable } from 'rxjs/Observable';
// tslint:disable-next-line:import-blacklist
import 'rxjs/Rx';
@Injectable()
export class ApiCleengService {
// private url = 'https://sandbox.cleeng.com/js-api/3.0/api.js';
private url = 'https://sandbox.cleeng.com/js-api/3.0/checkout.js.php';
constructor(private http: HttpClient) { }
public checkout() {
const headers = new Headers();
headers.append('Content-Type', 'application/json');
const body = {
'displayType': 'inline',
'containerId': 'my-container',
'publisherId': 321123321,
'locale': 'es_ES',
'completed': function(result){}
};
return this.http.get (this.url).subscribe( res => console.log(res));
}
}
<file_sep>import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { HttpClientModule, HttpClient } from '@angular/common/http';
import { AppComponent } from './app.component';
import { InternoComponent } from './componentes/interno/interno.component';
import { ApiCleengService } from './servicios/api-cleeng.service';
import { Script } from './servicios/script.service';
@NgModule({
declarations: [
AppComponent,
InternoComponent
],
imports: [
BrowserModule,
HttpClientModule
],
providers: [
ApiCleengService,
Script
],
bootstrap: [AppComponent]
})
export class AppModule { }
<file_sep>import { Component, OnInit } from '@angular/core';
import { ApiCleengService } from '../../servicios/api-cleeng.service';
import { Observable } from 'rxjs/Observable';
import { Script } from '../../servicios/script.service';
declare var CleengApi: any;
@Component({
selector: 'app-interno',
templateUrl: './interno.component.html'
})
export class InternoComponent implements OnInit {
url = 'https://sandbox.cleeng.com/js-api/3.0/api.js';
loadAPI: Promise<any>;
constructor(private servicioCleeng: ApiCleengService, private script: Script) {
this.loadAPI = new Promise((resolve) => {
console.log('resolving promise...');
this.loadScript();
});
}
ngOnInit() {}
public loadScript() {
console.log('preparing to load...');
let node = document.createElement('script');
node.type = 'text/javascript';
node.src = this.url;
document.getElementsByTagName('head')[0].appendChild(node);
}
public quefuncione() {
CleengApi.authentication({
displayType: 'inline',
containerId: 'my-container',
publisherId: 429296420,
locale: 'es_ES',
completed : function(result){}
});
}
}
<file_sep>interface Scripts {
name: string;
src: string;
}
export const ScriptStore: Scripts[] = [
{name: 'cleeng', src: 'https://sandbox.cleeng.com/js-api/3.0/api.js'},
{name: 'filepicker', src: 'https://api.filestackapi.com/filestack.js'}
];
|
ac235d8e5201202a844a89f8ad5182edfe835f57
|
[
"TypeScript",
"HTML"
] | 5
|
HTML
|
virkinia/cleeng-prueba
|
a31e24fac9ee10f14bc235955762b6551f3fa503
|
9664133c4ccc6f5111cb6db38d7fc1b5043015f4
|
refs/heads/master
|
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package mochila;
/**
*
* @author <NAME>
*/
public class item {
public int n;
public int peso;
public int beneficio;
public item(int n, int peso, int beneficio) {
this.n = n;
this.peso = peso;
this.beneficio = beneficio;
}
public item(String frase){
String[] partes = frase.split(",");
this.n = Integer.parseInt(partes[0]);
this.peso = Integer.parseInt(partes[1]);
this.beneficio = Integer.parseInt(partes[2]);
}
@Override
public String toString(){
return this.n+","+this.peso+","+this.beneficio;
}
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package recursivo;
/**
*
* @author <NAME>
*/
public class Fibonacci {
public static double run_Fib_r(long x){
double ti = System.currentTimeMillis();
Fibonacci.Fib_r(x);
double tf = System.currentTimeMillis();
return tf-ti;
}
public static double run_fiboIterativo(long x){
double ti = System.currentTimeMillis();
Fibonacci.fiboIterativo(x);
double tf = System.currentTimeMillis();
return tf-ti;
}
public static double run_fiboIterativoDinamico(long x){
double ti = System.currentTimeMillis();
if(x>2){
Fibonacci.fiboIterativoDinamico(x);
}
double tf = System.currentTimeMillis();
return tf-ti;
}
public static double run_fiboRecursivoDinamico(long x){
double ti = System.currentTimeMillis();
Fibonacci.fiboRecursivoDinamico(x);
double tf = System.currentTimeMillis();
return tf-ti;
}
public static long Fib_r(long n){
if(n==0)
return 0;
else if(n==1)
return 1;
return Fib_r(n-1)+Fib_r(n-2);
}
public static long fiboIterativo(long pos){
long pen = 0;
long ult = 1;
long suma;
for (int x=1; x<pos;x++){
suma = ult + pen;
pen = ult;
ult = suma;
}
return ult;
}
public static long fiboIterativoDinamico(long pos){
long fibos[]=new long[(int)pos+1];
fibos[0] = 0;
fibos[1] = 1;
for (int x=2; x<=pos;x++){
fibos[x] = fibos[x-1] + fibos[x-2];
}
return fibos[(int)pos];
}
public static long fiboRecursivoDinamico(long pos){
if(pos<=1){
return pos;
} else {
long n [] = new long[(int)pos+1];
n[0]= 0;
n[1]= 1;
//
return lookUpFib(n,pos);
}
}
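    // Memoised lookup: n[] caches Fibonacci numbers that have already been computed;
    // an entry of 0 (other than positions 0 and 1) means "not computed yet".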
private static long lookUpFib(long[] n, long pos) {
// caso base
// if(n[(int)pos]!=-1)return n[(int)pos];
//if(n[(int)pos]==0 || (n[(int)pos]==1 && pos ==2))return n[(int)pos];
if((pos==0 || pos ==1) || (n[(int)pos]!=0 && pos!=0))return n[(int)pos];
// si no se conoce entonces calcular
n[(int)pos]=lookUpFib(n, pos-1)+lookUpFib(n, pos-2);
return n[(int)pos];
}
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package recursivo;
/**
*
* @author <NAME>
*/
public class QuickSort implements AlgoritmoOrdenamiento, Runnable {
private double[] arreglo;
private double tt;
private boolean thread;
public QuickSort() {
this.thread = false;
this.arreglo = null;
}
public QuickSort(boolean thread) {
this.thread = thread;
this.arreglo = null;
}
@Override
public double getTt() {
return tt;
}
@Override
public void definirDatos(double[] arreglo) {
this.arreglo = arreglo;
}
public void ordenarDatos() {
if (this.thread){
Thread hilo = new Thread(this);
hilo.start();
}else{
run();
}
}
int partition(double arr[], int low, int high)
{
double pivot = arr[high];
int i = (low-1); // index of smaller element
for (int j=low; j<high; j++)
{
// If current element is smaller than the pivot
if (arr[j] < pivot)
{
i++;
// swap arr[i] and arr[j]
double temp = arr[i];
arr[i] = arr[j];
arr[j] = temp;
}
}
// swap arr[i+1] and arr[high] (or pivot)
double temp = arr[i+1];
arr[i+1] = arr[high];
arr[high] = temp;
return i+1;
}
/* l is for left index and r is right index of the
sub-array of arr to be sorted */
void sort(double arr[], int low, int high)
{
if (low < high)
{
/* pi is partitioning index, arr[pi] is
now at right place */
int pi = partition(arr, low, high);
// Recursively sort elements before
// partition and after partition
sort(arr, low, pi-1);
sort(arr, pi+1, high);
}
}
@Override
public void run() {
double ti = System.currentTimeMillis();
sort(arreglo ,0,arreglo.length-1);
double tf = System.currentTimeMillis();
this.tt = tf - ti;
}
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package TSP_Dinamico;
/**
*
* @author <NAME>
*/
public class TSP_bruto {
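    // Note: despite the "bruto" (brute force) name, this builds the tour with a greedy
    // nearest-neighbour heuristic over the 10 cities, starting and ending at 'inicio'.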
int[][] array;
boolean[] lista={false, false, false, false, false, false, false, false, false, false};
String Sentencia="";
String[] ciudades={"A","B","C","D","E","F","G","H","I","J"};
int camino=0;
String cadena_menor;
public TSP_bruto(int inicio, int[][] array){
this.array=array;
int siguiente=inicio;
int previo;
this.lista[inicio]=true;
Sentencia=ciudades[inicio];
while(!(lista[0]&&lista[1]&&lista[2]&&lista[3]&&lista[4]&&lista[5]&&lista[6]&&lista[7]&&lista[8]&&lista[9])){
for(int x=0;x<10;x++){
if(!lista[x]){
previo=siguiente;
siguiente=Menor( siguiente);
lista[siguiente]=true;
camino+=array[previo][siguiente];
Sentencia=Sentencia+"-"+ciudades[siguiente];
}
}
System.out.println();
}
camino+=array[siguiente][inicio];
System.out.println(this.Sentencia+"-"+ciudades[inicio]);
System.out.println(this.camino);
}
public int Menor( int fila){
int indicemenor=0;
int numeromenor=1000;
for(int i=0; i<10;i++)
{
if(fila != i){
if(!lista[i]){
if(array[fila][i]<numeromenor){
indicemenor=i;
numeromenor=array[fila][i];
}
}
}
}
return indicemenor;
}
}
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package mochila;
/**
*
* @author <NAME>
*/
public class generador {
public static item[] generar(int n, boolean txt){
int peso;
int beneficio;
item[] generacion = new item[n];
for (int i=0; i<n; i++){
peso = (int) (Math.random() * 100) + 1;
beneficio = (int) (Math.random() * 150) + 1;
generacion[i]= new item(i,peso,beneficio);
}
if(txt){
Escribir archivo = new Escribir(generacion,"ListaNueva");
}
return generacion;
}
}
<file_sep>#include<stdio.h>
#include<stdlib.h>
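/*
 * Counts how many windows of the text match the pattern by comparing a positional
 * character-sum hash (sum of character codes plus their offsets). The hash is
 * recomputed for every window and matches are never verified character by
 * character, so hash collisions are counted as matches.
 */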
int main(){
int situ=0;
int h =0;
int cont;
int conte=0;
int hashcode=0;
char cadena[500];
char empate[500];
printf("Ingrese la cadena donde se va a buscar la cadena \n");
scanf("%s",&cadena);
for(int i =0; i<500;i++){
if(cadena[i] =='\0'){
cont=i;
break;
}
}
printf("Ingrese la cadena que se va a buscar la cadena \n");
scanf("%s",&empate);
for(int i =0; i<500;i++){
if(empate[i]=='\0'){
conte=i;
break;
}
}
for(int i =0; i<conte;i++){
hashcode+=empate[i]+i;
}
for(int i=0;i<cont-conte+1;i++){
for(int j=0;j<conte;j++){
h=h+cadena[i+j]+j;
}
if(h==hashcode){
situ++;
}
h=0;
}
printf("El numero de empate es %i",situ);
return 0;
}
|
1c2ee1c5193b31671abe273df7bfc61479ae3c08
|
[
"Java",
"C"
] | 6
|
Java
|
Sataroto/Proyectos_AA_2019
|
12d999a85f05f480e970e6d782c59311dc9bc78b
|
6d04aea6fb6bad2e82568715a4e09549990abe8c
|
refs/heads/master
|
<file_sep>jQuery(document).ready(function($) {
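    // Home-page behaviour: hero background slider, responsive navigation (hover on
    // desktop, click on mobile), article cards rendered from articles.json with
    // Mustache, and a contact form POSTed to the Locastic demo endpoint.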
var slider_images = ["images/slide.png", "images/slide.png", "images/slide.png"];
loadContent();
var menuList = $("nav > ul > li");
var slider = $(".slider");
slider = new Slider(slider);
$(".slider span").on("click", function() {
slider.changeBackground($(this));
});
function Slider(slider) {
this.changeBackground = function(span) {
var active = slider.find("span.active");
if (!span.hasClass("active")) {
slider.fadeOut(300, function() {
slider.css("background-image", "url(" + slider_images[span.index()] + ")")
}).fadeIn(300);
active.removeClass("active");
span.addClass("active");
}
};
};
if ($(window).innerWidth() >= 769) menuList.addClass("hoverDesktop");
else menuList.addClass("clickMobile");
$(window).on("resize", function() {
if ($(window).innerWidth() >= 769) menuList.removeClass("clickMobile").addClass("hoverDesktop");
else menuList.removeClass("hoverDesktop").addClass("clickMobile");
});
$(".mobile-menu, .mobile-close").on("click", function() {
$(".nav-wrapper").toggleClass("sidebar-nav");
$(".mobile-nav").toggle();
});
menuList.on("click", function() {
        var menu = $(this);
if(menu.hasClass("clickMobile")) menu.has("ul").toggleClass("slide");
});
function loadContent() {
var template = $(".articles-template").html();
$.ajax({
url: "articles.json",
type: "GET",
dataType: "json",
success: function(response) {
$.each(response.data, function(index, element) {
$(".articles-wrapper").append((Mustache.render(template, element)));
});
},
error: function() {
console.log("error");
}
});
};
$("#contact-form").on("submit", (function(event) {
event.preventDefault();
var contact = $("#contact");
var email = $("#email");
var message = $("#message");
var valContact = contact.val();
var valEmail = email.val();
var valMessage = message.val();
if (valContact && valEmail && valMessage)
{
var contactPerson = { "contact" : contact.content, "email" : email.content, "message" : message.content };
$.ajax({
url: "http://www.locastic.com/api/v1/fe-dev",
type: "POST",
data: contactPerson,
success: function(response) {
alert("Validacija uspješna!");
},
error: function(xhr, statusText, status) {
console.log("Status greške: " + statusText);
}
});
}
}));
});
|
cb6a6e8aeed5f3cc31aed729eaa43ae44f0188a6
|
[
"JavaScript"
] | 1
|
JavaScript
|
antonioRoic/Zad2
|
1f7e86fd443e0ffb1d525485eaa7caf446f55a15
|
0928f938628d4d608baf3d1d26321bc39b59735b
|
refs/heads/master
|
<repo_name>Matthieu-Rodrigues/PizzaSpring-test-<file_sep>/src/test/java/WebDriverTest/RecapCommandePage.java
package WebDriverTest;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.chrome.ChromeDriver;
public class RecapCommandePage {
private WebDriver webDriver;
	public RecapCommandePage(WebDriver webDriver) {
		this.webDriver = webDriver;
	}
public boolean verifierId(String id) {
return webDriver.findElement(By.id(id)).isDisplayed();
}
public String getValueId(String id) {
return webDriver.findElement(By.id(id)).getText();
}
}<file_sep>/target/site/jacoco/pizza.spring.service/CommandeService.java.html
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"><html xmlns="http://www.w3.org/1999/xhtml" lang="fr"><head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/><link rel="stylesheet" href="../jacoco-resources/report.css" type="text/css"/><link rel="shortcut icon" href="../jacoco-resources/report.gif" type="image/gif"/><title>CommandeService.java</title><link rel="stylesheet" href="../jacoco-resources/prettify.css" type="text/css"/><script type="text/javascript" src="../jacoco-resources/prettify.js"></script></head><body onload="window['PR_TAB_WIDTH']=4;prettyPrint()"><div class="breadcrumb" id="breadcrumb"><span class="info"><a href="../jacoco-sessions.html" class="el_session">Sessions</a></span><a href="../index.html" class="el_report">pizza-spring</a> > <a href="index.source.html" class="el_package">pizza.spring.service</a> > <span class="el_source">CommandeService.java</span></div><h1>CommandeService.java</h1><pre class="source lang-java linenums">package pizza.spring.service;
import java.util.Date;
import java.util.List;
import javax.transaction.Transactional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import pizza.spring.dao.CommandeDao;
import pizza.spring.dao.PizzaDao;
import pizza.spring.exception.PasDeCommandeSuivanteException;
import pizza.spring.modele.Commande;
@Service
public class CommandeService {
private PizzaDao pizzaDao;
private CommandeDao commandeDao;
@Autowired
<span class="fc" id="L23"> public CommandeService(PizzaDao pizzaDao, CommandeDao commandeDao) {</span>
<span class="fc" id="L24"> this.pizzaDao = pizzaDao;</span>
<span class="fc" id="L25"> this.commandeDao = commandeDao;</span>
<span class="fc" id="L26"> }</span>
@Transactional
public Commande commander(CommandeDto commandeDto) {
<span class="fc" id="L30"> Commande commande = new Commande();</span>
<span class="fc" id="L31"> commande.setNom(commandeDto.getNom());</span>
<span class="fc" id="L32"> commande.setTelephone(commandeDto.getTelephone());</span>
<span class="fc" id="L33"> commande.setEnAttente(true);</span>
<span class="fc" id="L34"> commande.setDateEmission(new Date());</span>
<span class="fc bfc" id="L35" title="All 2 branches covered."> for(Long id : commandeDto.getPizzaId()) {</span>
<span class="fc" id="L36"> commande.ajouter(pizzaDao.getById(id));</span>
<span class="fc" id="L37"> }</span>
<span class="fc" id="L38"> commandeDao.sauver(commande);</span>
<span class="fc" id="L39"> return commande;</span>
}
public Commande getRecap(Long idCommande) {
<span class="nc" id="L43"> return commandeDao.getRecapById(idCommande);</span>
}
public Commande getCommandeSuivante() throws PasDeCommandeSuivanteException {
<span class="nc" id="L47"> return commandeDao.getCommandeSuivante();</span>
}
public List<Commande> getCommandesEnAttente() {
<span class="nc" id="L51"> return commandeDao.getCommandesEnAttente();</span>
}
@Transactional
public void signalerCommandePrete(Long id) {
<span class="nc" id="L56"> commandeDao.signalerCommandePrete(id);</span>
<span class="nc" id="L57"> }</span>
}
</pre><div class="footer"><span class="right">Created with <a href="http://www.jacoco.org/jacoco">JaCoCo</a> 0.8.5.201910111838</span></div></body></html>
|
c0c3c63cffff2a1afe28ab244a806cbdb799541b
|
[
"Java",
"HTML"
] | 2
|
Java
|
Matthieu-Rodrigues/PizzaSpring-test-
|
8590496024f7bafb5c09fd247730223d5621079a
|
b2b4badb468de43494d508f17f203d0cb71b57ae
|
refs/heads/master
|
<file_sep>import Vue from 'vue';
import Vuex from 'vuex';
import json from './products.json';
const debug = process.env.NODE_ENV !== 'production';
// Simulated latency (ms) for the fake product fetch; the exact value is an assumption.
const delay = 500;
Vue.use(Vuex);
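// Minimal shop store: products come from the bundled products.json (loaded with a
// simulated delay), the cart stores { id, quantity } entries, and cartProducts
// resolves them against the product list.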
const store = new Vuex.Store({
strict: debug,
state: {
isLoading: false,
cart: [],
products: {}
},
getters: {
allProducts: state => state.products, // would need action/mutation if data fetched async
getNumberOfProducts: state => (state.products) ? state.products.length : 0,
cartProducts: state => {
return state.cart.map(({ id, quantity }) => {
const product = state.products.find(p => p.id === id)
return {
name: product.name,
price: product.price,
quantity
}
})
}
},
mutations: {
SET_IS_LOADING: (state, loading) => {
/* eslint-disable no-param-reassign */
state.isLoading = loading;
},
SET_PRODUCTS: (state, products) => {
/* eslint-disable no-param-reassign */
state.products = products;
},
ADD_TO_CART: (state, { id }) => {
const record = state.cart.find(p => p.id === id);
if (!record) {
state.cart.push({
id,
quantity: 1
});
} else {
record.quantity++;
}
}
},
actions: {
FETCH_PRODUCTS: ({ commit, state }, query) => {
// console.log(`action FETCH_PRODUCTS: query = ${query}`);
// commit('SET_SEARCH_QUERY', query);
commit('SET_IS_LOADING', true);
if (query) {
const p = new Promise((resolve, reject) => {
/* eslint-disable arrow-body-style */
setTimeout(() => {
return resolve(json);
}, delay);
});
return p.then((products) => {
// console.log('Fetch Promise OK! mutating state...');
commit('SET_PRODUCTS', products);
commit('SET_IS_LOADING', false);
// if (result === '') {
// commit('SET_NORESULTS', true);
// }
});
}
commit('SET_IS_LOADING', false);
return Promise.reject();
},
ADD_TO_CART: ({ commit }, product) => {
commit('ADD_TO_CART', {
id: product.id
});
}
}
});
export default store;
|
d8b1a2b03b5b3dbd0ad54b9a9ac5ca6b0311ea6e
|
[
"JavaScript"
] | 1
|
JavaScript
|
aozora/vue-checkout
|
df12284662f9694baa1a049770c46a3f270c144e
|
656eeaa339887b7be62053c534a6ea32f99da06b
|
refs/heads/master
|
<repo_name>Annie201/pywikibot-core<file_sep>/requests-requirements.txt
requests
# Dependency of pyOpenSSL. Use specific version to avoid expected
# DeprecationWarning
cryptography<=0.8.2 ; python_version < '2.7'
# requests security extra
# Bug T105767 on Python 2.7 release 9+
pyOpenSSL ; python_full_version < '2.7.9' or python_version >= '3'
ndg-httpsclient ; python_full_version < '2.7.9' or python_version >= '3'
pyasn1 ; python_full_version < '2.7.9' or python_version >= '3'
<file_sep>/tests/i18n/pywikibot.py
# -*- coding: utf-8 -*-
"""i18n message bundle called 'pywikibot' to fool the i18n loader."""
from __future__ import unicode_literals
msg = {}
<file_sep>/docs/requirements-py3.txt
# This is a PIP requirements file for building Sphinx documentation of pywikibot
# using sphinx-1.3b1 on python3.4, and is used by readthedocs.org.
sphinx==1.3.1
sphinx-epytext>=0.0.3
# mandatory; see README.conversion.txt
requests
|
60ac697985b50140a38ca7e6b97d674996b8f462
|
[
"Python",
"Text"
] | 3
|
Text
|
Annie201/pywikibot-core
|
d4d49bf6fec81f88879306b90951c58f70003f39
|
6b085e06356473c4f49cdf7713ad36076d7c1fec
|
refs/heads/master
|
<repo_name>hossein-zare/wester-chunk-upload<file_sep>/src/Response.php
<?php
namespace Wester\ChunkUpload;
class Response
{
/**
     * Set the HTTP response code.
     *
     * @param  int|null  $status
* @return \Wester\ChunkUpload\Response
*/
public function status($status = null)
{
if ($status !== null)
http_response_code($status);
return $this;
}
/**
* Abort the connection with http status code.
*
* @param null|int status
* @return void
*/
public function abort($status = null)
{
$this->status($status);
die();
}
/**
* Json response.
*
* @param mixed $data
* @return void
*/
public function json($data)
{
header('Content-Type: application/json');
die(json_encode($data));
}
/**
* Create a new instance statically.
*
* @param string $method
* @param array $args
* @return \Wester\ChunkUpload\Response
*/
public static function __callStatic($method, $args)
{
return new self(...$args);
}
}
<file_sep>/src/File.php
<?php
namespace Wester\ChunkUpload;
use Wester\ChunkUpload\Exceptions\FileEmptyException;
use Wester\ChunkUpload\Exceptions\FileNotSingleException;
use Wester\ChunkUpload\Exceptions\FileErrorException;
class File
{
/**
* The name.
*
* @var array
*/
public $name;
/**
* The size.
*
* @var int
*/
public $size;
/**
* The type.
*
* @var string
*/
public $type;
/**
* The error.
*
* @var integer
*/
public $error;
/**
* The temp name.
*
* @var integer
*/
public $tmp_name;
/**
* Create a new instance.
*
* @param string $name
*/
public function __construct($name)
{
$this->setFile($name);
}
/**
* Set file.
*
* @param string $name
* @return void
*/
protected function setFile(string $name)
{
if ($this->isEmpty($name))
throw new FileEmptyException("There's no file.");
if (! $this->isSingle($name))
throw new FileNotSingleException("There are multiple files.");
if (! $this->isValid($name))
throw new FileErrorException("There are some errors.");
$this->setAttributes($name);
}
/**
* Set attributes.
*
* @param string $name
* @return void
*/
private function setAttributes(string $name)
{
$attributes = ['name', 'size', 'type', 'error', 'tmp_name'];
foreach ($attributes as $attribute) {
$this->{$attribute} = $this->getFile($name)->{$attribute};
}
}
/**
* Get the file.
*
* @param string $name
* @return object
*/
protected function getFile(string $name)
{
return json_decode(
json_encode($_FILES[$name]), false
);
}
/**
* Check if there's no file.
*
* @param string $name
* @return bool
*/
protected function isEmpty(string $name): bool
{
return ! isset(
$_FILES[$name]
) || empty(
$_FILES[$name]['name']
);
}
/**
* Check if the user has uploaded a single file.
*
* @param string $name
* @return bool
*/
protected function isSingle(string $name): bool
{
return ! is_array(
$_FILES[$name]['name']
);
}
/**
* Check if the file has any errors.
*
* @param string $name
* @return bool
*/
protected function isValid(string $name): bool
{
return $_FILES[$name]['error'] === 0;
}
}
<file_sep>/src/Exceptions/FileException.php
<?php
namespace Wester\ChunkUpload\Exceptions;
class FileException extends MainException
{
//
}
<file_sep>/src/Drivers/LocalDriver.php
<?php
namespace Wester\ChunkUpload\Drivers;
use Wester\ChunkUpload\Chunk;
use Wester\ChunkUpload\Drivers\Contracts\DriverInterface;
use Wester\ChunkUpload\Exceptions\MainException;
class LocalDriver implements DriverInterface
{
/**
* The chunk.
*
* @var \Wester\ChunkUpload\Chunk
*/
public $chunk;
/**
* Create a new instance.
*
* @param \Wester\ChunkUpload\Chunk $chunk
* @return void
*/
public function __construct(Chunk $chunk)
{
$this->chunk = $chunk;
}
/**
* Open the connection.
*
* @return void
*/
public function open()
{
//
}
/**
* Close the connection.
*
* @return void
*/
public function close()
{
//
}
/**
* Store the file.
*
* @param string $fileName
* @return void
*/
public function store($fileName)
{
try {
$file = fopen($this->chunk->getTempFilePath(), 'a');
fwrite($file, file_get_contents(
$fileName
));
fclose($file);
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Delete a temp chunk.
*
* @return void
*/
public function delete()
{
try {
$path = $this->chunk->getTempFilePath($this->chunk->header->chunkNumber);
if (file_exists($path)) {
unlink($path);
}
if ($this->chunk->header->chunkNumber > 1) {
$path = $this->chunk->getTempFilePath($this->chunk->header->chunkNumber - 1);
if (file_exists($path)) {
unlink($path);
}
}
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Move the file into the path.
*
* @return void
*/
public function move()
{
try {
rename($this->chunk->getTempFilePath(), $this->chunk->getFilePath());
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Increase the chunk number of the file.
*
* @return void
*/
public function increase()
{
try {
if ($this->chunk->header->chunkNumber > 1) {
rename(
$this->chunk->getTempFilePath($this->chunk->header->chunkNumber - 1), $this->chunk->getTempFilePath()
);
}
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Determine whether the previous chunk exists.
*
* @return null|bool
*/
public function prevExists()
{
if ($this->chunk->header->chunkNumber === 1)
return null;
return file_exists(
$this->chunk->getTempFilePath($this->chunk->header->chunkNumber - 1)
);
}
/**
* Determine whether the chunk exists.
*
* @return bool
*/
public function exists()
{
return file_exists($this->chunk->getTempFilePath());
}
}
<file_sep>/src/Validation/Rules/SizeRule.php
<?php
namespace Wester\ChunkUpload\Validation\Rules;
use Wester\ChunkUpload\Validation\Validator;
class SizeRule
{
/**
* The validator.
*
* @var \Wester\ChunkUpload\Validation\Validator
*/
private $validator;
/**
* The name.
*
* @var string
*/
private $name;
/**
* The value.
*
* @var mixed
*/
private $value;
/**
* Create a new instance.
*
* @param \Wester\ChunkUpload\Validation\Validator $validator
* @param string $name
* @param mixed $value
* @param string $data
* @return void
*/
public function __construct(Validator $validator, string $name, $value, string $data)
{
$this->validator = $validator;
$this->name = $name;
$this->value = $value;
$this->data = $data;
}
/**
* Validate the rule.
*
* @return bool
*/
public function isValid(): bool
{
if ($this->validator->exists($this->name)) {
switch ($this->validator->currentDataType) {
case 'numeric':
return (int) $this->value === (int) $this->data;
case 'file':
return (int) $this->value === (int) $this->data;
case 'string':
return strlen($this->value) === (int) $this->data;
}
return false;
}
return true;
}
/**
* Set arguments and create an instance.
*
* @param array $args
* @return \Wester\ChunkUpload\Validation\Rules\SizeRule
*/
public static function set(...$args)
{
return new self(...$args);
}
}
<file_sep>/src/Validation/Rules/Exceptions/ExtensionRuleException.php
<?php
namespace Wester\ChunkUpload\Validation\Rules\Exceptions;
use Wester\ChunkUpload\Validation\Exceptions\ValidationException;
class ExtensionRuleException extends ValidationException
{
//
}
<file_sep>/src/Exceptions/HeaderException.php
<?php
namespace Wester\ChunkUpload\Exceptions;
class HeaderException extends MainException
{
//
}
<file_sep>/src/Validation/ExceptionHandler.php
<?php
namespace Wester\ChunkUpload\Validation;
class ExceptionHandler
{
/**
* Throw a rule exception.
*
* @param string $rule
* @return void
*
* @throws \Exception
*/
protected function throw(string $rule)
{
$rule = ucfirst($rule);
$exception = "\\Wester\\ChunkUpload\\Validation\\Rules\\Exceptions\\{$rule}Exception";
throw new $exception("The data is invalid.");
}
}
<file_sep>/src/StringHelper.php
<?php
namespace Wester\ChunkUpload;
class StringHelper
{
/**
* Convert camel to kebab.
*
* @param string $string
* @return string
*/
public static function camelToKebab(string $string)
{
if (preg_match('/[A-Z]/', $string) === 0)
return $string;
return strtolower(
preg_replace_callback('/([a-z])([A-Z])/', function ($match) {
return $match[1] . "-" . strtolower($match[2]);
}, $string)
);
}
}
<file_sep>/src/Validation/Validator.php
<?php
namespace Wester\ChunkUpload\Validation;
use Exception;
use Wester\ChunkUpload\Validation\Exceptions\ValidationException;
use Wester\ChunkUpload\Exceptions\MainException;
use Wester\ChunkUpload\Language\Language;
class Validator extends ExceptionHandler
{
/**
* The data.
*
* @var array
*/
protected $data = [];
/**
* The parameters.
*
* @var array
*/
protected $parameters = [];
/**
* The current parameter.
*
* @var string
*/
public $currentParameter;
/**
* The current rules.
*
* @var array
*/
public $currentRules = [];
/**
* The current data type.
*
* @var string
*/
public $currentDataType = 'string';
/**
* Create a new instance.
*
* @param array $data
* @return void
*/
public function __construct(array $data)
{
$this->data = $data;
}
/**
* Validate parameters.
*
* @param array $parameters
* @return \Wester\ChunkUpload\Validation\Validator
*/
public function validate(array $parameters)
{
$messages = [];
$this->setParameters($parameters);
foreach ($parameters as $parameter => $rules) {
$this->setCurrentParameter($parameter)
->setCurrentRules($rules)
->setCurrentDataType($parameter);
// Filter rules
$rules = array_filter($rules, function ($item) {
return $item !== '!';
});
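// Each remaining rule string (e.g. "size:32") is split into a rule class name and an optional argument.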
foreach ($rules as $rule) {
$values = $this->slice($rule);
$class = $this->createClassName($values);
$args = [$class, $parameter];
if (isset($values[1])) {
$args[] = $values[1];
}
if (! $this->callRule(...$args)) {
try {
$this->throw($class);
} catch (ValidationException $e) {
if ($this->isThrowable())
throw new MainException("The data is invalid.");
$messages[$parameter][] = $this->getValidationMessage($values);
}
}
}
}
if (count($messages) > 0) {
throw new ValidationException("The data is invalid.", $messages);
}
return $this;
}
/**
* Set parameters.
*
* @param array $parameters
* @return \Wester\ChunkUpload\Validation\Validator
*/
private function setParameters(array $parameters)
{
$this->parameters = $parameters;
return $this;
}
/**
* Set current parameter.
*
* @param string $parameter
* @return \Wester\ChunkUpload\Validation\Validator
*/
private function setCurrentParameter(string $parameter)
{
$this->currentParameter = $parameter;
return $this;
}
/**
* Set current rules.
*
* @param array $rules
* @return \Wester\ChunkUpload\Validation\Validator
*/
private function setCurrentRules(array $rules)
{
$this->currentRules = $rules;
return $this;
}
/**
* Set current data type.
*
* @param string $parameter
* @return \Wester\ChunkUpload\Validation\Validator
*/
private function setCurrentDataType(string $parameter)
{
$this->currentDataType = $this->getDataType($parameter);
return $this;
}
/**
* Determine whether the parameter can throw non-validation exceptions.
*
* @return bool
*/
private function isThrowable(): bool
{
return in_array('!', $this->currentRules);
}
/**
* Get the validation message.
*
* @param array $rule
* @return string
*/
private function getValidationMessage(array $rule): string
{
if (isset($rule[1]) && $this->currentDataType === 'file') {
$value = (int) $rule[1] / 1024;
} else {
$value = $rule[1];
}
$data = [
'attribute' => $this->currentParameter,
'value' => $value
];
if ($rule[0] !== 'extension' && isset($rule[1])) {
$key = "{$rule[0]}.{$this->currentDataType}";
} else if ($rule[0] === 'extension') {
$key = 'mimes';
} else {
$key = $rule[0];
}
return Language::expression($key, $data);
}
/**
* Call the rule.
*
* @param string $method
* @param string $parameter
* @param mixed $data
* @return bool
*/
private function callRule(string $method, string $parameter, $data = null)
{
$method = ucfirst($method);
$rule = "\\Wester\\ChunkUpload\\Validation\\Rules\\{$method}";
return $rule::set($this, $parameter, $this->getAttribute($parameter), $data)->isValid();
}
/**
* Convert the parameters.
*
* @return array
*/
public function convert()
{
foreach($this->data as $key => $value) {
$this->data[$key] = $this->toDataType($key, $this->data[$key]);
}
return $this->data;
}
/**
* Convert data type.
*
* @param string $name
* @param mixed $value
* @return mixed
*/
private function toDataType(string $name, $value)
{
switch ($this->getDataType($name)) {
case 'string':
return (string) $value;
case 'numeric':
return (int) $value;
case 'file':
return (int) $value;
}
return $value;
}
/**
* Get data type.
*
* @param string $name
* @return string
*/
private function getDataType($name)
{
$types = ['string', 'numeric', 'file'];
foreach ($this->parameters[$name] as $rule) {
if (in_array($rule, $types))
return $rule;
}
return 'string';
}
/**
* Get attribute.
*
* @param null|string $name
* @return mixed
*/
protected function getAttribute(string $name = null)
{
if (! $name) {
return $this->getAttribute($this->currentParameter);
}
return $this->data[$name] ?? null;
}
/**
* Check if the header exists.
*
* @param string $name
* @return bool
*/
public function exists(string $name): bool
{
return isset($this->data[$name]);
}
/**
* Create a class name for the rule.
*
* @param array $array
* @return string
*/
protected function createClassName(array $array): string
{
return lcfirst($array[0]) . 'Rule';
}
/**
* Slice the string.
*
* @param string $string
*/
private function slice($string)
{
return explode(':', $string);
}
}
<file_sep>/src/Chunk.php
<?php
namespace Wester\ChunkUpload;
use Wester\ChunkUpload\Language\Language;
use Wester\ChunkUpload\Exceptions\ChunkException;
class Chunk
{
/**
* The file name flags.
*/
const RANDOM_FILE_NAME = 1;
const ORIGINAL_FILE_NAME = 2;
/**
* The file extension flags
*/
const ORIGINAL_FILE_EXTENSION = 1;
/**
* The file.
*
* @var object
*/
public $file;
/**
* The headers.
*
* @var \Wester\ChunkUpload\Header
*/
public $header;
/**
* The driver.
*
* @var object
*/
public $driver;
/**
* The required headers.
*
* @var array
*/
protected $requiredHeaders = [];
/**
* The configs.
*
* @var array
*/
public $configs = [];
/**
* The language.
*
* @var array
*/
private $language = [
'min' => [
'numeric' => 'The :attribute must be at least :min.',
'file' => 'The :attribute must be at least :min kilobytes.',
],
'max' => [
'numeric' => 'The :attribute may not be greater than :max.',
'file' => 'The :attribute may not be greater than :max kilobytes.',
],
'size' => [
'numeric' => 'The :attribute must be :size.',
'file' => 'The :attribute must be :size kilobytes.',
],
'mimes' => 'The :attribute must be a file of type: :values.',
'attributes' => [
'x-file-name' => 'file',
'x-file-size' => 'file',
],
];
/**
* Create a new instance.
*
* @param array $configs
* @return void
*/
public function __construct(array $configs)
{
$this->setConfigs($configs)
->setLanguage($this->language)
->setRequiredHeaders()
->setFile()
->setHeader()
->setDriver()
->header->validate($this->requiredHeaders);
}
/**
* Set configs.
*
* @param array $configs
* @return \Wester\ChunkUpload\Chunk
*/
private function setConfigs(array $configs)
{
$this->configs = $configs;
return $this;
}
/**
* Set required headers.
*
* @return \Wester\ChunkUpload\Chunk
*/
private function setRequiredHeaders()
{
$this->requiredHeaders = [
'x-chunk-number' => ['!', 'required', 'numeric'],
'x-chunk-total-number' => ['!', 'required', 'numeric'],
'x-chunk-size' => ['!', 'required', 'numeric'],
'x-file-name' => ['required', 'string', ...$this->getValidationRule(['extension'])],
'x-file-size' => ['required', 'file', ...$this->getValidationRule(['min', 'max', 'size'])],
'x-file-identity' => ['!', 'required', 'string', 'size:32']
];
return $this;
}
/**
* Create an object of header.
*
* @return \Wester\ChunkUpload\Header
*/
private function getHeader()
{
return new Header(
array_keys($this->requiredHeaders)
);
}
/**
* Set the instance of header.
*
* @return \Wester\ChunkUpload\Chunk
*/
private function setHeader()
{
$this->header = $this->getHeader();
return $this;
}
/**
* Set language array.
*
* @param array $language
* @return \Wester\ChunkUpload\Chunk
*/
public function setLanguage(array $language)
{
Language::set($language);
return $this;
}
/**
* Create an object of file.
*
* @param string $name
* @return \Wester\ChunkUpload\File
*/
private function getFile(string $name)
{
return new File($name);
}
/**
* Set the instance of file.
*
* @return \Wester\ChunkUpload\Chunk
*/
private function setFile()
{
$this->file = $this->getFile($this->configs['name']);
return $this;
}
/**
* Initialize the driver.
*
* @return \Wester\ChunkUpload\Chunk
*/
private function setDriver()
{
if (in_array($this->configs['driver'], ['local', 'ftp'])) {
$name = ucfirst($this->configs['driver']);
$driver = "\\Wester\\ChunkUpload\\Drivers\\{$name}Driver";
} else {
$driver = $this->configs['driver'];
}
$this->driver = new $driver($this);
$this->driver->open();
return $this;
}
/**
* Get driver configs.
*
* @return array
*/
private function getDriverConfigs()
{
switch ($this->configs['driver']) {
case 'local':
return $this->configs['local_driver'];
case 'ftp':
return $this->configs['ftp_driver'];
default:
return $this->configs['custom_driver'];
}
}
/**
* Validate chunks.
*
* @return \Wester\ChunkUpload\Chunk
*/
public function validate()
{
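// Every failed check calls revoke(), which deletes the partial temp chunks and throws a ChunkException.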
if ($this->header->chunkTotalNumber !== $this->getTotalNumber()) {
$this->revoke("The total number of chunks is invalid.");
}
if ($this->header->chunkNumber < 1 || $this->header->chunkNumber > $this->header->chunkTotalNumber) {
$this->revoke("The chunk number is invalid.");
}
if ($this->file->size !== $this->getSize($this->header->chunkNumber)) {
$this->revoke("The chunk size is invalid.");
}
if (! $this->isChunk()) {
$this->revoke("The uploaded file is not a chunk.");
}
if ($this->driver->prevExists() === false) {
$this->revoke("Previous chunk doesn't exist.");
}
if ($this->driver->exists()) {
$this->revoke("Chunk {$this->header->chunkNumber} already exists.");
}
return $this;
}
/**
* Revoke the action.
*
* @param string $text
* @return void
*
* @throws \Wester\ChunkUpload\Exceptions\ChunkException
*/
private function revoke(string $text): void
{
$this->driver->delete();
throw new ChunkException($text);
}
/**
* Get validation rules.
*
* @param array $rules
* @return array
*/
private function getValidationRule(array $rules)
{
$array = [];
foreach ($this->configs['validation'] as $validation) {
$values = explode(':', $validation);
if (in_array($values[0], $rules))
$array[] = $validation;
}
return $array;
}
/**
* Store the chunk.
*
* @return \Wester\ChunkUpload\Chunk
*/
public function store()
{
$this->driver->increase();
$this->driver->store($this->file->tmp_name);
if ($this->isLast()) {
$this->response()->status(200);
$this->driver->move();
} else {
$this->response()->status(201);
}
$this->driver->close();
return $this;
}
/**
* Get progress.
*
* @return float
*/
public function getProgress()
{
return ($this->header->chunkNumber / $this->header->chunkTotalNumber) * 100;
}
/**
* Get total number of chunks.
*
* @return int
*/
public function getTotalNumber(): int
{
$number = (int) ceil($this->header->fileSize / $this->configs['chunk_size']);
return $number !== 0 ? $number : 1;
}
/**
* Get the size of the specified chunk.
*
* @param int $part
* @return int
*/
public function getSize(int $part): int
{
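// Bytes remaining after this chunk; a negative value means this is the last, partial chunk.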
$total = $this->header->fileSize - ($part * $this->configs['chunk_size']);
if ($total < 0) {
return $this->configs['chunk_size'] + $total;
}
return $this->configs['chunk_size'];
}
/**
* Create a unique temp file name.
*
* @param null|int $part
* @return string
*/
public function createTempFileName(int $part = null): string
{
$mixture = [
$this->header->fileSize,
$this->header->fileName,
$this->header->fileIdentity
];
$identity = [
$this->getFileExtension(),
($part ?: $this->header->chunkNumber),
'tmp'
];
return implode('.', [
hash('ripemd160', implode($mixture)), implode('.', array_filter($identity))
]);
}
/**
* Create a random string.
*
* @return string
*/
public function createRandomString(): string
{
return bin2hex(random_bytes(16));
}
/**
* Create a file name.
*
* @return string
*/
public function createFileName(): string
{
if (is_int($this->configs['file_name'])) {
switch ($this->configs['file_name']) {
case Chunk::RANDOM_FILE_NAME:
$this->configs['file_name'] = $this->createRandomString();
break;
case Chunk::ORIGINAL_FILE_NAME:
$this->configs['file_name'] = pathinfo($this->header->fileName, PATHINFO_FILENAME);
break;
}
}
return $this->getFullFileName();
}
/**
* Get temp file path.
*
* @param null|int $part
* @return string
*/
public function getTempFilePath(int $part = null): string
{
return $this->getDriverConfigs()['tmp_path'] . $this->createTempFileName($part);
}
/**
* Get temp file path.
*
* @return string
*/
public function getFilePath(): string
{
return $this->getDriverConfigs()['path'] . $this->createFileName();
}
/**
* Get file name.
*
* @return string
*/
public function getFileName(): string
{
return $this->configs['file_name'];
}
/**
* Get file name with extension.
*
* @return string
*/
public function getFullFileName(): string
{
return implode('.', array_filter([$this->getFileName(), $this->getFileExtension()]));
}
/**
* Get the file extension.
*
* @return null|string
*/
public function getFileExtension()
{
if ($this->configs['file_extension'] === Chunk::ORIGINAL_FILE_EXTENSION) {
$extension = trim(pathinfo($this->header->fileName, PATHINFO_EXTENSION));
$extension = empty($extension) ? null : $extension;
$this->configs['file_extension'] = $extension;
}
return $this->configs['file_extension'];
}
/**
* Determine whether it's the last chunk.
*
* @return bool
*/
public function isLast(): bool
{
return $this->header->chunkNumber === $this->header->chunkTotalNumber;
}
/**
* Check if the file is a chunk.
*
* @return bool
*/
private function isChunk(): bool
{
return $this->file->name === 'blob'
&& $this->file->type === 'application/octet-stream';
}
/**
* Create a new instance of the response class.
*
* @param null|int $status
* @return \Wester\ChunkUpload\Response
*/
public function response($status = null)
{
return (new Response())->status($status);
}
}
<file_sep>/src/Exceptions/FileEmptyException.php
<?php
namespace Wester\ChunkUpload\Exceptions;
class FileEmptyException extends FileException
{
//
}
<file_sep>/src/Header.php
<?php
namespace Wester\ChunkUpload;
use Wester\ChunkUpload\Validation\Validator;
use CaseConverter\CaseString;
class Header
{
/**
* The headers.
*
* @var array
*/
public $headers = [];
/**
* Create a new instance.
*
* @param array $keys
* @return void
*/
public function __construct(array $keys = null)
{
$this->setHeaders();
$this->only($keys);
}
/**
* Set all headers.
*
* @return void
*/
protected function setHeaders()
{
$this->headers = array_change_key_case(getallheaders(), CASE_LOWER);
}
/**
* Preserve specified headers.
*
* @param null|array $keys
* @return void
*/
public function only($keys)
{
if ($keys) {
$this->filter(function (string $key) use ($keys) {
return in_array($key, $keys);
});
}
}
/**
* Check if the header exists.
*
* @param string $name
* @return bool
*/
public function exists(string $name): bool
{
return isset($this->headers[$name]);
}
/**
* Filter the headers.
*
* @param callable $callback
* @return array
*/
public function filter(callable $callback)
{
$this->headers = array_filter($this->headers, $callback, ARRAY_FILTER_USE_KEY);
return $this->headers;
}
/**
* Validate the headers.
*
* @param array $headers
* @return void
*/
public function validate($headers)
{
$validator = new Validator($this->headers);
$this->headers = $validator->validate($headers)->convert();
}
/**
* Get a header value.
*
* @param string $name
* @return mixed
*/
public function __get(string $name)
{
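// e.g. $header->chunkTotalNumber resolves to the "x-chunk-total-number" header.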
return $this->headers[
'x-' . StringHelper::camelToKebab($name)
] ?? null;
}
}
<file_sep>/src/Validation/Exceptions/ValidationException.php
<?php
namespace Wester\ChunkUpload\Validation\Exceptions;
use Wester\ChunkUpload\Exceptions\MainException;
class ValidationException extends MainException
{
/**
* The errors.
*
* @var array
*/
private $errors = [];
/**
* Create a new instance.
*
* @param string $message
* @param array $errors
* @return void
*/
public function __construct(string $message, array $errors = [])
{
$this->errors = $errors;
parent::__construct($message);
}
/**
* Get the validation errors.
*
* @return array
*/
public function getErrors(): array
{
return $this->errors;
}
}
<file_sep>/src/Drivers/Exceptions/FtpDriverException.php
<?php
namespace Wester\ChunkUpload\Drivers\Exceptions;
use Wester\ChunkUpload\Exceptions\MainException;
class FtpDriverException extends MainException
{
//
}
<file_sep>/src/Language/Language.php
<?php
namespace Wester\ChunkUpload\Language;
class Language
{
/**
* The language.
*
* @var array
*/
private static $language = [];
/**
* Set language.
*
* @param array $language
* @return void
*/
public static function set(array $language)
{
self::$language = $language;
}
/**
* Get an expression.
*
* @param string $key
* @return null|string
*/
public static function get(string $key)
{
$keys = explode('.', $key);
$value = self::$language;
foreach ($keys as $key) {
$value = $value[$key] ?? null;
if (! is_array($value))
return $value;
}
return null;
}
/**
* Get an attribute.
*
* @param string $attribute
* @return string
*/
public static function getAttribute(string $attribute)
{
return self::$language['attributes'][$attribute] ?? $attribute;
}
/**
* Parse expression.
*
* @param string $key
* @param array $data
*/
public static function expression(string $key, array $data)
{
$expression = self::get($key);
return preg_replace_callback('/(:\S+)/', function ($match) use ($data) {
if ($match[0] === ":attribute")
return self::getAttribute($data['attribute']);
return $data['value'];
}, $expression);
}
}
<file_sep>/src/Validation/Rules/StringRule.php
<?php
namespace Wester\ChunkUpload\Validation\Rules;
use Wester\ChunkUpload\Validation\Validator;
class StringRule
{
/**
* The validator.
*
* @var \Wester\ChunkUpload\Validation\Validator
*/
private $validator;
/**
* The name.
*
* @var string
*/
private $name;
/**
* The value.
*
* @var mixed
*/
private $value;
/**
* Create a new instance.
*
* @param \Wester\ChunkUpload\Validation\Validator $validator
* @param string $name
* @param mixed $value
* @return void
*/
public function __construct(Validator $validator, string $name, $value)
{
$this->validator = $validator;
$this->name = $name;
$this->value = $value;
}
/**
* Validate the rule.
*
* @return bool
*/
public function isValid(): bool
{
return ! $this->validator->exists($this->name) || is_string($this->value);
}
/**
* Set arguments and create an instance.
*
* @param array $args
* @return \Wester\ChunkUpload\Validation\Rules\StringRule
*/
public static function set(...$args)
{
return new self(...$args);
}
}
<file_sep>/src/Exceptions/ChunkException.php
<?php
namespace Wester\ChunkUpload\Exceptions;
class ChunkException extends MainException
{
//
}
<file_sep>/src/Validation/Rules/Exceptions/MinRuleException.php
<?php
namespace Wester\ChunkUpload\Validation\Rules\Exceptions;
use Wester\ChunkUpload\Validation\Exceptions\ValidationException;
class MinRuleException extends ValidationException
{
//
}
<file_sep>/src/Drivers/FtpDriver.php
<?php
namespace Wester\ChunkUpload\Drivers;
use Wester\ChunkUpload\Chunk;
use Wester\ChunkUpload\Header;
use Wester\ChunkUpload\Drivers\Contracts\DriverInterface;
use Wester\ChunkUpload\Drivers\Exceptions\FtpDriverException;
use Wester\ChunkUpload\Exceptions\MainException;
class FtpDriver implements DriverInterface
{
/**
* The chunk.
*
* @var \Wester\ChunkUpload\Chunk
*/
public $chunk;
/**
* The connection.
*
* @var mixed
*/
private $connection;
/**
* Create a new instance.
*
* @param array $configs
* @param \Wester\ChunkUpload\Chunk $chunk
* @return void
*/
public function __construct(Chunk $chunk)
{
$this->chunk = $chunk;
}
/**
* Open the connection.
*
* @return void
*/
public function open()
{
$this->createConnection()->login();
}
/**
* Close the connection.
*
* @return void
*/
public function close()
{
try {
ftp_close($this->connection);
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Create a ftp connection.
*
* @return \Wester\ChunkUpload\Drivers\FtpDriver
*/
private function createConnection()
{
try {
if (! $this->connection = @ftp_connect($this->chunk->configs['ftp_driver']['server']))
throw new FtpDriverException("FTP couldn't connect to the server.");
return $this;
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Login to the ftp account.
*
* @return void
*/
private function login()
{
try {
if (! @ftp_login($this->connection, $this->chunk->configs['ftp_driver']['username'], $this->chunk->configs['ftp_driver']['password']))
throw new FtpDriverException("FTP couldn't login to the server.");
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Store the file.
*
* @param string $fileName
* @return void
*/
public function store($fileName)
{
try {
if (! ftp_append($this->connection, $this->chunk->getTempFilePath(), $fileName)) {
$this->close();
throw new FtpDriverException("FTP Couldn't append to the file.");
}
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Delete a temp chunk.
*
* @return void
*/
public function delete()
{
try {
$path = $this->chunk->getTempFilePath($this->chunk->header->chunkNumber);
if (ftp_size($this->connection, $path) > -1) {
ftp_delete($this->connection, $path);
}
if ($this->chunk->header->chunkNumber > 1) {
$path = $this->chunk->getTempFilePath($this->chunk->header->chunkNumber - 1);
if (ftp_size($this->connection, $path) > -1) {
ftp_delete($this->connection, $path);
}
}
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Move the file into the path.
*
* @return void
*/
public function move()
{
try {
ftp_rename($this->connection, $this->chunk->getTempFilePath(), $this->chunk->getFilePath());
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Increase the chunk number of the file.
*
* @return void
*/
public function increase()
{
try {
if ($this->chunk->header->chunkNumber > 1) {
ftp_rename(
$this->connection, $this->chunk->getTempFilePath($this->chunk->header->chunkNumber - 1), $this->chunk->getTempFilePath()
);
}
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Determine whether the previous chunk exists.
*
* @return null|bool
*/
public function prevExists()
{
try {
if ($this->chunk->header->chunkNumber === 1)
return null;
return ftp_size(
$this->connection, $this->chunk->getTempFilePath($this->chunk->header->chunkNumber - 1)
) > -1;
} catch (\Exception $e) {
throw new MainException($e);
}
}
/**
* Determine whether the chunk exists.
*
* @return bool
*/
public function exists()
{
try {
return ftp_size($this->connection, $this->chunk->getTempFilePath()) > -1;
} catch (\Exception $e) {
throw new MainException($e);
}
}
}
<file_sep>/src/Exceptions/FileNotSingleException.php
<?php
namespace Wester\ChunkUpload\Exceptions;
class FileNotSingleException extends FileException
{
//
}
<file_sep>/src/Drivers/Contracts/DriverInterface.php
<?php
namespace Wester\ChunkUpload\Drivers\Contracts;
interface DriverInterface
{
public function open();
public function close();
public function store($fileName);
public function delete();
public function move();
public function increase();
public function prevExists();
public function exists();
}<file_sep>/src/Exceptions/FileErrorException.php
<?php
namespace Wester\ChunkUpload\Exceptions;
class FileErrorException extends FileException
{
//
}
<file_sep>/README.md
# Wester Chunk Upload Library For PHP
Wester Chunk Upload is a PHP library for handling chunked uploads, with local and FTP file uploads supported out of the box.
You'll feel safe with the built-in file validator.
## Table of contents
* [Installation](#installation)
* [Basic Usage](#basic-usage)
* [Drivers](#drivers)
* [Implement The Driver](#implement-the-driver)
* [Methods](#methods)
* [Properties](#properties)
* [Validation Rules](#validation-rules)
* [Language](#language)
* [Flags](#flags)
* [HTTP Response Status Codes](#http-response-status-codes)
* [Client Side](#client-side)
* [Headers](#headers)
* [Examples](#examples)
* [Javascript](#javascript)
* [Contribution](#contribution)
* [Support Us](#support-us)
## Installation
```bash
composer require wester/chunk-upload
```
## Basic Usage
Here's an example of the package.
```php
// You don't need this line in laravel or some other frameworks.
require("./vendor/autoload.php");
use Wester\ChunkUpload\Chunk;
use Wester\ChunkUpload\Validation\Exceptions\ValidationException;
try {
$chunk = new Chunk([
'name' => 'video', // same as $_FILES['video']
'chunk_size' => 4000, // must be equal to the value specified on the client side
// Driver
'driver' => 'local', // [local, ftp]
// Local driver details
'local_driver' => [
'path' => __DIR__ . '/uploads/', // where to upload the final file
'tmp_path' => __DIR__ . '/uploads/temp/', // where to store the temp chunks
],
// FTP driver details
'ftp_driver' => [
'server' => '',
'username' => '',
'password' => '',
'path' => '/uploads/', // where to upload the final file
'tmp_path' => '/uploads/temp/', // where to store the temp chunks
],
// File details
'file_name' => Chunk::RANDOM_FILE_NAME,
'file_extension' => Chunk::ORIGINAL_FILE_EXTENSION,
// File validation
'validation' => ['extension:mp4,avi'],
]);
$chunk->validate()->store();
if ($chunk->isLast()) {
// done
$chunk->getFilePath();
} else {
$chunk->response()->json([
'progress' => $chunk->getProgress()
]);
}
} catch (ValidationException $e) {
$e->response(422)->json([
'message' => $e->getMessage(),
'data' => $e->getErrors(),
]);
} catch (\Exception $e) {
$e->response(400)->abort();
}
```
## Drivers
This package supports `local` and `ftp` file uploads out of the box; custom drivers can be used as well.
```php
'driver' => 'ftp',
```
* ### Implement The Driver
Your custom driver should implement the `\Wester\ChunkUpload\Drivers\Contracts\DriverInterface`.
```php
'driver' => \My\Custom\Drivers\DriverName::class,
'custom_driver' => [
'path' => '/uploads/',
'tmp_path' => '/uploads/temp/',
],
```
```php
<?php
namespace My\Custom\Drivers;
class DriverName implements \Wester\ChunkUpload\Drivers\Contracts\DriverInterface
{
public function open() {};
public function close() {};
public function store($fileName) {};
public function delete() {};
public function move() {};
public function increase() {};
public function prevExists() {};
public function exists() {};
}
```
## Methods
* `store()` stores the chunk and merges it.
* `validate()` validates the chunk.
* `getFilePath()` gets the final file path.
* `getProgress()` gets the progress percentage (float).
* `isLast()` checks whether it's the last chunk.
* `getFileExtension()` gets the file extension.
* `getFileName()` gets the file name without extension.
* `getFullFileName()` gets the full file name with extension.
* `getTempFilePath()` gets the temp file path.
* `getSize()` gets the current chunk size.
* `getTotalNumber()` gets the total number of chunks.
* `setLanguage([...])` sets the language to the provided array.
* `response($status = null)` returns an instance of `\Wester\ChunkUpload\Response`
```php
$chunk->response(200)->json([...]);
$chunk->response()->json([...]);
// If an exception is caught...
$e->response(400)->...
$e->response(400)->abort();
$e->response()->abort(400);
...
```
## Properties
* `configs` returns an array of the parsed configs.
```php
$chunk->configs['name'];
...
```
* `header` returns an instance of `\Wester\ChunkUpload\Header`
```php
$chunk->header->chunkNumber;
$chunk->header->chunkTotalNumber;
$chunk->header->chunkSize; // equal to: x-chunk-size
$chunk->header->fileName;
$chunk->header->fileSize;
$chunk->header->fileIdentity;
```
## Validation Rules
* `extension`
```php
'validation' => ['extension:mp4,avi']
```
* `size`
```php
'validation' => ['size:237492']
```
* `min`
```php
'validation' => ['min:10000']
```
* `max`
```php
'validation' => ['max:90000']
```
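Rules can also be combined in a single `validation` array. A minimal sketch (the extensions and limits below are only illustrative):
```php
'validation' => ['extension:mp4,avi', 'min:10000', 'max:90000'],
```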
## Language
You can easily change the validation messages the same as Laravel.
```php
$chunk->setLanguage([
'min' => [
'numeric' => 'The :attribute must be at least :min.',
'file' => 'The :attribute must be at least :min kilobytes.',
],
'max' => [
'numeric' => 'The :attribute may not be greater than :max.',
'file' => 'The :attribute may not be greater than :max kilobytes.',
],
'size' => [
'numeric' => 'The :attribute must be :size.',
'file' => 'The :attribute must be :size kilobytes.',
],
'mimes' => 'The :attribute must be a file of type: :values.',
'attributes' => [
'x-file-name' => 'file',
'x-file-size' => 'file',
],
]);
```
## Flags
* `Chunk::RANDOM_FILE_NAME` creates a random file name.
* `Chunk::ORIGINAL_FILE_NAME` preserves the original file name.
* `Chunk::ORIGINAL_FILE_EXTENSION` preserves the original file extension.
> You can also specify a custom file name and extension.
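For example, a fixed name and extension can be passed instead of the flags. A minimal sketch (the values are only illustrative):
```php
'file_name' => 'my-custom-name',   // instead of Chunk::RANDOM_FILE_NAME
'file_extension' => 'mp4',         // instead of Chunk::ORIGINAL_FILE_EXTENSION
```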
## HTTP Response Status Codes
This package returns HTTP response status codes that tell the client side what to do next after each chunk upload, whether the request succeeds or fails.
* ### Success
* `200` All of the chunks have been uploaded completely.
* `201` The server is waiting for the next chunk to be sent.
* ### Errors
The following status codes will interrupt the process.
* `400`
* `404`
* `415`
* `422`
* `500`
* `501`
> Feel free to add more status codes to your client side.
> If any other status code is returned (for example after a `timeout` or `network error`), the chunk must be re-uploaded.
## Client Side
### Headers
There are some headers that should be sent to the server.
* `x-chunk-number` The current chunk number which is being uploaded.
* `x-chunk-total-number` The total number of chunks.
* `x-chunk-size` Maximum size of each chunk (each chunk must be exactly this size, 4000 bytes in the example above, and only the last chunk may be smaller).
* `x-file-name` The uploaded file name.
* `x-file-size` The uploaded file size.
* `x-file-identity` Random string for the file which must be 32 characters in length.
An example of the headers.
```json
{
"x-chunk-number" : 1,
"x-chunk-total-number" : 5,
"x-chunk-size" : 4000,
"x-file-name" : "my-file-name.mp4",
"x-file-size" : 20000,
"x-file-identity" : "rmghdygvdstcsjglltmbvkynxpeajgcg"
}
```
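The `x-file-identity` value just needs to be a random 32-character string that stays the same for every chunk of a given file. A minimal PHP sketch of generating one (mirroring the package's own `createRandomString()` helper; the client side can do the equivalent in its own language):
```php
$fileIdentity = bin2hex(random_bytes(16)); // 32 hex characters, reused for every chunk of the file
```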
### Examples & Packages
You can find examples in the [wester-chunk-upload-examples](https://github.com/hossein-zare/wester-chunk-upload-examples/) repository.
#### **Javascript**
The client side implementation in Javascript.
* [https://github.com/hossein-zare/wester-chunk-upload-examples/tree/master/javascript](https://github.com/hossein-zare/wester-chunk-upload-examples/tree/master/javascript)
#### **React Native**
The client side implementation in React Native.
* [https://github.com/hossein-zare/react-native-chunk-upload](https://github.com/hossein-zare/react-native-chunk-upload)
#### Contribution
* If you want to add more implementations in other languages, please submit your PR to the [wester-chunk-upload-examples](https://github.com/hossein-zare/wester-chunk-upload-examples/) repository.
## Support Us
Just star the repository, that's it! 😉<file_sep>/src/Exceptions/MainException.php
<?php
namespace Wester\ChunkUpload\Exceptions;
use Wester\ChunkUpload\Response;
class MainException extends \Exception
{
/**
* Create a new instance of the response class.
*
* @param null|int $status
* @return \Wester\ChunkUpload\Response
*/
public function response($status = null)
{
return (new Response())->status($status);
}
}
<file_sep>/src/Validation/Rules/FileRule.php
<?php
namespace Wester\ChunkUpload\Validation\Rules;
class FileRule extends NumericRule
{
//
}
|
bc9bda20676c2ba3eff6f1d6d61384208924f915
|
[
"Markdown",
"PHP"
] | 26
|
PHP
|
hossein-zare/wester-chunk-upload
|
dad33bf8331e1bbdf7a6166c949dfbc38c7e726c
|
a4e212ea6421ce811e2b3d00e3c9789fdf2afc69
|
refs/heads/master
|
<repo_name>zhaohaolin/dbay-apns-for-java<file_sep>/src/main/java/com/dbay/apns4j/model/Feedback.java
/*
* Copyright 2013 DiscoveryBay Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.dbay.apns4j.model;
import java.util.Date;
/**
* https://developer.apple.com/library/ios/documentation/NetworkingInternet/
* Conceptual/RemoteNotificationsPG/Chapters/CommunicatingWIthAPS.html
*
* @author RamosLi
*
*/
public class Feedback {
/**
* A timestamp indicating when APNs determined that the application no
* longer exists on the device. This value represents the seconds since
* 12:00 midnight on January 1, 1970 UTC.
*/
private long time;
/**
* The device token
*/
private String token;
public long getTime() {
return time;
}
public void setTime(long time) {
this.time = time;
}
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
public Date getDate() {
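// APNs reports the time in seconds; java.util.Date expects milliseconds.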
return new Date(getTime() * 1000);
}
}
<file_sep>/src/main/java/com/dbay/apns4j/tools/ApnsTools.java
/*
* Copyright 2013 DiscoveryBay Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.dbay.apns4j.tools;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Date;
import java.util.Enumeration;
import java.util.List;
import javax.net.SocketFactory;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import javax.security.cert.CertificateExpiredException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.dbay.apns4j.model.Command;
import com.dbay.apns4j.model.FrameItem;
/**
* @author RamosLi
*
*/
public abstract class ApnsTools {
private final static Logger LOG = LoggerFactory.getLogger(ApnsTools.class);
public final static byte[] generateData(List<FrameItem> list) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
DataOutputStream os = new DataOutputStream(bos);
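// APNs binary frame (SEND_V2): 1-byte command, 4-byte frame length, then for each item
// a 1-byte item id, a 2-byte item length and the item data (all big-endian/network order).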
int frameLength = 0;
for (FrameItem item : list) {
// itemId length = 1, itemDataLength = 2
frameLength += 1 + 2 + item.getItemLength();
}
try {
os.writeByte(Command.SEND_V2);
os.writeInt(frameLength);
for (FrameItem item : list) {
os.writeByte(item.getItemId());
os.writeShort(item.getItemLength());
os.write(item.getItemData());
}
return bos.toByteArray();
} catch (IOException e) {
e.printStackTrace();
}
throw new RuntimeException();
}
@Deprecated
public final static byte[] generateData(int id, int expire, byte[] token,
byte[] payload) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
DataOutputStream os = new DataOutputStream(bos);
try {
os.writeByte(Command.SEND);
os.writeInt(id);
os.writeInt(expire);
os.writeShort(token.length);
os.write(token);
os.writeShort(payload.length);
os.write(payload);
os.flush();
return bos.toByteArray();
} catch (IOException e) {
e.printStackTrace();
}
throw new RuntimeException();
}
private final static String[] hexArr = new String[] { "0", "1", "2",
"3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F" };
public final static String encodeHex(byte[] bytes) {
StringBuilder sb = new StringBuilder();
for (byte b : bytes) {
sb.append(hexArr[(b >> 4) & 0x0F]);
sb.append(hexArr[b & 0x0F]);
}
return sb.toString();
}
public final static byte[] decodeHex(String hex) {
byte[] bytes = new byte[hex.length() / 2];
for (int i = 0; i < bytes.length; i++) {
bytes[i] = (byte) ((hexCharIndex(hex.charAt(2 * i)) << 4) | hexCharIndex(hex
.charAt(2 * i + 1)));
}
return bytes;
}
/**
* @param hex
* @return 0---15
*/
private final static int hexCharIndex(char hex) {
int index = 0;
if (hex >= '0' && hex <= '9') {
index = hex - '0';
} else if (hex >= 'a' && hex <= 'f') {
index = hex - 'a' + 10;
} else if (hex >= 'A' && hex <= 'F') {
index = hex - 'A' + 10;
} else {
throw new IllegalArgumentException("Invalid hex char. " + hex);
}
return index;
}
public final static int parse4ByteInt(byte b1, byte b2, byte b3, byte b4) {
return ((b1 << 24) & 0xFF000000) | ((b2 << 16) & 0x00FF0000)
| ((b3 << 8) & 0x0000FF00) | (b4 & 0x000000FF);
}
public final static SocketFactory createSocketFactory(InputStream keyStore,
String password, String keystoreType, String algorithm,
String protocol) throws KeyStoreException,
NoSuchAlgorithmException, CertificateException, IOException,
UnrecoverableKeyException, KeyManagementException,
CertificateExpiredException {
char[] pwdChars = password.toCharArray();
KeyStore ks = KeyStore.getInstance(keystoreType);
ks.load(keyStore, pwdChars);
// Check whether the certificate has expired
Enumeration<String> enums = ks.aliases();
String alias = "";
if (enums.hasMoreElements()) {
alias = enums.nextElement();
}
if (StringUtils.isNotEmpty(alias)) {
X509Certificate certificate = (X509Certificate) ks
.getCertificate(alias);
if (null != certificate) {
String type = certificate.getType();
int ver = certificate.getVersion();
String name = certificate.getSubjectDN().getName();
String serialNumber = certificate.getSerialNumber()
.toString(16);
String issuerDN = certificate.getIssuerDN().getName();
String sigAlgName = certificate.getSigAlgName();
String publicAlgorithm = certificate.getPublicKey()
.getAlgorithm();
Date before = certificate.getNotBefore();
Date after = certificate.getNotAfter();
String beforeStr = DateFormatUtils.format(before,
"yyyy-MM-dd HH:mm:ss");
String afterStr = DateFormatUtils.format(after,
"yyyy-MM-dd HH:mm:ss");
// Determine how many days remain until the certificate expires
long expire = DateUtil
.getNumberOfDaysBetween(new Date(), after);
if (expire <= 0) {
if (LOG.isErrorEnabled()) {
LOG.error(
"证书标题:[{}], 类型:[{}], 版本号:[{}], 序列号:[{}], 发行者:[{}], 签名算法:[{}], 公钥算法:[{}], 有效期从:[{}]到[{}], 已经过期:[{}]天",
name, type, ver, serialNumber, issuerDN,
sigAlgName, publicAlgorithm, beforeStr,
afterStr, Math.abs(expire));
}
throw new CertificateExpiredException("证书已经过期:["
+ Math.abs(expire) + "]天");
}
if (LOG.isInfoEnabled()) {
LOG.info(
"证书标题:[{}], 类型:[{}], 版本号:[{}], 序列号:[{}], 发行者:[{}], 签名算法:[{}], 公钥算法:[{}], 有效期从:[{}]到[{}], 证书将在[{}]天后过期",
name, type, ver, serialNumber, issuerDN,
sigAlgName, publicAlgorithm, beforeStr, afterStr,
expire);
}
}
}
KeyManagerFactory kf = KeyManagerFactory.getInstance(algorithm);
kf.init(ks, pwdChars);
TrustManagerFactory tmf = TrustManagerFactory.getInstance(algorithm);
tmf.init((KeyStore) null);
SSLContext context = SSLContext.getInstance(protocol);
context.init(kf.getKeyManagers(), tmf.getTrustManagers(), null);
return context.getSocketFactory();
}
// All data is specified in network order, that is big endian.
public final static byte[] intToBytes(int num, int resultBytesCount) {
byte[] ret = new byte[resultBytesCount];
for (int i = 0; i < resultBytesCount; i++) {
ret[i] = (byte) ((num >> ((resultBytesCount - 1 - i) * 8)) & 0xFF);
}
return ret;
}
}
<file_sep>/src/main/java/com/dbay/apns4j/IApnsService.java
/*
* Copyright 2013 DiscoveryBay Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.dbay.apns4j;
import java.util.List;
import com.dbay.apns4j.model.Feedback;
import com.dbay.apns4j.model.Payload;
import com.dbay.apns4j.model.PushNotification;
/**
* @author RamosLi Email: <EMAIL> Chinese name: 李志才 Weibo:
* http://weibo.com/u/1809640367
*
*/
public interface IApnsService {
/**
* @param token deviceToken
* @param payload
*/
public void sendNotification(String token, Payload payload);
/**
* If you want to specify the ID of a notification, use this method
*
* @param notification
*/
public void sendNotification(PushNotification notification);
public void shutdown();
/**
* Returns the device tokens of devices on which the app has been uninstalled.
* You should call this interface periodically, e.g. once an hour or once a day.
*
* @return the device tokens which belong to the app that doesn't exist on
* the device.
*/
public List<Feedback> getFeedbacks();
}
<file_sep>/src/main/java/com/dbay/apns4j/tools/DateUtil.java
package com.dbay.apns4j.tools;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import org.apache.commons.lang.StringUtils;
/**
*
* Provides date arithmetic and conversion helpers, covering most commonly used date formats.
*
* @author joe.zhao(<EMAIL>)
* @version $Id: DateUtil, v 0.1 2015年3月21日 上午11:14:56 Exp $
*/
public abstract class DateUtil {
/**
* milliseconds in a second.
*/
public static final long SECOND = 1000;
/**
* milliseconds in a minute.
*/
public static final long MINUTE = SECOND * 60;
/**
* milliseconds in a hour.
*/
public static final long HOUR = MINUTE * 60;
/**
* milliseconds in a day.
*/
public static final long DAY = 24 * HOUR;
/** time_begin */
public static final String TIME_BEGIN = " 00:00:00";
/** time_end */
public static final String TIME_END = " 23:59:59";
/** date format yyyyMMdd */
public static final String MONTH_PATTERN = "yyyy-MM";
/** date format yyyyMMdd */
public static final String DEFAULT_PATTERN = "yyyyMMdd";
/** date format yyyyMMddHHmmss */
public static final String FULL_PATTERN = "yyyyMMddHHmmss";
/** date format yyyyMMdd HH:mm:ss */
public static final String FULL_STANDARD_PATTERN = "yyyyMMdd HH:mm:ss";
/** date format yyyy-MM-dd */
public static final String TRADITION_PATTERN = "yyyy-MM-dd";
/** date format yyyy-MM-dd HH:mm:ss */
public static final String FULL_TRADITION_PATTERN = "yyyy-MM-dd HH:mm:ss";
/**
* Return the current date in short format
*
* @return [yyyy-mm-dd]
*/
public final static String getShortNow() {
return formatDate(TRADITION_PATTERN);
}
/**
* Return the current time in 24-hour format
*
* @return [H:mm]
*/
public final static String getTimeBykm() {
return formatDate("H:mm");
}
/**
* Return the current month
*
* @return [MM]
*/
public final static String getMonth() {
return formatDate("MM");
}
/**
* Return the current day of the month
*
* @return [dd]
*/
public final static String getDay() {
return formatDate("dd");
}
/**
* Format date as "yyyyMMdd".
*
* @param date the date @see Date
* @return the formatted date string
*/
public final static String formatDate(final Date date) {
return formatDate(date, DEFAULT_PATTERN);
}
/**
* Format date as given date format.
*
* @param date the date @see Date
* @param format the date format
* @return the formatted date string; returns <code>null</code> if <code>date</code> is
* <code>null</code> or <code>format</code> is blank.
*/
public final static String formatDate(final Date date, String format) {
if (null == date || StringUtils.isBlank(format))
return null;
try {
return new SimpleDateFormat(format).format(date);
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
/**
* Format the current date.
*
* @param format the date format
* @return the formatted date string
*/
public final static String formatDate(String format) {
return formatDate(new Date(), format);
}
/**
* change the string to date
*
* @param date the date string
* @return Date if failed return <code>null</code>
*/
public final static Date parseDate(String date) {
return parseDate(date, DEFAULT_PATTERN, null);
}
/**
* change the string to date
*
* @param date String
* @param df DateFormat
* @return Date
*/
public final static Date parseDate(String date, String df) {
return parseDate(date, df, null);
}
/**
* change the string to date
*
* @param date String
* @param df DateFormat
* @param defaultValue if parse failed return the default value
* @return Date
*/
public final static Date parseDate(String date, String df, Date defaultValue) {
if (date == null || StringUtils.isBlank(df)) {
return defaultValue;
}
SimpleDateFormat formatter = new SimpleDateFormat(df);
try {
return formatter.parse(date);
} catch (ParseException e) {
e.printStackTrace();
}
return defaultValue;
}
/**
* @return the current date without time component
*/
public final static Date currentDate() {
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(System.currentTimeMillis());
calendar.set(Calendar.HOUR, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
return new Date(calendar.getTimeInMillis());
}
/**
* Get start of date.
*
* @param date
* @see Date
* @return the start of date
*/
public final static Date getStartOfDate(final Date date) {
if (date == null)
return null;
Calendar cal = GregorianCalendar.getInstance();
cal.setTime(date);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
return new Date(cal.getTime().getTime());
}
/**
* Get the Monday of last week
*
* @return the day previous monday
*/
public final static Date getPreviousMonday() {
Calendar cd = Calendar.getInstance();
// Find out which day of the week today is; Sunday is day 1, Monday is day 2, and so on.
int dayOfWeek = cd.get(Calendar.DAY_OF_WEEK) - 1; // subtract 1 because Monday is treated as the first day of the week (Chinese convention)
Date date;
if (dayOfWeek == 1) {
date = addDays(cd.getTime(), -7);
} else {
date = addDays(cd.getTime(), -6 - dayOfWeek);
}
return getStartOfDate(date);
}
/**
* Get the Monday from one month (four weeks) ago
*
* @return the monday before one month
*/
public final static Date getMondayBefore4Week() {
Calendar cd = Calendar.getInstance();
// Find out which day of the week today is; Sunday is day 1, Monday is day 2, and so on.
int dayOfWeek = cd.get(Calendar.DAY_OF_WEEK) - 1; // subtract 1 because Monday is treated as the first day of the week (Chinese convention)
Date date;
if (dayOfWeek == 1) {
date = addDays(cd.getTime(), -28);
} else {
date = addDays(cd.getTime(), -27 - dayOfWeek);
}
return getStartOfDate(date);
}
/**
* Get the Monday of the current week
*
* @return the day of current monday
*/
public final static Date getCurrentMonday() {
Calendar cd = Calendar.getInstance();
// Find out which day of the week today is; Sunday is day 1, Monday is day 2, and so on.
int dayOfWeek = cd.get(Calendar.DAY_OF_WEEK) - 1; // subtract 1 because Monday is treated as the first day of the week (Chinese convention)
Date date;
if (dayOfWeek == 1) {
date = cd.getTime();
} else {
date = addDays(cd.getTime(), 1 - dayOfWeek);
}
return getStartOfDate(date);
}
/**
* Get date one day before specified one.
*
* @param date1 test date
* @param date2 date when
* @return true if date1 is before date2
*/
public final static boolean beforeDay(final Date date1, final Date date2) {
if (date1 == null)
return true;
return getStartOfDate(date1).before(getStartOfDate(date2));
}
/**
* Get date one day after specified one.
*
* @param date1 Date 1
* @param date2 Date 2
* @return true if after day
*/
public final static boolean afterDay(final Date date1, final Date date2) {
if (date1 == null)
return false;
return getStartOfDate(date1).after(getStartOfDate(date2));
}
/**
* Add specified number of months to the date given.
*
* @param date Date
* @param months Int number of months to add
* @return Date
*/
public final static Date addMonths(Date date, int months) {
if (months == 0)
return date;
if (date == null)
return null;
Calendar cal = Calendar.getInstance();
cal.setTime(date);
cal.add(Calendar.MONTH, months);
return cal.getTime();
}
/**
* Add specified number of days to the given date.
*
* @param date date
* @param days Int number of days to add
* @return revised date
*/
public final static Date addDays(final Date date, int days) {
if (days == 0)
return date;
if (date == null)
return null;
Calendar cal = GregorianCalendar.getInstance();
cal.setTime(date);
cal.add(Calendar.DAY_OF_MONTH, days);
return new Date(cal.getTime().getTime());
}
public final static Date addMins(final Date date, int mins) {
if (mins == 0)
return date;
if (date == null)
return null;
Calendar cal = GregorianCalendar.getInstance();
cal.setTime(date);
cal.add(Calendar.MINUTE, mins);
return new Date(cal.getTime().getTime());
}
/**
* Compare the two dates whether are in the same month.
*
* @param date1 the first date
* @param date2 the second date
* @return whether are in the same month
*/
public final static boolean isSameMonth(Date date1, Date date2) {
if (date1 == null && date2 == null)
return true;
if (date1 == null || date2 == null)
return false;
Calendar cal1 = GregorianCalendar.getInstance();
cal1.setTime(date1);
Calendar cal2 = GregorianCalendar.getInstance();
cal2.setTime(date2);
return isSameMonth(cal1, cal2);
}
public final static boolean isSameDay(Date date1, Date date2) {
if (date1 == null && date2 == null)
return true;
if (date1 == null || date2 == null)
return false;
Calendar cal1 = GregorianCalendar.getInstance();
cal1.setTime(date1);
Calendar cal2 = GregorianCalendar.getInstance();
cal2.setTime(date2);
return (cal1.get(Calendar.YEAR) == cal2.get(Calendar.YEAR))
&& (cal1.get(Calendar.MONTH) == cal2.get(Calendar.MONTH) && (cal1
.get(Calendar.DATE) == cal2.get(Calendar.DATE)));
}
/**
* Compare the two calendars whether they are in the same month.
*
* @param cal1 the first calendar
* @param cal2 the second calendar
* @return whether are in the same month
*/
public final static boolean isSameMonth(Calendar cal1, Calendar cal2) {
if (cal1 == null && cal2 == null)
return true;
if (cal1 == null || cal2 == null)
return false;
return (cal1.get(Calendar.YEAR) == cal2.get(Calendar.YEAR))
&& (cal1.get(Calendar.MONTH) == cal2.get(Calendar.MONTH));
}
/**
* Return the end of the month based on the date passed as input parameter.
*
* @param date Date
* @return Date endOfMonth
*/
public final static Date getEndOfMonth(final Date date) {
if (date == null)
return null;
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
calendar.set(Calendar.MONTH, calendar.get(Calendar.MONTH) + 1);
calendar.set(Calendar.DATE, 0);
calendar.set(Calendar.HOUR_OF_DAY, 12);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
return new Date(calendar.getTimeInMillis());
}
/**
* Get first day of month.
*
* @param date Date
* @return Date
*/
public final static Date getFirstOfMonth(final Date date) {
Date lastMonth = addMonths(date, -1);
lastMonth = getEndOfMonth(lastMonth);
return addDays(lastMonth, 1);
}
/**
* Check whether the date string is valid for the given format
*
* @param sourceDate the date @see Date
* @return <code>true</code> if valid; returns <code>false</code> if <code>sourceDate</code> is
* <code>null</code> or <code>format</code> is blank
*/
public final static boolean inFormat(String sourceDate, String format) {
if (sourceDate == null || StringUtils.isBlank(format)) {
return false;
}
try {
SimpleDateFormat dateFormat = new SimpleDateFormat(format);
dateFormat.setLenient(false);
dateFormat.parse(sourceDate);
return true;
} catch (Exception e) {
return false;
}
}
/**
* get date time as "yyyyMMddhhmmss"
*
* @return the current date with time component
*/
public final static String now() {
return formatDate(new Date(), FULL_PATTERN);
}
/**
* Format a date using the short Chinese date format.
*
* @param gstrDate the date to format
* @return [yyyy年MM月dd日]
*/
public final static String formatShortDateC(Date gstrDate) {
if (gstrDate == null)
return null;
SimpleDateFormat formatter = new SimpleDateFormat("yyyy年MM月dd日");
// Date nowc = new Date();
String pid = formatter.format(gstrDate);
return pid;
}
/**
* Return the current time in the standard format
*
* @return [yyyy-MM-dd k:mm:ss]
*/
public final static String getNow() {
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd H:mm:ss");
Date nowc = new Date();
String pid = formatter.format(nowc);
return pid;
}
/**
* Return the short date part of a date-time string
*
* @return [yyyy-mm-dd]
*/
public final static String formatShort(String strDate) {
String ret = "";
if (strDate != null && !"1900-01-01 00:00:00.0".equals(strDate)
&& strDate.indexOf("-") > 0) {
ret = strDate;
if (ret.indexOf(" ") > -1)
ret = ret.substring(0, ret.indexOf(" "));
}
return ret;
}
/**
* Return the number of seconds between two times.
*
* @param d1 the start time
* @param d2 the end time
* @return the number of seconds interval,if either <code>d1</code> or
* <code>d2</code> is zero,return <code>-1</code>
*/
public final static int getNumberOfSecondsBetween(final double d1,
final double d2) {
if ((d1 == 0) || (d2 == 0)) {
return -1;
}
return (int) (Math.abs(d1 - d2) / SECOND);
}
/**
* Return the number of months between two dates.
*
* @param before the start date @see Date
* @param end the end date @see Date
* @return the number of months interval,if either <code>before</code> or
* <code>end</code> is <code>null</code>,return <code>-1</code>
*/
public final static int getNumberOfMonthsBetween(final Date before,
final Date end) {
if (before == null || end == null)
return -1;
Calendar cal1 = Calendar.getInstance();
cal1.setTime(before);
Calendar cal2 = Calendar.getInstance();
cal2.setTime(end);
return (cal2.get(Calendar.YEAR) - cal1.get(Calendar.YEAR)) * 12
+ (cal2.get(Calendar.MONTH) - cal1.get(Calendar.MONTH));
}
/**
* Return the number of minutes between two times.
*
* @param before the start time
* @param end the end time
* @return the number of minutes; returns <code>-1</code> if either <code>before</code> or
* <code>end</code> is <code>null</code>
*/
public final static long getNumberOfMinuteBetween(final Date before,
final Date end) {
if (before == null || end == null)
return -1;
long millisec = end.getTime() - before.getTime();
return millisec / (60 * 1000);
}
/**
* Return the number of hours between two times.
*
* @param before the start time
* @param end the end time
* @return the number of hours; returns <code>-1</code> if either <code>before</code> or
* <code>end</code> is <code>null</code>
*/
public final static long getNumberOfHoursBetween(final Date before,
final Date end) {
if (before == null || end == null)
return -1;
long millisec = end.getTime() - before.getTime() + 1;
return millisec / (60 * 60 * 1000);
}
/**
* Return the date formatted as month and day ("MM月dd日").
*
* @param srcDate the src date @see Date
* @return the date formatted as "MM月dd日"
*/
public final static String formatMonthAndDay(Date srcDate) {
return formatDate(srcDate, "MM月dd日");
}
public final static long getNumberOfDaysBetween(final Date before,
final Date end) {
if (before == null || end == null)
return -1;
Calendar cal = Calendar.getInstance();
cal.setTime(before);
Calendar endCal = Calendar.getInstance();
endCal.setTime(end);
return getNumberOfDaysBetween(cal, endCal);
}
/**
* Calculate the number of days between two dates.
*
* @param cal1 the before calendar @see Calendar
* @param cal2 the end calendar @see Calendar
* @return the number of days; returns <code>-1</code> if either <code>cal1</code> or
* <code>cal2</code> is <code>null</code>
*/
public final static long getNumberOfDaysBetween(Calendar cal1, Calendar cal2) {
if (cal1 == null || cal2 == null)
return -1;
cal1.clear(Calendar.MILLISECOND);
cal1.clear(Calendar.SECOND);
cal1.clear(Calendar.MINUTE);
cal1.clear(Calendar.HOUR_OF_DAY);
cal2.clear(Calendar.MILLISECOND);
cal2.clear(Calendar.SECOND);
cal2.clear(Calendar.MINUTE);
cal2.clear(Calendar.HOUR_OF_DAY);
long elapsed = cal2.getTime().getTime() - cal1.getTime().getTime();
return elapsed / DAY;
}
/**
* return current calendar instance
*
* @return Calendar
*/
public final static Calendar getCurrentCalendar() {
return Calendar.getInstance();
}
/**
* return current time
*
* @return current time
*/
public final static Timestamp getCurrentDateTime() {
return new Timestamp(System.currentTimeMillis());
}
/**
* Get the system date.
*
* @return the system date
*/
public final static Date getCurrentDate() {
return new Date(System.currentTimeMillis());
}
/**
 * Gets the year.
*
* @param date
* @see Date
* @return the year of <code>date</code>,if <code>date</code> is null,return
* -1
*/
public static final int getYear(Date date) {
if (date == null)
return -1;
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
return calendar.get(Calendar.YEAR);
}
/**
 * Gets the year.
*
* @param millis long
* @return the year of date
*/
public static final int getYear(long millis) {
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(millis);
return calendar.get(Calendar.YEAR);
}
/**
 * Gets the month.
*
* @param date
* @see Date
* @return the month of <code>date</code>,if <code>date</code> is
* null,return -1
*/
public static final int getMonth(Date date) {
if (date == null)
return -1;
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
return calendar.get(Calendar.MONTH) + 1;
}
/**
 * Gets the month.
*
* @param millis long
* @return the month of date
*/
public static final int getMonth(long millis) {
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(millis);
return calendar.get(Calendar.MONTH) + 1;
}
/**
 * Gets the day of the month.
*
* @param date
* @see Date
* @return the day of <code>date</code>,if <code>date</code> is null,return
* -1
*/
public static final int getDate(Date date) {
if (date == null)
return -1;
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
return calendar.get(Calendar.DATE);
}
/**
 * Gets the day of the month.
*
* @param millis long
* @return the day of date
*/
public static final int getDate(long millis) {
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(millis);
return calendar.get(Calendar.DATE);
}
/**
 * Gets the hour.
*
* @param date
* @see Date
* @return the hour of <code>date</code>,if <code>date</code> is null,return
* -1
*/
public static final int getHour(Date date) {
if (date == null)
return -1;
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
return calendar.get(Calendar.HOUR_OF_DAY);
}
/**
 * Gets the hour.
*
* @param millis long
* @return the hour of date
*/
public static final int getHour(long millis) {
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(millis);
return calendar.get(Calendar.HOUR_OF_DAY);
}
}
<file_sep>/src/main/java/com/dbay/apns4j/ErrorProcessHandler.java
/*
 * Copyright (c) 2012-2015 Hikvision Co, Ltd. All rights reserved.
 * Filename: ErrorProcessHandler.java
 * Creator: joe.zhao(<EMAIL>)
 * Create-Date: 11:29:48 AM
*/
package com.dbay.apns4j;
/**
 * Callback for handling an error response returned by APNs for a pushed notification.
*
* @author joe.zhao(<EMAIL>)
 * @version $Id: ErrorProcessHandler, v 0.1 Jul 8, 2016 11:29:48 AM Exp $
*/
public interface ErrorProcessHandler {
void process(int id, int status, String token);
}
|
4fcd3a8d06ab918097d82ecebf965f0f08b66eb5
|
[
"Java"
] | 5
|
Java
|
zhaohaolin/dbay-apns-for-java
|
5212bd158b9cacb87129a710910a81c285da9380
|
6d01a141178296937d5fdf5a1e284eb8534069af
|
refs/heads/main
|
<file_sep># frozen_string_literal: true
module Api
module V2
class UserSerializer < ActiveModel::Serializer
attributes :id,
:email,
:auth_token,
:created_at,
:updated_at
end
end
end
<file_sep># frozen_string_literal: true
module Api
module V2
class TaskSerializer < ActiveModel::Serializer
attributes :id,
:title,
:description,
:done,
:deadline,
:deadline_formatted,
:user_id,
:created_at,
:updated_at
def deadline_formatted
I18n.l(object.deadline, format: :custom_format) if object.deadline.present?
end
end
end
end
<file_sep>FROM ruby:2.4.4-alpine
ENV HOME /app
WORKDIR $HOME
RUN apk add --no-cache --update \
build-base \
linux-headers \
mariadb-dev \
mysql-client \
nodejs \
tzdata
COPY Gemfile Gemfile.lock ./
RUN bundle install --binstubs
COPY . .
CMD [ "rails", "server", "-b", "0.0.0.0" ]
<file_sep># frozen_string_literal: true
module Api
module V2
class BaseController < ApplicationController
include DeviseTokenAuth::Concerns::SetUserByToken
end
end
end
<file_sep>version: '3'
networks:
forest:
volumes:
mysql-db-data:
services:
rails-api:
depends_on:
- mysql-db
build: .
image: rails-api:latest
tty: true
stdin_open: true
volumes:
- .:/app
env_file:
- .env
ports:
- 80:3000
networks:
- forest
mysql-db:
image: mysql:8
command: --default-authentication-plugin=mysql_native_password
environment:
- MYSQL_DATABASE=${DATABASE_DEV_NAME}
- MYSQL_ROOT_PASSWORD=${DATABASE_PWD}
volumes:
- mysql-db-data:/var/lib/mysql
networks:
- forest
|
db192faea51de1e137a5a468f50d2dde522627a7
|
[
"Ruby",
"Dockerfile",
"YAML"
] | 5
|
Ruby
|
afonsir/task-manager-api
|
6c5fc449759b638168d07a1827ddf527ee3e192e
|
f195ab83b75c1be339be6940674b7bdae562d0f8
|
refs/heads/master
|
<repo_name>shiyuugohirao/komorebiGhost2<file_sep>/src/ofApp.h
//
// komorebiGhost
//
// Created by shugohirao on 2018/03/05.
//
#pragma once
#include "ofMain.h"
#include "ofxCv.h"
#include "ofxIterativeBoxBlur.h"
#include "ofxOsc.h"
#include "ofxGui.h"
#include "leaf.h"
#include "findGhost.h"
#include "ghostFace.h"
class ofApp : public ofBaseApp{
public:
void setup();
void update();
void draw();
void keyPressed(int key);
void mousePressed(int x, int y, int button);
ofPoint ofGetCenter(){
return ofPoint((float)ofGetWidth()/2,(float)ofGetHeight()/2);
}
void initFbo(ofFbo &fbo, float w = ofGetWidth(), float h = ofGetHeight(), ofPoint anc = ofPoint(0.5,0.5)){
fbo.allocate(w,h); // w1080 * h1920
fbo.begin();
ofClear(255);
fbo.end();
fbo.setAnchorPercent(anc.x,anc.y);
}
private:
//--- Method ---//
void setupCam(int camW,int camH);
void setupLeaf();
void setupShader();
void setupGui();
void addLeaf(int num);
void deleteLeaf(int num);
void sendOsc(int ghostId,ofRectangle eyeR,ofRectangle eyeL,ofRectangle mouth);
//--- Property ---//
ofFbo komorebiFbo;
ofFbo ghostFbo;
ofFbo brightenFbo;
ofFbo contourFbo;
vector<leaf> leaves;
vector<ofFbo> originalLeaf;
ofxCv::ContourFinder contourFinder;
int globalSec;
ofShader shader;
ofFbo label[2][3];
findGhost findGhostThread;
ofVec2f camScale;
vector<vector<ofVec2f>> allPoints;
bool bGui = true;
float timef;
float sinVal[4];
//--- addons ---//
ofVideoGrabber cam;
ofFbo camFbo;
ofxIterativeBoxBlur blur;
//--- OSC ---//
ofxOscSender oscSender;
string HOST;
int PORT;
//- gui
ofxPanel komorebiGui;
ofxToggle bKomorebi;
ofxColorSlider addColor;
ofxFloatSlider blurRadius;
ofxFloatSlider leavesScale;
ofxFloatSlider leavesThreshold;
ofxColorSlider startColor, endColor;
ofxFloatSlider waveVal1,waveVal2,waveVal3;
ofxPanel ghostGui;
ofxToggle bCam;
ofxToggle bOsc;
ofxToggle bPC;
ofxToggle UPSIDEDOWN;
ofxToggle bShowContour;
ofxFloatSlider minContour,maxContour;
ofxToggle bShowGhost;
ofxToggle bLabel;
ofxFloatSlider eMinDist, eMaxDist, eSizeDiff, mMinDist, mMaxDist, mDistDiff;
ofxFloatSlider camThreshold;
ofxToggle bExhibit;
ofxButton btnClear;
ofxFloatSlider brightness;
};
<file_sep>/src/leaf.h
//
// leaf.h
// komorebiGhost
//
// Created by shugohirao on 2018/03/05.
//
#pragma once
#include "ofMain.h"
#include "ghostFace.h"
class leaf {
private:
ofFbo fbo;
int group;
ofImage img;
ofRectangle leafArea;
ofPoint pos;
ofColor color;
float radian;
float rotate;
float w,h;
float drawScale,scale;
float alpha;
float minDistance;
float threshold = 50;
const ofPoint originalPos;
const float originalScale;
const float originalRotate;
const float MAX_RAD = 90*DEG_TO_RAD;
const float MIN_RAD = -90*DEG_TO_RAD;
public:
leaf(ofFbo _fbo)
:fbo(_fbo)
,group((int)ofRandom(4))
,pos(ofPoint(ofRandomWidth(),ofRandomHeight()))
,scale(ofRandom(0.3,0.8)) //0.3-0.5
,drawScale(scale)
,color(ofColor::darkGray)
,rotate(ofRandom(0,360))
,alpha(200)
,threshold(50)
,minDistance(threshold)
// ,leafArea(ofRectangle(pos, fbo.getWidth(), fbo.getHeight()))
/* set const original value */
,originalScale(scale)
,originalPos(pos)
,originalRotate(rotate)
{
// randomizeLeaf(_fbo);
fbo.setAnchorPercent(0.5, 0.5);
};
~leaf(){};
void randomizeLeaf(ofFbo _fbo){
w=_fbo.getWidth()*scale;
h=_fbo.getHeight()*scale;
}
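    // Fades the leaf out while it overlaps any detected ghost-face feature (eyes or mouth),
    // and fades it back in otherwise.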
void update(vector<ghostFace> & ghostFaces, ofVec2f camScale, bool upsidedown){
minDistance = threshold;
leafArea.setFromCenter(pos, fbo.getWidth(), fbo.getHeight());
leafArea.scale(0.5);
alpha += alpha>255 ? 0:10;
if(ghostFaces.size()){
for(auto &gF:ghostFaces){
// if(gF.getRectEyeR().inside(pos) || gF.getRectEyeL().inside(pos) || gF.getRectMouth().inside(pos)){
if(gF.getRectEyeR().intersects(leafArea) || gF.getRectEyeL().intersects(leafArea) || gF.getRectMouth().intersects(leafArea)){
minDistance = 0;
alpha -= alpha<=0 ? 0:40;
// minDistance = min(pos.distance(bR.rect.getCenter()*camScale),minDistance);
}
}
}
}
void calcPos(float v, float xPower,float yPower){
float noise = ofSignedNoise(v);
ofVec2f vec = ofVec2f(noise * xPower,noise * yPower);
pos = originalPos + vec;
}
void draw(){
ofPushStyle();
ofPushMatrix();
{
ofTranslate(pos);
ofRotate(rotate);
ofScale(drawScale,drawScale);
ofSetColor(color,alpha);
fbo.draw(0,0);
}
ofPopMatrix();
ofPopStyle();
}
void setColor(ofColor col){ color = col; }
void setThreshold(float t){ threshold = t; }
    void setGroup(int g){ group = g; } // set from ofApp, e.g. via Rect inside() checks
void rescale(float s){
scale = originalScale * s;
drawScale = scale;
}
void shake(ofVec2f v){
}
int getGroup(){ return group; }
};
<file_sep>/src/findGhost.h
//
// findGhost.h
// komorebiGhost2
//
// Created by shugohirao on 2018/03/09.
//
#pragma once
#include "ofMain.h"
#include "ofApp.h"
#include "ofThread.h"
#include "ofThreadChannel.h"
#include "ofxCv.h"
#include "ghostFace.h"
class findGhost : public ofThread{
public:
findGhost():newBlobs(true),newGhosts(true)
{
startThread();
};
~findGhost(){
stopThread();
cfThread.close();
blobRectsThread.close();
analyzedBlobs.close();
analyzedGhosts.close();
waitForThread(true);
};
void setContourFinder(ofxCv::ContourFinder & cf){
cfThread.send(cf);
}
void setBlobRects(vector<blobRect> & r){
blobRectsThread.send(r);
}
void updateBlob(){
newBlobs = false;
while(analyzedBlobs.tryReceive(blobRects)){
newBlobs = true;
}
}
void updateGhost(){
newGhosts = false;
while(analyzedGhosts.tryReceive(ghostFaces)){
newGhosts = true;
}
}
const void drawRectFaces(){
if(ghostFaces.size()){
for(auto & g:ghostFaces){
g.drawRectFace();
}
}
}
const void drawPolylineFaces(){
if(ghostFaces.size()){
ofSetLineWidth(3);
for(auto & g:ghostFaces){
g.drawPolylineFace();
}
}
}
const void drawLabels(bool bPC, ofFbo fbo[2][3]){
if(ghostFaces.size()){
for(auto & g:ghostFaces){
g.drawLabel(bPC,fbo);
}
}
}
const void drawLabels(){
if(ghostFaces.size()){
for(auto & g:ghostFaces){
g.drawLabel();
}
}
}
//--- getter ---//
vector<ghostFace> & getGhosts(){
return ghostFaces;
}
    ghostFace & getGhosts(int i){
        if(0 <= i && i < (int)ghostFaces.size()){
            return ghostFaces[i];
        }
        return ghostFaces.front(); // fallback for an out-of-range index; undefined if empty
    }
vector<blobRect> & getBlobRects(){
return blobRects;
}
//--- set Parameter ---///
void setParams(float eMinDist, float eMaxDist, float eSizeDiff, float mMinDist, float mMaxDist, float mDistDiff){
eyeMinDist = eMinDist;
eyeMaxDist = eMaxDist;
eyeSizeDiff = eSizeDiff;
mouthMinDist = mMinDist;
mouthMaxDist = mMaxDist;
mouthDistDiff = mDistDiff;
}
private:
ofxCv::ContourFinder cf;
vector<blobRect> blobRects;
vector<ghostFace> ghostFaces;
vector<ofPolyline> polylines;
ofThreadChannel<ofxCv::ContourFinder> cfThread;
ofThreadChannel<vector<blobRect>> blobRectsThread;
ofThreadChannel<vector<blobRect>> analyzedBlobs;
ofThreadChannel<vector<ghostFace>> analyzedGhosts;
bool newBlobs,newGhosts;
float eyeMinDist = 50;
float eyeMaxDist = 100;
float eyeSizeDiff = 50;
float mouthMinDist = 30;
float mouthMaxDist = 100;
float mouthDistDiff = 30;
void threadedFunction(){
ofxCv::ContourFinder _cf;
blobRects.reserve(300);
blobRects.clear();
ghostFaces.reserve(30);
ghostFaces.clear();
while(cfThread.receive(_cf)){
int ghostId = 0;
//--- loacl value in thread ---//
vector<blobRect> _blobRects;
_blobRects.reserve(300);
_blobRects.clear();
vector<ofPolyline> _blobPoly;
_blobPoly.reserve(300);
_blobPoly.clear();
vector<ghostFace> _ghostFaces;
_ghostFaces.reserve(30);
_ghostFaces.clear();
            int i = 0; // rect id, restarts every frame
            for(const auto & r:_cf.getBoundingRects()){ // all cv::Rects
                blobRect bR(i,ofxCv::toOf(r));
                _blobRects.push_back(bR);
                i++;
            }
_blobPoly = _cf.getPolylines();
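            // Brute-force face search: every pair of similarly sized blobs within the eye distance
            // bounds is treated as candidate eyes, then a third blob roughly equidistant from both
            // (within the mouth distance bounds) completes a ghost face.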
int r = 0;
for (auto & _eyeR:_blobRects){ /* first : set blobRect as a candidate of eyeR */
if(_eyeR.part == NONE){
ofPoint posEyeR = _eyeR.rect.getCenter();
int l = 0;
for(auto & _eyeL : _blobRects){
//--- check Size to find Eyes ---//
if(_eyeR.rectId != _eyeL.rectId &&
eyeSizeDiff > abs(_eyeR.rect.getPerimeter() - _eyeL.rect.getPerimeter()) && /* similar Perimeter */
eyeMinDist < posEyeR.distance(_eyeL.rect.getCenter()) && /* eyes MinDistance */
eyeMaxDist > posEyeR.distance(_eyeL.rect.getCenter()) /* eyes MaxDistance */ ){
ofPoint posEyeL = _eyeL.rect.getCenter();
//--- check to find Mouth ---//
int m = 0;
for (auto & _mouth:_blobRects){
ofPoint posMouth = _mouth.rect.getCenter();
float EyeR_Mouth = posEyeR.distance(posMouth);
float EyeL_Mouth = posEyeL.distance(posMouth);
if(_eyeR.rectId != _mouth.rectId && _eyeL.rectId != _mouth.rectId &&
mouthMinDist < EyeR_Mouth && mouthMinDist < EyeL_Mouth && /* Min distance from eyeRL */
mouthMaxDist > EyeR_Mouth && mouthMaxDist > EyeL_Mouth && /* Max distance from eyeRL */
mouthDistDiff > abs(EyeR_Mouth - EyeL_Mouth) /* similar distance from eyeRL*/){
setGhost(ghostId, _blobPoly, r,l,m,_eyeR,_eyeL,_mouth, _ghostFaces);
}
m++;
}// for mouth end
}
l++;
}// for eyeL end
}
r++;
}// for eyeR
analyzedBlobs.send(std::move(_blobRects));
analyzedGhosts.send(std::move(_ghostFaces));
}
}
void setGhost(int & ghostId, const vector<ofPolyline> &_blobPoly, const int &r,const int &l,const int &m, blobRect &_eyeR, blobRect &_eyeL, blobRect &_mouth, vector<ghostFace> &_ghostFaces){
//### recognize as simple Face ###
// cout<<" - find Ghost! ghostID : "<<ghostId<<endl;
ofPolyline _p_eyeR = _blobPoly[r];
ofPolyline _p_eyeL = _blobPoly[l];
ofPolyline _p_mouth = _blobPoly[m];
_eyeR.part = EYE_R;
_eyeL.part = EYE_L;
_mouth.part = MOUTH;
ghostFace gF(ghostId,_eyeR,_eyeL,_mouth,_p_eyeR,_p_eyeL,_p_mouth);
ofColor col;
col.setHsb(ofRandom(255), 100, 200, 200);
gF.setColor(col);
_ghostFaces.push_back(gF);
ghostId++;
}
};
<file_sep>/src/ghostFace.h
//
// ghostFace.h
// komorebiGhost2
//
// Created by shugohirao on 2018/03/09.
//
#pragma once
#include "ofMain.h"
enum Part{
NONE,
EYE_R,
EYE_L,
MOUTH,
};
struct blobRect{
blobRect(int i,ofRectangle r):rectId(i),rect(r){};
int rectId;
ofRectangle rect;
Part part = NONE;
};
class ghostFace{
private:
int ghostId;
ofRectangle eyeR;
ofRectangle eyeL;
ofRectangle mouth;
ofPolyline p_eyeR;
ofPolyline p_eyeL;
ofPolyline p_mouth;
ofColor color;
public:
ghostFace(int Id, blobRect eR, blobRect eL, blobRect m, ofPolyline p_eR, ofPolyline p_eL, ofPolyline p_m)
:ghostId(Id)
,eyeR(eR.rect)
,eyeL(eL.rect)
,mouth(m.rect)
,p_eyeR(p_eR)
,p_eyeL(p_eL)
,p_mouth(p_m)
{};
void setId(int Id){
ghostId = Id;
}
int getId(){
return ghostId;
}
    ofRectangle getRect(Part part){
        switch(part){
            case EYE_R:
                return eyeR;
            case EYE_L:
                return eyeL;
            case MOUTH:
                return mouth;
            case NONE:
            default:
                ofLog(OF_LOG_WARNING, "no rect for NONE, returning an empty rectangle");
                return ofRectangle();
        }
    }
ofRectangle getRectEyeR(){
return eyeR;
}
ofRectangle getRectEyeL(){
return eyeL;
}
ofRectangle getRectMouth(){
return mouth;
}
void setBlobRect(blobRect eR, blobRect eL, blobRect m){
eyeR = eR.rect;
eyeL = eL.rect;
mouth = m.rect;
}
void setBlobRect(blobRect bR){
switch(bR.part){
case NONE:
return;
case EYE_R:
eyeR = bR.rect;
break;
case EYE_L:
eyeL = bR.rect;
break;
case MOUTH:
mouth = bR.rect;
break;
}
}
void setColor(ofColor col){
color = col;
}
//--- draw ---//
    void drawRectFace(){
        ofPushStyle();
        {
            ofSetColor(color);
            ofDrawRectangle(eyeL);
            ofDrawRectangle(eyeR);
            ofDrawRectangle(mouth);
        }
        ofPopStyle();
    }
void drawPolylineFace(bool Fill = false){
ofPushStyle();
{
ofSetColor(color);
(Fill) ? ofFill() : ofNoFill();
ofSetLineWidth(1);
p_eyeL.draw();
p_eyeR.draw();
p_mouth.draw();
}
ofPopStyle();
}
void drawLabel(bool bPC, ofFbo fbo[2][3]){
ofPushMatrix();
ofPushStyle();
{
if(bPC){
fbo[0][0].draw(eyeR.getPosition());
fbo[0][1].draw(eyeL.getPosition());
fbo[0][2].draw(mouth.getPosition());
}else{
fbo[1][0].draw(eyeR.getPosition());
fbo[1][1].draw(eyeL.getPosition());
fbo[1][2].draw(mouth.getPosition());
}
}
ofPopStyle();
ofPopMatrix();
}
void drawLabel(){
ofPushMatrix();
ofPushStyle();
{
ofDrawBitmapString("RightEye"+to_string(ghostId), eyeR.getPosition());
ofDrawBitmapString("LeftEye"+to_string(ghostId), eyeL.getPosition());
ofDrawBitmapString("Mouth"+to_string(ghostId), mouth.getPosition());
}
ofPopStyle();
ofPopMatrix();
}
};
<file_sep>/README.md
# komorebiGhost2 for ahd
Detects ghost faces in the camera feed using contour finding.
## caution!!
1. When building, you need to add
```cp -r bin/data "$TARGET_BUILD_DIR/$PRODUCT_NAME.app/Contents/Resources"```
to Build Phases.
## addons
ofxCv
ofxGui
ofxIterativeBoxBlur
ofxOpenCV
ofxOsc
## dependency
xcode9.2
oF0.9.8
<file_sep>/src/ofApp.cpp
#include "ofApp.h"
//--------------------------------------------------------------
void ofApp::setup(){
ofSetDataPathRoot("../Resources/data/");
ofBackgroundGradient( ofColor(255), ofColor(180), OF_GRADIENT_CIRCULAR);
cout<<"=== Display Settings ==="<<endl;
cout<<" - ofGet Width:"<<ofGetWidth()<<" / Height:"<<ofGetHeight()<<endl;
cout<<" - ofGetScreen Width:"<<ofGetScreenWidth()<<" / Height:"<<ofGetScreenHeight()<<endl;
cout<<" - ofGetWindow Width:"<<ofGetWindowWidth()<<" / Height:"<<ofGetWindowHeight()<<endl;
cout<<" - ofGetWindowMode():"<<ofGetWindowMode()<<endl;
cout<<"========================"<<endl;
const int camW = 960;
const int camH = 540;
setupCam(camW,camH);
if(ofGetWidth()<ofGetHeight()){
/* Vertical Display */
camScale = ofPoint((float)ofGetWidth()/camH, (float)ofGetHeight()/camW);
}else{
/* Horizontal Display */
camScale = ofPoint((float)ofGetWidth()/camW, (float)ofGetHeight()/camH);
}
cout<<"camScale = "<< camScale <<endl;
//--- setupFbo
initFbo(komorebiFbo,ofGetWidth(),ofGetHeight(),ofPoint(0,0));
initFbo(brightenFbo);
initFbo(camFbo);
initFbo(ghostFbo);
initFbo(contourFbo);
//--- setupLeaf
setupLeaf();
leaves.reserve(3000);
leaves.clear();
addLeaf(500);
// --- set Labels ---
string path[] = {"eyeR.png","eyeL.png","mouth.png"};
ofImage img;
for(int i=0; i<3;i++){
img.load("A/" + path[i]);
label[0][i].allocate(img.getWidth(), img.getHeight());
label[0][i].begin();
ofClear(255);
img.draw(0, 0);
label[0][i].end();
img.load("B/" + path[i]);
label[1][i].allocate(img.getWidth(), img.getHeight());
label[1][i].begin();
ofClear(255);
img.draw(0, 0);
label[1][i].end();
}
setupShader();
//--- OSC setting ---//
// HOST = "172.16.65.45";
// PORT = 12345;
string setting = ofBufferFromFile("settings/wifi.txt").getData();
vector<string> str = ofSplitString(setting, "\n");
HOST = str[0];
PORT = ofToInt(str[1]);
cout<< "HOST : " << HOST <<" PORT : " << PORT <<endl;
if(str.size()) oscSender.setup(HOST, PORT);
setupGui();
}
//--------------------------------------------------------------
void ofApp::update(){
timef = ofGetElapsedTimef();
globalSec = (int)timef % 10;
sinVal[0] = 0;
sinVal[1] = sin(timef*TWO_PI/waveVal1);
sinVal[2] = sin(timef*TWO_PI/waveVal2);
sinVal[3] = sin(timef*TWO_PI/waveVal3);
cam.update();
if(cam.isFrameNew()){
//--- camFbo
ofPushMatrix();
camFbo.begin();
ofClear(255);
ofTranslate(ofGetCenter());
float rot = UPSIDEDOWN ? 90 :270;
ofRotate(rot);
ofScale(camScale.x, camScale.y);
cam.draw(0, 0);
camFbo.end();
ofPopMatrix();
ofPixels pix;
camFbo.readToPixels(pix);
contourFinder.setMinAreaRadius(minContour);
contourFinder.setMaxAreaRadius(maxContour);
contourFinder.setThreshold(camThreshold);
contourFinder.findContours(pix);
findGhostThread.setContourFinder(contourFinder);
}
vector<ghostFace> ghosts = findGhostThread.getGhosts();
cout<<"ghosts : "<<ghosts.size()<<endl;
findGhostThread.updateGhost();
findGhostThread.setParams(eMinDist, eMaxDist, eSizeDiff, mMinDist, mMaxDist, mDistDiff);
for(auto & l:leaves){
//--- leaves Parameter ---
l.rescale(leavesScale);
l.setColor(addColor);
l.setThreshold(leavesThreshold);
l.update(ghosts,camScale,UPSIDEDOWN);
if(l.getGroup()) l.calcPos(sinVal[l.getGroup()],30,20);
}
//--- komorebiFbo & brightenFbo ---
if(bKomorebi){
//--- komorebiFbo
komorebiFbo.begin();
ofBackgroundGradient(startColor, endColor,OF_GRADIENT_CIRCULAR);
for(auto & l:leaves) l.draw();
komorebiFbo.end();
//--- blurFbo
blur.setRadius(blurRadius);
blur.process(komorebiFbo);
//--- brightenFbo
brightenFbo.begin();
shader.begin();
{
shader.setUniform1f("brightness", brightness);
shader.setUniformTexture("tex0", komorebiFbo, 0);
komorebiFbo.draw(0,0);
}
shader.end();
brightenFbo.end();
}
//--- contourFbo
contourFbo.begin();
ofClear(255);
contourFinder.draw();
contourFbo.end();
//--- ghostFbo
ghostFbo.begin();
ofClear(255);
findGhostThread.drawPolylineFaces();
// if(bLabel) findGhostThread.drawLabels(bPC,label);
ghostFbo.end();
//--- Osc ---
if(bOsc){
for(auto & g:ghosts){
sendOsc(g.getId(),g.getRect(EYE_R),g.getRect(EYE_L),g.getRect(MOUTH));
}
}
}
//--------------------------------------------------------------
void ofApp::draw(){
ofEnableAlphaBlending();
ofSetColor(255);
if(bKomorebi){
ofPushMatrix();
ofTranslate(ofGetCenter());
brightenFbo.draw(0,0);
ofPopMatrix();
}
if(bCam) {
ofPushMatrix();
ofTranslate(ofGetCenter());
camFbo.draw(0,0);
ofPopMatrix();
}
if(bShowContour){
ofPushMatrix();
ofTranslate(ofGetCenter());
contourFbo.draw(0,0);
ofPopMatrix();
}
if(bShowGhost){
ofPushMatrix();
ofTranslate(ofGetCenter());
ghostFbo.draw(0,0);
ofPopMatrix();
}
if(bLabel){
ofPushMatrix();
findGhostThread.drawLabels();
ofPopMatrix();
}
if (bGui) {
komorebiGui.draw();
ghostGui.draw();
stringstream ss;
ss << "fps : " << ofToString(ofGetFrameRate(), 2) << endl;
ss << "globalSec : " << to_string(globalSec) << endl;
ss << "leaves : " << leaves.size() << endl;
ss << "HOST:" << HOST <<" PORT:" << to_string(PORT) << endl;
ofDrawBitmapStringHighlight(ss.str(), 10, 20);
}
}
//--------------------------------------------------------------
void ofApp::keyPressed(int key){
switch (key) {
case '.':
addLeaf(10);
break;
case ',':
deleteLeaf(10);
break;
case 'f':
ofToggleFullscreen();
//ofGetWindowMode(){}
// ofApp::setup();
break;
case 'o':
bOsc = !bOsc;
break;
case 'g':
bGui = !bGui;
if(bGui){
ofShowCursor();
}else{
ofHideCursor();
}
break;
case 'c':
bCam = !bCam;
break;
// case OF_KEY_LEFT:
// ofSetOrientation(OF_ORIENTATION_90_LEFT);
// break;
// case OF_KEY_RIGHT:
// ofSetOrientation(OF_ORIENTATION_90_RIGHT);
// break;
// case OF_KEY_UP:
// ofSetOrientation(OF_ORIENTATION_DEFAULT);
// break;
case 's':{
komorebiGui.saveToFile("settings/komorebi.xml");
ghostGui.saveToFile("settings/ghost.xml");
break;
}
case 'l':{
komorebiGui.loadFromFile("settings/komorebi.xml");
ghostGui.loadFromFile("settings/ghost.xml");
break;
}
case 'd':{
leaves.clear();
break;
}
case ' ':{
leaves.clear();
addLeaf(500);
break;
}
default:
break;
}
}
//--------------------------------------------------------------
void ofApp::mousePressed(int x, int y, int button){
// cout<<"x:"<<x <<"/ Y:"<<y<<endl;
}
//--------------------------------------------------------------
void ofApp::addLeaf(int num){
for(int i=0;i<num;i++){
int n = (int)ofRandom(originalLeaf.size());
leaf l(originalLeaf[n]);
leaves.push_back(l);
}
}
void ofApp::deleteLeaf(int num){
for(int i=0;i<num;i++){
if(leaves.size()<=0)return;
leaves.pop_back();
}
}
//--------------------------------------------------------------
void ofApp::sendOsc(int ghostId,ofRectangle eyeR,ofRectangle eyeL,ofRectangle mouth){
ofxOscMessage m;
string address = bPC ? "A":"B";
m.setAddress(address);
//--- Id
m.addIntArg(ghostId);
//--- Pos
m.addFloatArg(eyeR.getCenter().x);
m.addFloatArg(eyeR.getCenter().y);
m.addFloatArg(eyeL.getCenter().x);
m.addFloatArg(eyeL.getCenter().y);
m.addFloatArg(mouth.getCenter().x);
m.addFloatArg(mouth.getCenter().y);
//--- Area
m.addFloatArg(eyeR.getArea());
m.addFloatArg(eyeL.getArea());
m.addFloatArg(mouth.getArea());
//--- Mouth w h
m.addFloatArg(mouth.getWidth());
m.addFloatArg(mouth.getHeight());
//--- wind
m.addFloatArg(ofSignedNoise(sinVal[1]));
m.addFloatArg(ofSignedNoise(sinVal[2]));
m.addFloatArg(ofSignedNoise(sinVal[3]));
oscSender.sendMessage(m, false);
}
//=== setup functions ==========================================
//--------------------------------------------------------------
void ofApp::setupCam(int camW, int camH){
vector<ofVideoDevice> devices = cam.listDevices();
for(int i = 0; i < devices.size(); i++){
if(devices[i].bAvailable){
ofLogNotice() << devices[i].id << ": " << devices[i].deviceName;
}else{
ofLogNotice() << devices[i].id << ": " << devices[i].deviceName << " - unavailable ";
}
}
cam.setDeviceID(1);
cam.setup(camW,camH);
cam.setAnchorPercent(0.5, 0.5);
}
//--------------------------------------------------------------
void ofApp::setupLeaf(){
ofDirectory dir;
int nFiles = dir.listDir("leaf");
if(nFiles) {
ofImage img;
ofFbo fbo;
for(int i=0; i<dir.size(); i++) {
string filePath = dir.getPath(i);
img.load(dir.getPath(i));
fbo.allocate(img.getWidth(), img.getHeight());
fbo.begin();
ofClear(255);
img.draw(0,0);
fbo.end();
originalLeaf.push_back(fbo);
}
}
else ofLog(OF_LOG_WARNING) << "Could not find folder";
}
//--------------------------------------------------------------
void ofApp::setupShader(){
string shaderProgram =
OS_STRINGIFY(
uniform sampler2DRect tex0;
uniform float brightness;
void main (void){
vec2 pos = gl_TexCoord[0].st;
vec3 src = texture2DRect(tex0, pos).rgb * brightness;
gl_FragColor = vec4( src , 1.0);
});
shader.setupShaderFromSource(GL_FRAGMENT_SHADER, shaderProgram);
shader.linkProgram();
cout<<"=== shader.isLoaded() : " << (shader.isLoaded() ? "success!":"failed to load")<<endl;
}
//--------------------------------------------------------------
void ofApp::setupGui(){
//--- Komorebi Settings ---//
komorebiGui.setup("=Komorebi=","settings/komorebi.xml",50,100);
komorebiGui.add(bKomorebi.setup("Komorebi",true));
komorebiGui.add(addColor.setup("color", ofColor(22),ofColor(0),ofColor(255)));
komorebiGui.add(blurRadius.setup("blurRadius", 50, 0, 100));
komorebiGui.add(brightness.setup("brightness", 1.5, 0, 5));
komorebiGui.add(leavesScale.setup("scale", 3, 0.5, 10));
komorebiGui.add(waveVal1.setup("waveVal1", 6, 1, 50));
komorebiGui.add(waveVal2.setup("waveVal2", 20, 1, 50));
komorebiGui.add(waveVal3.setup("waveVal3", 10, 1, 50));
komorebiGui.add(leavesThreshold.setup("threshold", 50,10,300));
komorebiGui.add(startColor.setup("startColor", ofColor::lightYellow,ofColor(0),ofColor(255)));
komorebiGui.add(endColor.setup("endColor", ofColor::lightGoldenRodYellow,ofColor(0),ofColor(255)));
//--- Ghost Settings ---//
ghostGui.setup("=Ghost=","settings/ghost.xml",50,120 + komorebiGui.getHeight());
ghostGui.add(bShowGhost.setup("Ghost",true));
ghostGui.add(bCam.setup("Camera",false));
ghostGui.add(bOsc.setup("Osc",true));
ghostGui.add(bPC.setup("PC - A(true)/B(false)",true));
ghostGui.add(UPSIDEDOWN.setup("UPSIDEDOWN",true));
ghostGui.add(bShowContour.setup("ShowContour",false));
ghostGui.add(minContour.setup("minContour", 20, 1, 100));
ghostGui.add(maxContour.setup("maxContour", 100, 1, 500));
ghostGui.add(camThreshold.setup("Threshold", 128, 0, 255));
ghostGui.add(bLabel.setup("bLabel",true));
ghostGui.add(eMinDist.setup("eMinDist", 50, 1, 300));
ghostGui.add(eMaxDist.setup("eMaxDist", 200, 1, 800));
ghostGui.add(eSizeDiff.setup("eSizeDiff", 50, 1, 100));
ghostGui.add(mMinDist.setup("mMinDist", 30, 1, 500));
ghostGui.add(mMaxDist.setup("mMaxDist", 200, 1, 500));
ghostGui.add(mDistDiff.setup("mDistDiff", 30, 1, 500));
// ghostGui.add(bExhibit.setup("EXHIBIT MODE",true));
//--- load XML
komorebiGui.loadFromFile("settings/komorebi.xml");
ghostGui.loadFromFile("settings/ghost.xml");
}
|
f9d13027b7267974a31c24bcf3dc5afafccda6e2
|
[
"Markdown",
"C++"
] | 6
|
C++
|
shiyuugohirao/komorebiGhost2
|
6998916282ac44e3820d98e783b8e4db85f9efe8
|
5cb5e88128a7d1c73f9f9622972e030f8b79bcf0
|
refs/heads/master
|
<repo_name>adamdicarlo/brace-worker-loader<file_sep>/README.md
# Brace (Ace Editor) worker loader (inliner) for webpack
**WARNING** This... _sort of_ works. You end up with the entire Ace (brace) library inside of the worker bundle (as well as your other bundle!) needlessly, and some JS errors as it tries to access `window`, which doesn't exist in that context.
So this was a bad idea and I didn't end up using it.
## Usage
``` javascript
var MyWorker = require("brace-worker!./my-worker.js");
```
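
The child compiler also picks up worker-specific settings from the webpack config: `index.js` reads `this.options.worker.output` and `this.options.worker.plugins` when they are present. A minimal sketch (the `filename` value is only an illustration, not something the loader requires):

``` javascript
// webpack.config.js (sketch)
module.exports = {
  worker: {
    // merged into the child compiler's output options
    output: {
      filename: '[hash].ace.worker.js'
    },
    // extra plugins applied only to the worker bundle
    plugins: []
  }
};
```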
## License
MIT (http://www.opensource.org/licenses/mit-license.php)
<file_sep>/index.js
var WebWorkerTemplatePlugin = require('webpack/lib/webworker/WebWorkerTemplatePlugin')
var SingleEntryPlugin = require('webpack/lib/SingleEntryPlugin')
var loaderUtils = require('loader-utils')
var uglify = require('uglify-js')
var util = require('util')
/**
* This loader is based on
* - https://github.com/thlorenz/brace/blob/master/build/stringify-workers.js, and
* - https://github.com/webpack/worker-loader/blob/master/index.js
*/
module.exports = function () {}
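// The real work happens in the pitch phase below: a child compilation bundles the worker
// entry with WebWorkerTemplatePlugin, and the resulting source is minified and exported
// inline as `module.exports.src` (with `module.exports.id` taken from the ?id= query).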
module.exports.pitch = function inlineAceWorker (request) {
if (!this.webpack) throw new Error('Only usable with webpack')
this.cacheable && this.cacheable()
this.addDependency(this.resourcePath)
var callback = this.async()
var query = loaderUtils.parseQuery(this.query)
var outputOptions = {
filename: '[hash].worker.js',
chunkFilename: '[id].[hash].worker.js',
namedChunkFilename: null
}
if (this.options && this.options.worker && this.options.worker.output) {
for (var name in this.options.worker.output) {
outputOptions[name] = this.options.worker.output[name]
}
}
var workerCompiler = this._compilation.createChildCompiler('worker', outputOptions)
workerCompiler.apply(new WebWorkerTemplatePlugin(outputOptions))
workerCompiler.apply(new SingleEntryPlugin(this.context, '!!' + request, 'main'))
if (this.options && this.options.worker && this.options.worker.plugins) {
this.options.worker.plugins.forEach(function(plugin) {
workerCompiler.apply(plugin)
})
}
var subCache = 'subcache ' + __dirname + ' ' + request
workerCompiler.plugin('compilation', function(compilation) {
if (compilation.cache) {
if (!compilation.cache[subCache])
compilation.cache[subCache] = {}
compilation.cache = compilation.cache[subCache]
}
})
workerCompiler.runAsChild(function(err, entries, compilation) {
if (err) return callback(err)
compilation.fileDependencies.forEach(function (dep) {
this.addDependency(dep)
}, this)
compilation.contextDependencies.forEach(function (dep) {
this.addContextDependency(dep)
}, this)
if (entries[0]) {
var workerFile = entries[0].files[0]
var workerId = query.id
var code = "module.exports.id = '" + workerId + "'\n" +
'module.exports.src = ' + JSON.stringify(minify(compilation.assets[workerFile].source()))
return callback(null, code)
} else {
return callback(null, null)
}
}.bind(this))
}
function minify (code) {
var compressor = uglify.Compressor()
var ast = uglify.parse(code)
ast.figure_out_scope()
return ast.transform(compressor).print_to_string()
}
|
80ab701e77add737783d250f5fbc154c9e079e2c
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
adamdicarlo/brace-worker-loader
|
5ad9f146116b7c92eb4147b5bdf9358ce4a98e2a
|
27c088d6327ddb1f629f78edda90a9abf1ee0581
|
refs/heads/master
|
<file_sep>import os
os.environ['TF_CUDNN_USE_AUTOTUNE']='0'
from math import sqrt
import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.contrib import ffmpeg
import sys
import glob
import time
import json
import pdb
from NetBuilder import NetBuilder
from layer_generator import generate
import memory_saving_gradients
from tensorflow.python.ops import gradients
#import mem_util
# monkey patch tf.gradients to point to our custom version, with automatic checkpoint selection
def gradients_memory(ys, xs, grad_ys=None, **kwargs):
return memory_saving_gradients.gradients(ys, xs, grad_ys, checkpoints='memory', **kwargs)
gradients.__dict__["gradients"] = memory_saving_gradients.gradients_collection
def tf_record_CNN_spherical(tone_version,itd_tones,ild_tones,manually_added,freq_label,all_positions_bkgd,testing,branched,zero_padded,stacked_channel,model_version,num_epochs,train_path_pattern,bkgd_train_path_pattern,arch_ID,config_array,files,num_files,newpath):
bkgd_training_paths = glob.glob(bkgd_train_path_pattern)
training_paths = glob.glob(train_path_pattern)
print (bkgd_training_paths)
###Do not change parameters below unless altering network###
BKGD_SIZE = [72,30000]
STIM_SIZE = [72,89999]
TONE_SIZE = [72,59099]
ITD_TONE_SIZE = [72,39690]
if zero_padded:
STIM_SIZE = [72,30000]
if stacked_channel:
STIM_SIZE = [36,30000, 2]
BKGD_SIZE = [36,30000,2]
SNR =10
SNR_max = 40
SNR_min = 5
n_classes_localization=72
n_classes_recognition=780
localization_bin_resolution=5
#Optimization Params
batch_size=16
learning_rate = 1e-3
loss_exponent = 12
loss_scale = 2**loss_exponent
bn_training_state = True
dropout_training_state = True
training_state = True
#Queue parameters
dequeue_min = 80
dequeue_min_main = 1000
#num_epochs = None
#Change for network precision,must match input data type
filter_dtype = tf.float32
#Display interval training statistics
display_step = 25
if itd_tones:
TONE_SIZE = ITD_TONE_SIZE
if ild_tones:
itd_tones = True
if testing:
bn_training_state = False
dropout_training_state = False
training_state= False
num_epochs = 1
SNR_max = 35.0
SNR_min = 30.0
#mean_subbands = np.load("mean_subband_51400.npy")/51400
#tf_mean_subbands = tf.constant(mean_subbands,dtype=filter_dtype)
def check_speed():
for i in range(30):
sess.run(subbands_batch)
start_time = time.time()
for _ in range(30):
time.sleep(0.5)
print(time.time() - start_time)
print("Len:",sess.run('example_queue/random_shuffle_queue_Size:0'))
with tf.device("/cpu:0"):
###READING QUEUE MACHINERY###
def add_labels(paths):
return [(stim,stim.split('_')[-1].split('a')[0]) for stim in
paths]
def rms(wav):
square = tf.square(wav)
mean_val = tf.reduce_mean(square)
return tf.sqrt(mean_val)
def combine_signal_and_noise_stacked_channel(signal,background,snr,delay):
sig_len= signal.shape[1] - delay
sig = tf.slice(signal,[0,0,0],[36,sig_len,2])
max_val = tf.reduce_max(sig)
sig_rms = rms(tf.reduce_sum(sig,[0,2]))
sig = tf.div(sig,sig_rms)
#sig = tf.Print(sig, [tf.reduce_max(sig)],message="\nMax SIG:")
sf = tf.pow(tf.constant(10,dtype=tf.float32),tf.div(snr,tf.constant(20,dtype=tf.float32)))
bak_rms = rms(tf.reduce_sum(background,[0,2]))
#bak_rms = tf.Print(bak_rms, [tf.reduce_max(bak_rms)],message="\nNoise RMS:")
sig_rms = rms(tf.reduce_sum(sig,[0,2]))
scaling_factor = tf.div(tf.div(sig_rms,bak_rms),sf)
#scaling_factor = tf.Print(scaling_factor, [scaling_factor],message="\nScaling Factor:")
noise = tf.scalar_mul(scaling_factor,background)
#noise = tf.Print(noise, [tf.reduce_max(noise)],message="\nMax Noise:")
front = tf.slice(noise,[0,0,0],[36,delay,2])
middle = tf.slice(noise,[0,delay,0],[36,sig_len,2])
end = tf.slice(noise,[0,(delay+int(sig_len)),0],[36,-1,2])
middle_added = tf.add(middle,sig)
new_sig = tf.concat([front,middle_added,end],1)
#new_sig = sig
rescale_factor = tf.div(max_val,tf.reduce_max(new_sig))
#rescale_factor = tf.Print(rescale_factor, [rescale_factor],message="\nRescaling Factor:")
new_sig = tf.scalar_mul(rescale_factor,new_sig)
new_sig_rectified = tf.nn.relu(new_sig)
new_sig_nonlin = tf.pow(new_sig_rectified,0.3)
new_sig_reshaped = tf.reshape(new_sig_nonlin,[36,30000,2])
#new_sig_reshaped = tf.reshape(new_sig,[72,30000,1])
#return (signal, background,noise,new_sig_reshaped)
return new_sig_reshaped
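        # Mixes the foreground subbands into the background at the requested SNR (in dB):
        # the signal is RMS-normalized, the noise is scaled so that 20*log10(sig_rms/noise_rms)
        # equals snr, the mix is rescaled to the original peak, half-wave rectified and
        # power-compressed (**0.3) before being reshaped for the network input.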
def combine_signal_and_noise(signal,background,snr,delay):
sig_len= signal.shape[1] - delay
sig = tf.slice(signal,[0,0],[72,sig_len])
max_val = tf.reduce_max(sig)
sig_rms = rms(tf.reduce_sum(sig,0))
sig = tf.div(sig,sig_rms)
#sig = tf.Print(sig, [tf.reduce_max(sig)],message="\nMax SIG:")
sf = tf.pow(tf.constant(10,dtype=tf.float32),tf.div(snr,tf.constant(20,dtype=tf.float32)))
bak_rms = rms(tf.reduce_sum(background,0))
#bak_rms = tf.Print(bak_rms, [tf.reduce_max(bak_rms)],message="\nNoise RMS:")
sig_rms = rms(tf.reduce_sum(sig,0))
scaling_factor = tf.div(tf.div(sig_rms,bak_rms),sf)
#scaling_factor = tf.Print(scaling_factor, [scaling_factor],message="\nScaling Factor:")
noise = tf.scalar_mul(scaling_factor,background)
#noise = tf.Print(noise, [tf.reduce_max(noise)],message="\nMax Noise:")
front = tf.slice(noise,[0,0],[72,delay])
middle = tf.slice(noise,[0,delay],[72,sig_len])
end = tf.slice(noise,[0,(delay+int(sig_len))],[72,-1])
middle_added = tf.add(middle,sig)
new_sig = tf.concat([front,middle_added,end],1)
#new_sig = sig
rescale_factor = tf.div(max_val,tf.reduce_max(new_sig))
#rescale_factor = tf.Print(rescale_factor, [rescale_factor],message="\nRescaling Factor:")
new_sig = tf.scalar_mul(rescale_factor,new_sig)
new_sig_rectified = tf.nn.relu(new_sig)
new_sig_nonlin = tf.pow(new_sig_rectified,0.3)
new_sig_reshaped = tf.reshape(new_sig_nonlin,[72,30000,1])
#new_sig_reshaped = tf.reshape(new_sig,[72,30000,1])
#return (signal, background,noise,new_sig_reshaped)
return new_sig_reshaped
#Best to read https://www.tensorflow.org/api_guides/python/reading_data#Reading_from_files
###READING QUEUE MACHINERY###
#Best to read https://www.tensorflow.org/api_guides/python/reading_data#Reading_from_files
feature = {'train/image': tf.FixedLenFeature([], tf.string),
'train/azim': tf.FixedLenFeature([], tf.int64),
'train/elev': tf.FixedLenFeature([], tf.int64),
'train/image_height': tf.FixedLenFeature([], tf.int64),
'train/image_width': tf.FixedLenFeature([], tf.int64)
}
if tone_version:
feature = {'train/image': tf.FixedLenFeature([], tf.string),
'train/label': tf.FixedLenFeature([], tf.int64),
'train/image_height': tf.FixedLenFeature([], tf.int64),
'train/image_width': tf.FixedLenFeature([], tf.int64),
'train/freq': tf.FixedLenFeature([], tf.int64)
}
if freq_label:
feature = {'train/azim': tf.FixedLenFeature([], tf.int64),
'train/elev': tf.FixedLenFeature([], tf.int64),
'train/image': tf.FixedLenFeature([], tf.string),
'train/image_height': tf.FixedLenFeature([], tf.int64),
'train/image_width': tf.FixedLenFeature([], tf.int64),
'train/freq': tf.FixedLenFeature([], tf.int64)
}
if itd_tones:
feature = {'train/azim': tf.FixedLenFeature([], tf.int64),
'train/elev': tf.FixedLenFeature([], tf.int64),
'train/image': tf.FixedLenFeature([], tf.string),
'train/image_height': tf.FixedLenFeature([], tf.int64),
'train/image_width': tf.FixedLenFeature([], tf.int64),
'train/freq': tf.FixedLenFeature([], tf.int64)
}
if branched:
feature = {'train/image': tf.FixedLenFeature([], tf.string),
'train/azim': tf.FixedLenFeature([], tf.int64),
'train/elev': tf.FixedLenFeature([], tf.int64),
'train/class_num': tf.FixedLenFeature([], tf.int64),
'train/image_height': tf.FixedLenFeature([], tf.int64),
'train/image_width': tf.FixedLenFeature([], tf.int64)
}
# Define a reader and read the next record
def parse_tfrecord_example(record):
# Decode the record read by the reader
features = tf.parse_single_example(record, features=feature)
# Convert the image data from string back to the numbers
image = tf.decode_raw(features['train/image'], tf.float32)
#shape = tf.cast(features['train/image_shape'],tf.int32)
height = tf.cast(features['train/image_height'],tf.int32)
width = tf.cast(features['train/image_width'],tf.int32)
# Cast label data into int32
if not tone_version:
azim = tf.cast(features['train/azim'], tf.int32)
elev = tf.cast(features['train/elev'], tf.int32)
label_div_const = tf.constant([localization_bin_resolution])
if not manually_added:
azim = tf.div(azim,label_div_const)
elev = tf.div(elev,label_div_const)
image = tf.reshape(image,STIM_SIZE)
# Reshape image data into the original shape
if branched:
class_num = tf.cast(features['train/class_num'], tf.int32)
return image, azim, elev, class_num
if freq_label:
tone = tf.cast(features['train/freq'],tf.int32)
return image, azim, elev, tone
return image, azim, elev
if tone_version:
image = tf.reshape(image, TONE_SIZE)
tone = tf.cast(features['train/freq'],tf.int32)
if itd_tones:
azim = tf.cast(features['train/azim'], tf.int32)
elev = tf.cast(features['train/elev'], tf.int32)
label_div_const = tf.constant([localization_bin_resolution])
if not manually_added:
azim = tf.div(azim,label_div_const)
elev = tf.div(elev,label_div_const)
return image, azim, elev, tone
else:
label = tf.cast(features['train/label'], tf.int32)
label_div_const = tf.constant([10])
label = tf.div(label,label_div_const)
return image,label, tone
# Creates batches by randomly shuffling tensors
dataset = tf.data.Dataset.list_files(train_path_pattern).shuffle(len(training_paths))
dataset = dataset.apply(tf.contrib.data.parallel_interleave(lambda x:tf.data.TFRecordDataset(x,
compression_type="GZIP").map(parse_tfrecord_example,num_parallel_calls=1),
cycle_length=10, block_length=16))
dataset = dataset.shuffle(buffer_size=200)
dataset = dataset.repeat(num_epochs)
dataset = dataset.prefetch(100)
iterator = dataset.make_one_shot_iterator()
if itd_tones:
images, azims, elevs, tones = iterator.get_next()
elif tone_version:
images,labels,tones = iterator.get_next()
else:
if branched:
images,azims,elevs,class_num= iterator.get_next()
elif freq_label:
images, azims, elevs, tones = iterator.get_next()
else:
images,azims,elevs= iterator.get_next()
###READING QUEUE MACHINERY###
if all_positions_bkgd:
bkgd_feature = {'train/image': tf.FixedLenFeature([], tf.string),
'train/image_height': tf.FixedLenFeature([], tf.int64),
'train/image_width': tf.FixedLenFeature([], tf.int64)
}
else:
bkgd_feature = {'train/label': tf.FixedLenFeature([], tf.int64),
'train/image': tf.FixedLenFeature([], tf.string),
'train/image_height': tf.FixedLenFeature([], tf.int64),
'train/image_width': tf.FixedLenFeature([], tf.int64)
}
# Create a list of filenames and pass it to a queue
bkgd_filename_queue = tf.train.string_input_producer(bkgd_training_paths,
shuffle=True,
capacity=len(bkgd_training_paths))
# Define a reader and read the next record
options = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.GZIP)
bkgd_reader = tf.TFRecordReader(options=options)
_, bkgd_serialized_example = bkgd_reader.read(bkgd_filename_queue)
# Decode the record read by the reader
def parse_tfrecord_background(record):
bkgd_features = tf.parse_single_example(record, features=bkgd_feature)
# Convert the image data from string back to the numbers
bkgd_image = tf.decode_raw(bkgd_features['train/image'], tf.float32)
bkgd_height = tf.cast(bkgd_features['train/image_height'],tf.int32)
bkgd_width = tf.cast(bkgd_features['train/image_width'],tf.int32)
# Reshape image data into the original shape
bkgd_image = tf.reshape(bkgd_image, BKGD_SIZE)
if not all_positions_bkgd:
bkgd_label = tf.cast(bkgd_features['train/label'], tf.int32)
return bkgd_image,bkgd_label
return bkgd_image
dataset_bkgd = tf.data.Dataset.list_files(bkgd_train_path_pattern).shuffle(len(bkgd_training_paths))
dataset_bkgd = dataset_bkgd.apply(tf.contrib.data.parallel_interleave(lambda x:tf.data.TFRecordDataset(x,
compression_type="GZIP").map(parse_tfrecord_background,num_parallel_calls=1),
cycle_length=10, block_length=16))
dataset_bkgd = dataset_bkgd.shuffle(buffer_size=200)
dataset_bkgd = dataset_bkgd.repeat()
dataset_bkgd = dataset_bkgd.prefetch(100)
iterator_bkgd = dataset_bkgd.make_one_shot_iterator()
if all_positions_bkgd:
bkgd_images = iterator_bkgd.get_next()
else:
bkgd_images, bkgd_labels = iterator_bkgd.get_next()
SNR = tf.random_uniform([],minval=SNR_min,maxval=SNR_max,name="snr_gen")
if stacked_channel:
images = tf.slice(images,[0,0,0],[36,30000,2])
bkgd_images = tf.slice(bkgd_images,[0,0,0],[36,30000,2])
combined_subbands = combine_signal_and_noise_stacked_channel(images,bkgd_images,SNR,0)
else:
images = tf.slice(images,[0,0],[72,30000])
bkgd_images = tf.slice(bkgd_images,[0,0],[72,30000])
combined_subbands = combine_signal_and_noise(images,bkgd_images,SNR,0)
combined_subbands = tf.cast(combined_subbands, filter_dtype)
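        # Batch the mixed examples with a shuffle queue; which label tensors ride along depends on
        # the dataset variant (tones, branched task, frequency labels, or plain azimuth/elevation).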
if itd_tones:
subbands_batch, azims_batch,elevs_batch,tones_batch = tf.train.shuffle_batch([combined_subbands,azims,elevs,tones],batch_size=batch_size,capacity=2000+batch_size*4,num_threads=5,min_after_dequeue=dequeue_min_main,name="example_queue")
elif tone_version:
subbands_batch, azims_batch,tones_batch = tf.train.shuffle_batch([combined_subbands,labels,tones],batch_size=batch_size,capacity=2000+batch_size*4,num_threads=5,min_after_dequeue=dequeue_min_main,name="example_queue")
elif branched:
subbands_batch, azims_batch, elevs_batch, class_num_batch= tf.train.shuffle_batch([combined_subbands,azims,elevs,class_num],batch_size=batch_size,capacity=2000+batch_size*4,num_threads=5,min_after_dequeue=dequeue_min_main,name="example_queue")
elif freq_label:
subbands_batch, azims_batch,elevs_batch,tones_batch = tf.train.shuffle_batch([combined_subbands,azims,elevs,tones],batch_size=batch_size,capacity=2000+batch_size*4,num_threads=5,min_after_dequeue=dequeue_min_main,name="example_queue")
else:
subbands_batch, azims_batch, elevs_batch = tf.train.shuffle_batch([combined_subbands,azims,elevs],batch_size=batch_size,capacity=2000+batch_size*4,num_threads=5,min_after_dequeue=dequeue_min_main,name="example_queue")
print("queues created")
###END READING QUEUE MACHINERY###
def put_kernels_on_grid (kernel, pad = 1):
'''Visualize conv. filters as an image (mostly for the 1st layer).
Arranges filters into a grid, with some paddings between adjacent filters.
Args:
kernel: tensor of shape [Y, X, NumChannels, NumKernels]
pad: number of black pixels around each filter (between them)
Return:
Tensor of shape [1, (Y+2*pad)*grid_Y, (X+2*pad)*grid_X, NumChannels].
'''
# get shape of the grid. NumKernels == grid_Y * grid_X
def factorization(n):
for i in range(int(sqrt(float(n))), 0, -1):
if n % i == 0:
if i == 1: print('Who would enter a prime number of filters')
return (i, int(n / i))
(grid_Y, grid_X) = factorization (kernel.get_shape()[3].value)
print ('grid: %d = (%d, %d)' % (kernel.get_shape()[3].value, grid_Y, grid_X))
x_min = tf.reduce_min(kernel)
x_max = tf.reduce_max(kernel)
kernel = (kernel - x_min) / (x_max - x_min)
# pad X and Y
x = tf.pad(kernel, tf.constant( [[pad,pad],[pad, pad],[0,0],[0,0]] ), mode = 'CONSTANT')
# X and Y dimensions, w.r.t. padding
Y = kernel.get_shape()[0] + 2 * pad
X = kernel.get_shape()[1] + 2 * pad
channels = kernel.get_shape()[2]
# put NumKernels to the 1st dimension
x = tf.transpose(x, (3, 0, 1, 2))
# organize grid on Y axis
x = tf.reshape(x, tf.stack([grid_X, Y * grid_Y, X, channels]))
# switch X and Y axes
x = tf.transpose(x, (0, 2, 1, 3))
# organize grid on X axis
x = tf.reshape(x, tf.stack([1, X * grid_X, Y * grid_Y, channels]))
# back to normal order (not combining with the next step for clarity)
x = tf.transpose(x, (2, 1, 3, 0))
# to tf.image_summary order [batch_size, height, width, channels],
# where in this case batch_size == 1
x = tf.transpose(x, (3, 0, 1, 2))
# scaling to [0, 255] is not necessary for tensorboard
return x
#Many lines are commented out to allow for quick architecture changes
    #TODO: This should be abstracted so that architectures are defined by some sort of
#config dictionary or file
def gradients_with_loss_scaling(loss, loss_scale):
"""Gradient calculation with loss scaling to improve numerical stability
when training with float16.
"""
grads = [(grad[0] / loss_scale,grad[1]) for grad in
tf.train.AdamOptimizer(learning_rate=learning_rate,epsilon=1e-4).
compute_gradients(loss * loss_scale,colocate_gradients_with_ops=True)]
return grads
def float32_variable_storage_getter(getter, name, shape=None, dtype=None,
initializer=tf.contrib.layers.xavier_initializer(uniform=False),
regularizer=None,trainable=True,*args, **kwargs):
storage_dtype = tf.float32 if trainable else dtype
variable = getter(name, shape, dtype=storage_dtype,initializer=initializer,
regularizer=regularizer,trainable=trainable,*args, **kwargs)
if trainable and dtype != tf.float32:
variable = tf.cast(variable, dtype)
return variable
strides =1
time_stride =1
freq_stride=2
time_pool = 4
freq_pool =1
k=2
k_wide =8
padding='SAME'
# config_array=[[["/gpu:0"],['conv',[2,50,32],[2,1]],['relu'],['pool',[1,4]]],[["/gpu:1"],['conv',[4,20,64],[1,1]],['bn'],['relu'],['pool',[1,4]],['conv',[8,8,128],[1,1]],['bn'],['relu'],['pool',[1,4]],['conv',[8,8,256],[1,1]],['bn'],['relu'],['pool',[1,8]],['fc',512],['fc_bn'],['fc_relu'],['dropout'],['out',]]]
# print(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,scope='fp32_storage'))
# print(subbands_batch)
    ####TEMPORARY OVERRIDE####
net=NetBuilder()
if branched:
out,out2=net.build(config_array,subbands_batch,training_state,dropout_training_state,filter_dtype,padding,n_classes_localization,n_classes_recognition,branched)
else:
out=net.build(config_array,subbands_batch,training_state,dropout_training_state,filter_dtype,padding,n_classes_localization,n_classes_recognition,branched)
##Fully connected Layer 2
#wd2 = tf.get_variable('wd2',[512,512],filter_dtype)
#dense_bias2 = tf.get_variable('wb6',[512],filter_dtype)
#fc2 = tf.add(tf.matmul(fc1_do, wd2), dense_bias2)
#fc2 = tf.nn.relu(fc2)
#fc2_do = tf.layers.dropout(fc2,training=dropout_training_state)
# Construct model
#fix labels dimension to be one less that logits dimension
#Testing small subbatch
labels_batch_cost = tf.squeeze(azims_batch)
#labels_batch_cost = tf.squeeze(subbands_batch_labels,axis=[1,2])
if not tone_version:
labels_batch_sphere = tf.add(tf.scalar_mul(tf.constant(36,dtype=tf.int32),elevs_batch),
azims_batch)
else:
labels_batch_sphere = azims_batch
labels_batch_cost_sphere = tf.squeeze(labels_batch_sphere)
# Define loss and optimizer
# On r1.1 reduce mean doees not work(returns nans) with float16 vals
if branched:
cost1 = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=out,labels=labels_batch_cost_sphere))
cost2 = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=out2,labels=class_num_batch))
cost = cost1 +cost2
else:
cost = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(logits=out,labels=labels_batch_cost_sphere))
#cost = tf.Print(cost, [labels],message="\nLabel:",summarize=32)
cond_dist = tf.nn.softmax(out)
if branched:
cond_dist2 = tf.nn.softmax(out2)
#cost = tf.Print(cost, [tf.argmax(out, 1)],message="\nOut:",summarize=32)
# init_op = tf.group(tf.global_variables_initializer(),
# tf.local_variables_initializer())
# config = tf.ConfigProto(allow_soft_placement=True,
# inter_op_parallelism_threads=0, intra_op_parallelism_threads=0)
# sess = tf.Session(config=config)
# sess.run(init_op)
# coord = tf.train.Coordinator()
# threads = tf.train.start_queue_runners(sess=sess,coord=coord)
# print(sess.run(cost))
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
update_grads = tf.train.AdamOptimizer(learning_rate=learning_rate,epsilon=1e-4).minimize(cost)
# Evaluate model
correct_pred = tf.equal(tf.argmax(out, 1), tf.cast(labels_batch_cost_sphere,tf.int64))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
top_k = tf.nn.top_k(out,5)
if branched:
correct_pred2 = tf.equal(tf.argmax(out2, 1), tf.cast(class_num_batch,tf.int64))
accuracy2 = tf.reduce_mean(tf.cast(correct_pred2, tf.float32))
top_k2 = tf.nn.top_k(out2,5)
#test_pred = conv_net(tf.cast(test_images,tf.float32),weights,biases)
#correct_pred = tf.equal(tf.argmax(test_pred, 1), tf.cast(test_labels,tf.int64))
#test_acc = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
##Check special cases(made by hand in testdata.json
#custom_pred = conv_net(tf_test_data,weights,biases)
#custom_correct_pred = tf.equal(tf.argmax(custom_pred, 1), tf.cast(tf_test_label,tf.int64))
#custom_test_acc = tf.reduce_mean(tf.cast(custom_correct_pred, tf.float32))
# Initializing the variables
#
# Check_op seems to take up a lot of space on the GPU
#check_op = tf.add_check_numerics_ops()
init_op = tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())
# Launch the graph
#with tf.Session() as sess:
#run_metadata = tf.RunMetadata()
config = tf.ConfigProto(allow_soft_placement=True,
inter_op_parallelism_threads=0, intra_op_parallelism_threads=0)
sess = tf.Session(config=config)
sess.run(init_op)
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess,coord=coord)
print("Filling Queues...")
if branched:
print("Class Labels:" + str(sess.run(class_num_batch)))
#sess.run(cost)
time.sleep(30)
print("Examples in Queue:",sess.run('example_queue/random_shuffle_queue_Size:0'))
    # ##This code allows for tracing ops across GPUs; you often have to run it twice
# ##to get sensible traces
#
# #sess.run(optimizer,options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE),
# # run_metadata=run_metadata)
# #from tensorflow.python.client import timeline
# #trace = timeline.Timeline(step_stats=run_metadata.step_stats)
# #trace_file.close()
if not testing:
saver = tf.train.Saver(max_to_keep=None)
learning_curve = []
errors_count =0
try:
step = 1
while not coord.should_stop():
#sess.run([optimizer,check_op])
try:
if step ==1:
if not num_files == 1:
latest_addition = max(files, key=os.path.getctime)
latest_addition_name = latest_addition.split(".") [1]
saver.restore(sess,newpath+"/model."+latest_addition_name)
step=int(latest_addition_name.split("-") [1])
else:
sess.run(update_grads)
else:
sess.run(update_grads)
# sess.run(update_grads)
except tf.errors.InvalidArgumentError as e:
print(e.message)
errors_count+=1
continue
if step % display_step == 0:
# Calculate batch loss and accuracy
loss, acc, az= sess.run([cost, accuracy,azims_batch])
print("Examples in Queue:",sess.run('example_queue/random_shuffle_queue_Size:0'))
#print("Batch Labels: ",az)
print("Iter " + str(step*batch_size) + ", Minibatch Loss= " + \
"{:.6f}".format(loss) + ", Training Accuracy= " + \
"{:.5f}".format(acc))
if step%5000 ==0:
print("Checkpointing Model...")
saver.save(sess,newpath+'/model.ckpt',global_step=step,write_meta_graph=False)
learning_curve.append([int(step*batch_size),float(acc)])
print("Checkpoint Complete")
#Just for testing the model/call_model
if step == 200000:
print("Break!")
break
step += 1
except tf.errors.OutOfRangeError:
print("Out of Range Error. Optimization Finished")
coord.request_stop()
except tf.errors.DataLossError as e:
print("Corrupted file found!!")
pdb.set_trace()
except tf.errors.ResourceExhaustedError as e:
gpu=e.message
print("Out of memory error")
error= "Out of memory error"
with open(newpath+'/train_error.json','w') as f:
json.dump(arch_ID,f)
json.dump(error,f)
json.dump(gpu,f)
coord.request_stop()
return False
finally:
print(errors_count)
print("Training stopped.")
with open(newpath+'/curve_no_resample_w_cutoff_vary_loc.json', 'w') as f:
json.dump(learning_curve,f)
if testing:
##Testing loop
batch_acc = []
batch_acc2 = []
batch_conditional = []
batch_conditional2 = []
saver = tf.train.Saver(max_to_keep=None)
saver.restore(sess,newpath+"/model.ckpt-"+str(model_version))
step = 0
try:
while not coord.should_stop():
if tone_version or freq_label:
pred, ts, ls, cd = sess.run([correct_pred,tones_batch,azims_batch,cond_dist])
batch_acc += np.dstack((pred,np.squeeze(ts),np.squeeze(ls))).tolist()[0]
batch_conditional += [(cond,label,freq) for cond,label,freq in zip(cd,np.squeeze(ls),np.squeeze(ts))]
if branched:
pred2, ts, ls, cd2 = sess.run([correct_pred2,tones_batch,azims_batch,cond_dist2])
batch_acc2 += np.dstack((pred2,np.squeeze(ts),np.squeeze(ls))).tolist()[0]
batch_conditional2 += [(cond,label,freq) for cond,label,freq in zip(cd2,np.squeeze(ls),np.squeeze(ts))]
else:
pred, ts, cd = sess.run([correct_pred,azims_batch,cond_dist])
batch_acc += np.dstack((pred,np.squeeze(ts))).tolist()[0]
batch_conditional += [(cond,label) for cond,label in zip(cd,np.squeeze(ts))]
if branched:
pred2, ts, cd2 = sess.run([correct_pred2,class_num_batch,cond_dist2])
batch_acc2 += np.dstack((pred2,np.squeeze(ts))).tolist()[0]
batch_conditional2 += [(cond,label) for cond,label in zip(cd2,np.squeeze(ts))]
step+=1
if step % display_step ==0:
print("Iter "+str(step*batch_size))
#if not tone_version:
# print("Current Accuracy:",sum(batch_acc)/len(batch_acc))
if step == 65000:
print ("Break!")
break
except tf.errors.ResourceExhaustedError:
print("Out of memory error")
error= "Out of memory error"
with open(newpath+'/test_error.json','w') as f:
json.dump(arch_ID,f)
json.dump(error,f)
coord.request_stop()
except tf.errors.OutOfRangeError:
print("Out of Range Error. Optimization Finished")
finally:
if tone_version:
np.save(newpath+'/plot_array_test.npy',batch_acc)
np.save(newpath+'/batch_conditional_test.npy',batch_conditional)
acc_corr=[pred[0] for pred in batch_acc]
acc_accuracy=sum(acc_corr)/len(acc_corr)
if branched:
np.save(newpath+'/plot_array_test_2.npy',batch_acc2)
np.save(newpath+'/batch_conditional_test_2.npy',batch_conditional2)
acc_corr2=[pred2[0] for pred2 in batch_acc2]
acc_accuracy2=sum(acc_corr2)/len(acc_corr2)
with open(newpath+'/accuracies_itd.json','w') as f:
json.dump(acc_accuracy,f)
if branched:
json.dump(acc_accuracy2,f)
else:
stimuli_name = train_path_pattern.split("/")[-2]
np.save(newpath+'/plot_array_padded_{}_iter{}.npy'.format(stimuli_name,model_version),batch_acc)
np.save(newpath+'/batch_conditional_{}_iter{}.npy'.format(stimuli_name,model_version),batch_conditional)
acc_corr=[pred[0] for pred in batch_acc]
acc_accuracy=sum(acc_corr)/len(acc_corr)
if branched:
np.save(newpath+'/plot_array_stim_vary_env_2.npy',batch_acc2)
np.save(newpath+'/batch_conditional_test_2.npy',batch_conditional2)
acc_corr2=[pred2[0] for pred2 in batch_acc2]
acc_accuracy2=sum(acc_corr2)/len(acc_corr2)
with open(newpath+'/accuracies_test.json','w') as f:
json.dump(acc_accuracy,f)
if branched:
json.dump(acc_accuracy2,f)
coord.request_stop()
#acc= sess.run(test_acc)
#print("Test Accuracy= "+"{:.5f}".format(acc))
#customs = sess.run(custom_test_acc)
#correct_pred = sess.run(custom_correct_pred)
#with open('custom_out2.json', 'w') as f:
# json.dump([test_data_img,correct_pred.tolist()],f)
#print("ACC for special cases:")
#print(customs)
#first_layer = sess.run(weights['wc1'])
#activation1, activation2 = sess.run([conv1,conv3])
#with open('activations.json','w') as f:
# json.dump([activation1.tolist(),activation2.tolist()],f)
#tf.get_variable_scope().reuse_variables()
#first_layer = [var for var in tf.global_variables() if var.op.name=="wc1"][0]
#second_layer = [var for var in tf.global_variables() if var.op.name=="wc2"][0]
#weights_image = put_kernels_on_grid(first_layer)
#weights_image2 = put_kernels_on_grid(second_layer)
#np_weights1, np_weights2 = sess.run([weights_image,weights_image2])
#with open('conv1weights.json','w') as f:
# json.dump([np_weights1.tolist(),np_weights2.tolist()],f)
#
sess.close()
tf.reset_default_graph()
<file_sep>import os
os.environ['TF_CUDNN_USE_AUTOTUNE']='0'
from math import sqrt,ceil
import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.contrib import ffmpeg
import sys
import glob
import time
import json
import pdb
from NetBuilder_valid_pad import NetBuilder
from layer_generator import generate
from tfrecords_iterator_get_power import build_tfrecords_iterator
from google.protobuf.json_format import MessageToJson
from parse_nested_dictionary import parse_nested_dictionary
import collections
import scipy.signal as signallib
from pycochleagram import erbfilter as erb
from pycochleagram import subband as sb
from scipy.io.wavfile import write
from get_tensor_metrics import *
import memory_saving_gradients
from tensorflow.python.ops import gradients
#import mem_util
# monkey patch tf.gradients to point to our custom version, with automatic checkpoint selection
def gradients_memory(ys, xs, grad_ys=None, **kwargs):
return memory_saving_gradients.gradients(ys, xs, grad_ys, checkpoints='memory', **kwargs)
gradients.__dict__["gradients"] = memory_saving_gradients.gradients_speed
def tf_record_CNN_spherical(all_positions_bkgd,background_textures,testing,zero_padded,stacked_channel,num_epochs,train_path_pattern,bkgd_train_path_pattern,newpath,SNR_max=40,SNR_min=5):
bkgd_training_paths = glob.glob(bkgd_train_path_pattern)
training_paths = glob.glob(train_path_pattern)
###Do not change parameters below unless altering network###
narrowband_noise = False
manually_added = False
BKGD_SIZE = [78,48000]
STIM_SIZE = [78,89999]
TONE_SIZE = [78,59099]
ITD_TONE_SIZE = [78,39690]
if zero_padded:
STIM_SIZE = [78,48000]
if stacked_channel:
STIM_SIZE = [39,48000, 2]
BKGD_SIZE = [39,48000,2]
n_classes_localization=504
n_classes_recognition=780
localization_bin_resolution=5
#Optimization Params
batch_size=16
learning_rate = 1e-3
loss_exponent = 12
loss_scale = 2**loss_exponent
bn_training_state = True
dropout_training_state = True
training_state = True
#Queue parameters
dequeue_min = 8
dequeue_min_main = 10
#num_epochs = None
#Change for network precision,must match input data type
filter_dtype = tf.float32
padding='VALID'
#Downsampling Params
sr=48000
cochleagram_sr=8000
post_rectify=True
#Display interval training statistics
display_step = 1000
if testing:
bn_training_state = False
dropout_training_state = False
training_state= False
num_epochs = 1
#Using these values because 5/40 are the standard training SNRs
if not (SNR_min > 30 or SNR_max > 40):
SNR_max = 35.0
SNR_min = 30.0
print("Testing SNR(dB): Max: "+str(SNR_max)+"Min: "+str(SNR_min))
#mean_subbands = np.load("mean_subband_51400.npy")/51400
#tf_mean_subbands = tf.constant(mean_subbands,dtype=filter_dtype)
def check_speed():
for i in range(30):
sess.run(subbands_batch)
start_time = time.time()
for _ in range(30):
time.sleep(0.5)
print(time.time() - start_time)
print("Len:",sess.run('example_queue/random_shuffle_queue_Size:0'))
with tf.device("/cpu:0"):
###READING QUEUE MACHINERY###
def add_labels(paths):
return [(stim,stim.split('_')[-1].split('a')[0]) for stim in
paths]
def rms(wav):
square = tf.square(wav)
mean_val = tf.reduce_mean(square)
return tf.sqrt(mean_val)
def combine_signal_and_noise_stacked_channel(signals,backgrounds,delay,
sr,cochleagram_sr,post_rectify):
tensor_dict_fg = {}
tensor_dict_bkgd = {}
tensor_dict = {}
snr = tf.random_uniform([],minval=SNR_min,maxval=SNR_max,name="snr_gen")
for path1 in backgrounds:
if path1 == 'train/image':
background = backgrounds['train/image']
else:
tensor_dict_bkgd[path1] = backgrounds[path1]
for path in signals:
if path == 'train/image':
signal = signals['train/image']
sig_len= signal.shape[1] - delay
sig = tf.slice(signal,[0,0,0],[39,sig_len,2])
max_val = tf.reduce_max(sig)
sig_rms = rms(tf.reduce_sum(sig,[0,2]))
sig = tf.div(sig,sig_rms)
#sig = tf.Print(sig, [tf.reduce_max(sig)],message="\nMax SIG:")
sf = tf.pow(tf.constant(10,dtype=tf.float32),
tf.div(snr,tf.constant(20,dtype=tf.float32)))
bak_rms = rms(tf.reduce_sum(background,[0,2]))
#bak_rms = tf.Print(bak_rms, [tf.reduce_max(bak_rms)],message="\nNoise RMS:")
sig_rms = rms(tf.reduce_sum(sig,[0,2]))
scaling_factor = tf.div(tf.div(sig_rms,bak_rms),sf)
#scaling_factor = tf.Print(scaling_factor, [scaling_factor],message="\nScaling Factor:")
noise = tf.scalar_mul(scaling_factor,background)
#noise = tf.Print(noise, [tf.reduce_max(noise)],message="\nMax Noise:")
front = tf.slice(noise,[0,0,0],[39,delay,2])
middle = tf.slice(noise,[0,delay,0],[39,sig_len,2])
end = tf.slice(noise,[0,(delay+int(sig_len)),0],[39,-1,2])
middle_added = tf.add(middle,sig)
new_sig = tf.concat([front,middle_added,end],1)
#new_sig = sig
rescale_factor = tf.div(max_val,tf.reduce_max(new_sig))
#rescale_factor = tf.Print(rescale_factor, [rescale_factor],message="\nRescaling Factor:")
new_sig = tf.scalar_mul(rescale_factor,new_sig)
new_sig_rectified = tf.nn.relu(new_sig)
new_sig_reshaped = tf.reshape(new_sig_rectified,[39,48000,2])
#new_sig_reshaped = tf.reshape(new_sig,[72,30000,1])
#return (signal, background,noise,new_sig_reshaped)
tensor_dict_fg[path] = new_sig_reshaped
else:
tensor_dict_fg[path] = signals[path]
tensor_dict[0] = tensor_dict_fg
tensor_dict[1] = tensor_dict_bkgd
return tensor_dict
def combine_signal_and_noise(signals,backgrounds,delay,
sr,cochleagram_sr,post_rectify):
tensor_dict_fg = {}
tensor_dict_bkgd = {}
tensor_dict = {}
snr = tf.random_uniform([],minval=SNR_min,maxval=SNR_max,name="snr_gen")
for path1 in backgrounds:
if path1 == 'train/image':
background = backgrounds['train/image']
else:
tensor_dict_bkgd[path1] = backgrounds[path1]
for path in signals:
if path == 'train/image':
signal = signals['train/image']
sig_len= signal.shape[1] - delay
sig = tf.slice(signal,[0,0],[78,sig_len])
max_val = tf.reduce_max(sig)
sig_rms = rms(tf.reduce_sum(sig,0))
sig = tf.div(sig,sig_rms)
#sig = tf.Print(sig, [tf.reduce_max(sig)],message="\nMax SIG:")
sf = tf.pow(tf.constant(10,dtype=tf.float32),tf.div(snr,tf.constant(20,dtype=tf.float32)))
bak_rms = rms(tf.reduce_sum(background,0))
#bak_rms = tf.Print(bak_rms, [tf.reduce_max(bak_rms)],message="\nNoise RMS:")
sig_rms = rms(tf.reduce_sum(sig,0))
scaling_factor = tf.div(tf.div(sig_rms,bak_rms),sf)
#scaling_factor = tf.Print(scaling_factor, [scaling_factor],message="\nScaling Factor:")
noise = tf.scalar_mul(scaling_factor,background)
#noise = tf.Print(noise, [tf.reduce_max(noise)],message="\nMax Noise:")
front = tf.slice(noise,[0,0],[78,delay])
middle = tf.slice(noise,[0,delay],[78,sig_len])
end = tf.slice(noise,[0,(delay+int(sig_len))],[78,-1])
middle_added = tf.add(middle,sig)
new_sig = tf.concat([front,middle_added,end],1)
#new_sig = sig
rescale_factor = tf.div(max_val,tf.reduce_max(new_sig))
#rescale_factor = tf.Print(rescale_factor, [rescale_factor],message="\nRescaling Factor:")
new_sig = tf.scalar_mul(rescale_factor,new_sig)
new_sig_rectified = tf.nn.relu(new_sig)
new_sig_reshaped = tf.reshape(new_sig_rectified,[72,48000,1])
#new_sig_reshaped = tf.reshape(new_sig,[72,30000,1])
#return (signal, background,noise,new_sig_reshaped)
tensor_dict_fg[path] = new_sig_reshaped
else:
tensor_dict_fg[path] = signals[path]
tensor_dict[0] = tensor_dict_fg
tensor_dict[1] = tensor_dict_bkgd
return tensor_dict
#Best to read https://www.tensorflow.org/api_guides/python/reading_data#Reading_from_files
###READING QUEUE MACHINERY###
#Best to read https://www.tensorflow.org/api_guides/python/reading_data#Reading_from_files
options = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.GZIP)
is_bkgd = False
first = training_paths[0]
for example in tf.python_io.tf_record_iterator(first,options=options):
result = tf.train.Example.FromString(example)
break
jsonMessage = MessageToJson(tf.train.Example.FromString(example))
jsdict = json.loads(jsonMessage)
feature = parse_nested_dictionary(jsdict,is_bkgd)
dataset = build_tfrecords_iterator(num_epochs, train_path_pattern, is_bkgd, feature, narrowband_noise, manually_added, STIM_SIZE, localization_bin_resolution,stacked_channel)
###READING QUEUE MACHINERY###
# Create a list of filenames and pass it to a queue
bkgd_filename_queue = tf.train.string_input_producer(bkgd_training_paths,
shuffle=True,
capacity=len(bkgd_training_paths))
# Define a reader and read the next record
options = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.GZIP)
bkgd_reader = tf.TFRecordReader(options=options)
_, bkgd_serialized_example = bkgd_reader.read(bkgd_filename_queue)
is_bkgd = True
bkgd_first = bkgd_training_paths[0]
for bkgd_example in tf.python_io.tf_record_iterator(bkgd_first,options=options):
bkgd_result = tf.train.Example.FromString(bkgd_example)
break
bkgd_jsonMessage = MessageToJson(tf.train.Example.FromString(bkgd_example))
bkgd_jsdict = json.loads(bkgd_jsonMessage)
bkgd_feature = parse_nested_dictionary(bkgd_jsdict,is_bkgd)
dataset_bkgd = build_tfrecords_iterator(num_epochs, bkgd_train_path_pattern, is_bkgd, bkgd_feature, narrowband_noise, manually_added, BKGD_SIZE, localization_bin_resolution, stacked_channel)
dataset_iter = dataset_bkgd.make_initializable_iterator()
dataset_dict = dataset_iter.get_next()
source_cochleagram = dataset_dict['train/image']
mean_cochleagram, mean_cochleagram_update = record_tensor_mean(source_cochleagram)
#new_dataset = tf.data.Dataset.zip((dataset, dataset_bkgd))
##SNR = tf.random_uniform([],minval=SNR_min,maxval=SNR_max,name="snr_gen")
#
#
#if stacked_channel:
# new_dataset = new_dataset.map(lambda x,y: combine_signal_and_noise_stacked_channel(x,y,0,48000,8000,post_rectify=True))
#else:
# new_dataset = new_dataset.map(lambda x,y: combine_signal_and_noise(x,y,0,48000,8000,post_rectify=True))
#batch_sizes = tf.constant(16,dtype=tf.int64)
#new_dataset = new_dataset.shuffle(buffer_size=200).batch(batch_size=batch_sizes,drop_remainder=True)
##combined_iter = new_dataset.make_one_shot_iterator()
#combined_iter = new_dataset.make_initializable_iterator()
#combined_iter_dict = collections.OrderedDict()
#combined_iter_dict = combined_iter.get_next()
#if background_textures:
# bkgd_metadata = [combined_iter_dict[1]['train/azim'],combined_iter_dict[1]['train/elev']]
###END READING QUEUE MACHINERY###
def make_downsample_filt_tensor(SR=16000, ENV_SR=200, WINDOW_SIZE=1001, beta=5.0, pycoch_downsamp=False):
"""
Make the sinc filter that will be used to downsample the cochleagram
Parameters
----------
SR : int
raw sampling rate of the audio signal
ENV_SR : int
end sampling rate of the envelopes
WINDOW_SIZE : int
the size of the downsampling window (should be large enough to go to zero on the edges).
beta : float
kaiser window shape parameter
pycoch_downsamp : Boolean
if true, uses a slightly different downsampling function
Returns
-------
downsample_filt_tensor : tensorflow tensor, tf.float32
a tensor of shape [1, WINDOW_SIZE, 1, 1] containing the Kaiser-windowed sinc lowpass filter that is applied while downsampling the cochleagram
"""
DOWNSAMPLE = SR/ENV_SR
if not pycoch_downsamp:
downsample_filter_times = np.arange(-WINDOW_SIZE/2,int(WINDOW_SIZE/2))
downsample_filter_response_orig = np.sinc(downsample_filter_times/DOWNSAMPLE)/DOWNSAMPLE
downsample_filter_window = signallib.kaiser(WINDOW_SIZE, beta)
downsample_filter_response = downsample_filter_window * downsample_filter_response_orig
else:
max_rate = DOWNSAMPLE
f_c = 1. / max_rate # cutoff of FIR filter (rel. to Nyquist)
half_len = 10 * max_rate # reasonable cutoff for our sinc-like function
if max_rate!=1:
downsample_filter_response = signallib.firwin(2 * half_len + 1, f_c, window=('kaiser', beta))
else: # just in case we aren't downsampling -- I think this should work?
downsample_filter_response = np.zeros(2 * half_len + 1)
downsample_filter_response[half_len + 1] = 1
# Zero-pad our filter to put the output samples at the center
# n_pre_pad = int((DOWNSAMPLE - half_len % DOWNSAMPLE))
# n_post_pad = 0
# n_pre_remove = (half_len + n_pre_pad) // DOWNSAMPLE
# We should rarely need to do this given our filter lengths...
# while _output_len(len(h) + n_pre_pad + n_post_pad, x.shape[axis],
# up, down) < n_out + n_pre_remove:
# n_post_pad += 1
# downsample_filter_response = np.concatenate((np.zeros(n_pre_pad), downsample_filter_response, np.zeros(n_post_pad)))
downsample_filt_tensor = tf.constant(downsample_filter_response, tf.float32)
downsample_filt_tensor = tf.expand_dims(downsample_filt_tensor, 0)
downsample_filt_tensor = tf.expand_dims(downsample_filt_tensor, 2)
downsample_filt_tensor = tf.expand_dims(downsample_filt_tensor, 3)
return downsample_filt_tensor
def downsample(signal,current_rate,new_rate,window_size,
beta,post_rectify=True):
downsample = current_rate/new_rate
message = ("The current downsample rate {} is "
"not an integer. Only integer ratios "
"between current and new sampling rates "
"are supported".format(downsample))
assert (current_rate%new_rate == 0), message
message = ("New rate must be less than old rate for this "
"implementation to work!")
assert (new_rate < current_rate), message
# make the downsample tensor
downsample_filter_tensor = make_downsample_filt_tensor(current_rate, new_rate,
window_size, pycoch_downsamp=False)
downsampled_signal = tf.nn.conv2d(signal, downsample_filter_tensor,
strides=[1, 1, downsample, 1], padding='SAME',
name='conv2d_cochleagram_raw')
if post_rectify:
downsampled_signal = tf.nn.relu(downsampled_signal)
return downsampled_signal
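# Usage sketch (illustrative only; the rates match the sr/cochleagram_sr values defined above and the
# window_size/beta values match the make_downsample_filt_tensor defaults):
#   downsampled = downsample(cochleagram_batch, current_rate=sr, new_rate=cochleagram_sr,
#                            window_size=1001, beta=5.0, post_rectify=post_rectify)
# where cochleagram_batch is a [batch, freq, time, 1] float32 tensor.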
init_op = tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())
# Launch the graph
#with tf.Session() as sess:
#run_metadata = tf.RunMetadata()
config = tf.ConfigProto(allow_soft_placement=True,
inter_op_parallelism_threads=0, intra_op_parallelism_threads=0)
sess = tf.Session(config=config)
sess.run(init_op)
# ##This code allows for tracing ops acorss GPUs, you often have to run it twice
# ##to get sensible traces
#
# #sess.run(optimizer,options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE),
# # run_metadata=run_metadata)
# #from tensorflow.python.client import timeline
# #trace = timeline.Timeline(step_stats=run_metadata.step_stats)
# #trace_file.close()
##Used to write out stimuli examples
#
# low_lim=30
# hi_lim=20000
# sr=48000
# sample_factor=1
# scale = 0.1
# i=0
# pad_factor = None
# #invert subbands
# n = int(np.floor(erb.freq2erb(hi_lim) - erb.freq2erb(low_lim)) - 1)
# sess.run(combined_iter.initializer)
# subbands_test,az_label,elev_label = sess.run([combined_iter_dict[0]['train/image'],combined_iter_dict[0]['train/azim'],combined_iter_dict[0]['train/elev']])
#
# filts, hz_cutoffs, freqs=erb.make_erb_cos_filters_nx(subbands_test.shape[2],sr, n,low_lim,hi_lim, sample_factor,pad_factor=pad_factor,full_filter=True)
#
# filts_no_edges = filts[1:-1]
# for batch_iter in range(3):
# for stim_iter in range(16):
# subbands_l=subbands_test[stim_iter,:,:,0]
# subbands_r=subbands_test[stim_iter,:,:,1]
# wavs = np.zeros([subbands_test.shape[2],2])
# wavs[:,0] = sb.collapse_subbands(subbands_l,filts_no_edges).astype(np.float32)
# wavs[:,1] = sb.collapse_subbands(subbands_r,filts_no_edges).astype(np.float32)
# max_val = wavs.max()
# rescaled_wav = wavs/max_val*scale
# name = "stim_{}_{}az_{}elev.wav".format(stim_iter+batch_iter*16,int(az_label[stim_iter])*5,int(elev_label[stim_iter])*5)
# name_with_path = newpath+'/'+name
# write(name_with_path,sr,rescaled_wav)
# pdb.set_trace()
# subbands_test,az_label,elev_label = sess.run([combined_iter_dict[0]['train/image'],combined_iter_dict[0]['train/azim'],combined_iter_dict[0]['train/elev']])
sess.run(dataset_iter.initializer)
stimuli_name = train_path_pattern.split("/")[-2]
step = 0
try:
while True:
sess.run(mean_cochleagram_update)
step+=1
if step % display_step ==0:
print("Iter "+str(step))
except tf.errors.ResourceExhaustedError:
print("Out of memory error")
error= "Out of memory error"
with open(newpath+'/test_error_{}.json'.format(stimuli_name),'w') as f:
json.dump(error,f)
except tf.errors.OutOfRangeError:
print("Out of Range Error. Calculation Finished")
finally:
np_dataset_mean = sess.run(mean_cochleagram)
np.save(newpath+'/dataset_mean_{}.npy'.format(stimuli_name),np_dataset_mean)
#acc= sess.run(test_acc)
#print("Test Accuracy= "+"{:.5f}".format(acc))
#customs = sess.run(custom_test_acc)
#correct_pred = sess.run(custom_correct_pred)
#with open('custom_out2.json', 'w') as f:
# json.dump([test_data_img,correct_pred.tolist()],f)
#print("ACC for special cases:")
#print(customs)
#first_layer = sess.run(weights['wc1'])
#activation1, activation2 = sess.run([conv1,conv3])
#with open('activations.json','w') as f:
# json.dump([activation1.tolist(),activation2.tolist()],f)
#tf.get_variable_scope().reuse_variables()
#first_layer = [var for var in tf.global_variables() if var.op.name=="wc1"][0]
#second_layer = [var for var in tf.global_variables() if var.op.name=="wc2"][0]
#weights_image = put_kernels_on_grid(first_layer)
#weights_image2 = put_kernels_on_grid(second_layer)
#np_weights1, np_weights2 = sess.run([weights_image,weights_image2])
#with open('conv1weights.json','w') as f:
# json.dump([np_weights1.tolist(),np_weights2.tolist()],f)
#
sess.close()
tf.reset_default_graph()
<file_sep>from tf_record_CNN_spherical_gradcheckpoint_valid_pad import tf_record_CNN_spherical
import tensorflow as tf
import os
import glob
import numpy as np
from layer_generator import generate
import sys
tone_version=False
itd_tones=False
ild_tones=False
#Sends Net builder signals to create a branched network, calculates both
#localization and recognition loss
branched=False
#Sets stim size to 30000 in length
zero_padded=True
#model_version=85000
num_epochs=None
#paths to stimuli and background subbands
#bkgd_train_path_pattern = '/om/scratch/Sat/francl/bkgdRecords_textures_sparse_sampled_same_texture_expanded_set_44.1kHz_stackedCH_upsampled/train*.tfrecords'
#train_path_pattern ='/nobackup/scratch/Sat/francl/stimRecords_convolved_oldHRIRdist140_no_hanning_stackedCH_upsampled/testset/train*.tfrecords'
str2bool = lambda x: True if x == "True" else False
arch_ID=int(sys.argv[1])
init = int(sys.argv[2])
regularizer=str(sys.argv[3])
exec("regularizer = "+ regularizer)
bkgd_train_path_pattern = str(sys.argv[4])
train_path_pattern = str(sys.argv[5])
model_version=[]
model_version = list(map(int,list((str(sys.argv[6]).split(',')))))
model_path = str(sys.argv[7])
SNR_max = int(sys.argv[8])
SNR_min = int(sys.argv[9])
manually_added = str2bool(sys.argv[10])
freq_label = str2bool(sys.argv[11])
sam_tones = str2bool(sys.argv[12])
transposed_tones = str2bool(sys.argv[13])
precedence_effect = str2bool(sys.argv[14])
narrowband_noise = str2bool(sys.argv[15])
stacked_channel = str2bool(sys.argv[16])
all_positions_bkgd = str2bool(sys.argv[17])
background_textures = str2bool(sys.argv[18])
testing = str2bool(sys.argv[19])
#newpath='/om2/user/francl/localization_runs/old_hrirs_no_hanning_window_valid_padding/arch_number_'+str(arch_ID)+'_init_'+str(init)
if regularizer is None:
newpath= model_path+'/arch_number_'+str(arch_ID)+'_init_'+str(init)
else:
newpath= model_path+'/arch_number_'+str(arch_ID)+'_init_'+str(init)+'_reg'
if not os.path.exists(newpath):
os.mkdir(newpath)
if not os.path.isfile(newpath+'/config_array.npy'):
config_array = generate()
np.save(newpath+'/config_array.npy',config_array)
else:
config_array=np.load(newpath+'/config_array.npy')
files=(glob.glob(newpath+'/*'))
num_files=len(files)
if os.path.isfile(newpath+'/curve_no_resample_w_cutoff_vary_loc.json'):
testing = True
test=tf_record_CNN_spherical(tone_version,itd_tones,ild_tones,manually_added,freq_label,sam_tones,transposed_tones,precedence_effect,narrowband_noise,all_positions_bkgd,background_textures,testing,branched,zero_padded,stacked_channel,model_version,num_epochs,train_path_pattern,bkgd_train_path_pattern,arch_ID,config_array,files,num_files,newpath,regularizer,SNR_max,SNR_min)
<file_sep>from tf_record_CNN_spherical_gradcheckpoint_valid_pad import tf_record_CNN_spherical
import tensorflow as tf
import os
import glob
import numpy as np
from layer_generator import generate
import sys
tone_version=False
itd_tones=False
ild_tones=False
#divide azim/elev label by 10 if false
manually_added=False
all_positions_bkgd=True
background_textures = False
testing = False
#Sends Net builder signals to create a branched network, calculates both
#localization and recognition loss
branched = False
#Sets stim size to 30000 in length
zero_padded = True
#Parses record expecting frequency label if True
freq_label = False
#Parses SAM tones and associated labels
sam_tones = False
#Parses transposed tones and associated labels
transposed_tones = False
#Parses spatialized clicks and associated labels for precedence effect
precedence_effect = False
#Parses narrowband noise for psychoacoustic experiments
narrowband_noise = False
#Parses record expecting [N,M,2] format instead of interleaved [2N,M] format if True
stacked_channel = True
#model_version=85000
num_epochs=None
#paths to stimuli and background subbands
#bkgd_train_path_pattern = '/om/scratch/Sat/francl/bkgdRecords_textures_sparse_sampled_same_texture_expanded_set_44.1kHz_stackedCH_upsampled/train*.tfrecords'
#train_path_pattern ='/nobackup/scratch/Sat/francl/stimRecords_convolved_oldHRIRdist140_no_hanning_stackedCH_upsampled/testset/train*.tfrecords'
#arch_ID=int(sys.argv[1])
#init = int(sys.argv[2])
#regularizer=str(sys.argv[3])
#exec("regularizer = "+ regularizer)
#bkgd_train_path_pattern = str(sys.argv[4])
#train_path_pattern = str(sys.argv[5])
#model_version=[]
#model_version = list(map(int,list((str(sys.argv[6]).split(',')))))
arch_ID = 193
init=0
#regularizer="tf.contrib.layers.l1_regularizer(scale=0.001)"
regularizer=None
bkgd_train_path_pattern='/om/scratch/Sat/francl/bkgdRecords_textures_sparse_sampled_same_texture_expanded_set_44.1kHz_stackedCH_upsampled/train*.tfrecords'
train_path_pattern='/nobackup/scratch/Sat/francl/stimRecords_convolved_oldHRIRdist140_no_hanning_stackedCH_upsampled/train*.tfrecords'
model_version=[85000]
#arch_ID=0
#init = 0
#regularizer=None
#bkgd_train_path_pattern = str(sys.argv[4])
#bkgd_train_path_pattern='/nobackup/scratch/Wed/francl/bkgdRecords_pinknoise_oldHRIRs140_10mshann/train0.tfrecords'
#train_path_pattern='/nobackup/scratch/Wed/francl/samToneRecords_fulllengthTime_jitteredCarrier_jitterSlice/train0.tfrecords'
#train_path_pattern = str(sys.argv[5])
#model_version=[85000]
#model_version = list(map(int,list((str(sys.argv[6]).split(',')))))
#init=0
#regularizer=None
#bkgd_train_path_pattern='/nobackup/scratch/Wed/francl/bkgdRecords_pinknoise_oldHRIRs140_10mshann/train*.tfrecords'
#train_path_pattern='/nobackup/scratch/Wed/francl/samToneRecords_fulllengthTime_jitteredCarrier_jitterSlice/train*.tfrecords'
#model_version=85000
#newpath='/om2/user/francl/localization_runs/old_hrirs_no_hanning_window_valid_padding/arch_number_'+str(arch_ID)+'_init_'+str(init)
if regularizer is None:
newpath='/om2/user/gahlm/dataset_pipeline_test/arch_number_'+str(arch_ID)+'_init_'+str(init)
else:
newpath='/om2/user/gahlm/dataset_pipeline_test/arch_number_'+str(arch_ID)+'_init_'+str(init)+'_reg'
if not os.path.exists(newpath):
os.mkdir(newpath)
if not os.path.isfile(newpath+'/config_array.npy'):
config_array = generate()
np.save(newpath+'/config_array.npy',config_array)
else:
config_array=np.load(newpath+'/config_array.npy')
files=(glob.glob(newpath+'/*'))
num_files=len(files)
if os.path.isfile(newpath+'/curve_no_resample_w_cutoff_vary_loc.json'):
testing = True
test=tf_record_CNN_spherical(tone_version,itd_tones,ild_tones,manually_added,freq_label,sam_tones,transposed_tones,precedence_effect,narrowband_noise,all_positions_bkgd,background_textures,testing,branched,zero_padded,stacked_channel,model_version,num_epochs,train_path_pattern,bkgd_train_path_pattern,arch_ID,config_array,files,num_files,newpath,regularizer)
<file_sep>import tensorflow as tf
def record_tensor_mean(tensor_in,update_collection="metrics_update",
metrics_collection="metrics_out"):
mean_value,mean_update = tf.metrics.mean_tensor(tensor_in,
metrics_collections=metrics_collection,
updates_collections=update_collection)
return mean_value,mean_update
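# Usage sketch (mirrors the dataset-mean computation elsewhere in this repository; names are illustrative):
#   mean_value, mean_update = record_tensor_mean(cochleagram_tensor)
#   sess.run(tf.group(tf.global_variables_initializer(), tf.local_variables_initializer()))
#   for _ in range(num_batches):
#       sess.run(mean_update)             # accumulate the running mean over batches
#   dataset_mean = sess.run(mean_value)   # fetch the accumulated mean tensor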
<file_sep>from tf_record_CNN_spherical_gradcheckpoint_valid_pad import tf_record_CNN_spherical
import tensorflow as tf
import os
import glob
import numpy as np
from layer_generator_new import generate
import sys
tone_version=False
itd_tones=False
ild_tones=False
sam_tones=False
transposed_tones=False
precedence_effect=False
narrowband_noise=False
manually_added=False
freq_label=False
all_positions_bkgd=False
background_textures = True
testing=False
branched=False
zero_padded=True
stacked_channel = True
model_version=20000
num_epochs=None
#paths to stimuli and background subbands
#bkgd_train_path_pattern = '/om/scratch/Wed/francl/bkgdRecords_textures_sparse_sampled_same_texture_expanded_set_44.1kHz_stackedCH_upsampled/train*.tfrecords'
#train_path_pattern = '/nobackup/scratch/Wed/francl/stimRecords_convolved_oldHRIRdist140_no_hanning_stackedCH_upsampled/train*.tfrecords'
arch_ID=int(sys.argv[1])
init = int(sys.argv[2])
regularizer = str(sys.argv[3])
exec("regularizer = "+ regularizer)
bkgd_train_path_pattern = str(sys.argv[4])
train_path_pattern = str(sys.argv[5])
if regularizer is None:
newpath='/om2/user/gahlm/dataset_pipeline_test/arch_number_'+str(arch_ID)+'_init_'+str(init)
else:
newpath='/om2/user/gahlm/dataset_pipeline_test/arch_number_'+str(arch_ID)+'_init_'+str(init)+'_reg'
if not os.path.exists(newpath):
os.mkdir(newpath)
if not os.path.isfile(newpath+'/config_array.npy'):
print("GENERATING NEW CONFIG!")
config_array = generate()
np.save(newpath+'/config_array.npy',config_array)
else:
config_array=np.load(newpath+'/config_array.npy')
files=(glob.glob(newpath+'/*'))
num_files=len(files)
if os.path.isfile(newpath+'/curve_no_resample_w_cutoff_vary_loc.json'):
testing = True
if not testing:
train=tf_record_CNN_spherical(tone_version,itd_tones,ild_tones,manually_added,freq_label,sam_tones,transposed_tones,precedence_effect,narrowband_noise,all_positions_bkgd,background_textures,testing,branched,zero_padded,stacked_channel,model_version,num_epochs,train_path_pattern,bkgd_train_path_pattern,arch_ID,config_array,files,num_files,newpath,regularizer)
<file_sep>def generate():
import random
import numpy as np
import numpy.random
min_layers=3
max_layers=8
num_layers=numpy.random.randint(min_layers,max_layers) #number of total layers
freq_stride=1
time_stride=1
freq_pool=1
time_pool=4
act=32
gpu_num=0 #tracks gpu number
dim=512
count=0 #tracks total number of convolutions
#create lists
config_array=[]
gpu_0=[]
gpu=["/gpu:{}".format(gpu_num)]
gpu_num+=1
gpu_0.append(gpu)
possible_pooling_kernels_length = [[2,4,8],[2,4],[2,4],[1,2],[1,2],[1,1,2],[1,1,1,2],[1,1,1,2]]
possible_pooling_kernels_height = [[1,2],[1,2],[1,2],[1,2],[1,1,2],[1,1,2],[1,1,1,2],[1,1,1,2]]
possible_conv_kernels_height = [[1,2,3],[1,2,3],[1,2,3],[1,2,3],[1,2,3],[1,2,3],[1,2,3],[1,2,3]]
possible_conv_kernels_length = [[4,8,16,32,64],[4,8,16,32],[2,4,8,16],[2,4,8],[2,4,8],[2,3,4],[2,3,4],[2,3,4]]
def sample_conv_kernel(layer_idx):
rand_choose_conv_kernel_length = np.random.randint(0,len(possible_conv_kernels_length[layer_idx]))
rand_choose_conv_kernel_height = np.random.randint(0,len(possible_conv_kernels_height[layer_idx]))
return [possible_conv_kernels_height[layer_idx][rand_choose_conv_kernel_height],
possible_conv_kernels_length[layer_idx][rand_choose_conv_kernel_length]]
def sample_conv_strides(layer_idx):
rand_choose_conv_stride_length = np.random.randint(0,len(possible_conv_strides_length[layer_idx]))
rand_choose_conv_stride_height = np.random.randint(0,len(possible_conv_strides_height[layer_idx]))
return [possible_conv_strides_height[layer_idx][rand_choose_conv_stride_height],
possible_conv_strides_length[layer_idx][rand_choose_conv_stride_length]]
def sample_pool_kernel(layer_idx):
# set the pooling for each layer
rand_choose_poolkernel_length = np.random.randint(0,len(possible_pooling_kernels_length[layer_idx]))
rand_choose_poolkernel_height = np.random.randint(0,len(possible_pooling_kernels_height[layer_idx]))
return [possible_pooling_kernels_height[layer_idx][rand_choose_poolkernel_height],
possible_pooling_kernels_length[layer_idx][rand_choose_poolkernel_length]]
#Generate layers on second gpu. No more than 3 convolutions
#allowed before pooling. First layer has to be a convolutional layer.
#Next layer has a 50% chance of being another convolution.
#Convolution includes convolution, bias, batch norm, and relu.
conv_count=0 #tracks number of convolutions per layer
layer_count= 0
while layer_count <= num_layers:
if conv_count<3:
a=random.random()
if a >= .5 or conv_count==0:
stride=[freq_stride,time_stride]
kernal=sample_conv_kernel(layer_count)
kernal.append(act)
conv=['conv',kernal,stride]
bn=['bn']
relu=['relu']
gpu_0.append(conv)
gpu_0.append(relu)
gpu_0.append(bn)
conv_count+=1
else:
pool_kernel = sample_pool_kernel(layer_count)
pool = ['pool',pool_kernel]
gpu_0.insert(-2,pool)
layer_count+=1
if act < 512 and random.random() > .2:
act*=2
conv_count=0
else:
pool_kernel = sample_pool_kernel(layer_count)
pool = ['pool',pool_kernel]
gpu_0.insert(-2,pool)
num_layers-=1
conv_count=0
if act < 512 and random.random() > .2:
act*=2
#Make last layer before fully connected layers a pool
#if gpu_1[-1]==bn:
# gpu_1.append(pool)
#Generate fully connected layers and add to second gpu
num_fc=1
while num_fc != 0 :
fc=['fc',dim]
fc_bn=['fc_bn']
fc_relu=['fc_relu']
gpu_0.append(fc)
gpu_0.append(fc_relu)
gpu_0.append(fc_bn)
num_fc-=1
#Add dropout and out to second gpu
dropout=['dropout']
out=['out']
gpu_0.append(dropout)
gpu_0.append(out)
#Append lists gpu_0 and gpu_1 to config.array
config_array.append(gpu_0)
return config_array
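# Usage sketch (mirrors how the call_model_* scripts in this repository use it):
#   config_array = generate()
#   np.save(newpath + '/config_array.npy', config_array)
#   # subsequent runs reload the same architecture with np.load(newpath + '/config_array.npy')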
def uniformly_select_conv_layers(range_num_conv_layers, possible_conv_layer_length, possible_conv_layer_height, possible_conv_layer_nums, possible_pooling_strides_length, possible_pooling_strides_height, possible_conv_strides_length, possible_conv_strides_height):
"""
uniformly selects from the options within the provided inputs
Args
----
range_num_conv_layers (list) : possible depths of the network (number of conv layers)
possible_conv_layer_length (list) : for each convolutional layer, the width of the kernels
possible_conv_layer_height (list) : for each convolutional layer, the height of the kernels
possible_conv_layer_nums (list) : for each convolutional layer, the number of kernels to include
possible_pooling_strides_length (list) : for each pooling layer, the amount of pooling (width)
possible_pooling_strides_height (list) : for each pooling layer, the amount of pooling (height)
possible_conv_strides_length (list) : for each conv layer, the possible strides (width)
possible_conv_strides_height (list) : for each conv layer, the possible strides (height)
"""
num_conv_kernels = []
convstrides = []
poolstrides = []
conv_kernels_sizes = []
num_conv_layers = np.random.randint(range_num_conv_layers[0], range_num_conv_layers[1]+1)
for layer_idx in np.arange(num_conv_layers):
# choose number of conv kernels in each layer
rand_choose_num_conv = np.random.randint(0,len(possible_conv_layer_nums[layer_idx]))
num_conv_kernels.append(possible_conv_layer_nums[layer_idx][rand_choose_num_conv])
# set the stride for each layer
rand_choose_conv_stride_length = np.random.randint(0,len(possible_conv_strides_length[layer_idx]))
rand_choose_conv_stride_height = np.random.randint(0,len(possible_conv_strides_height[layer_idx]))
convstrides.append([possible_conv_strides_length[layer_idx][rand_choose_conv_stride_length],
possible_conv_strides_height[layer_idx][rand_choose_conv_stride_height]])
# set the pooling for each layer
rand_choose_poolstrides_length = np.random.randint(0,len(possible_pooling_strides_length[layer_idx]))
rand_choose_poolstrides_height = np.random.randint(0,len(possible_pooling_strides_height[layer_idx]))
poolstrides.append([possible_pooling_strides_height[layer_idx][rand_choose_poolstrides_height],possible_pooling_strides_length[layer_idx][rand_choose_poolstrides_length]])
# set the filter length and height for each layer
rand_choose_conv_length = np.random.randint(0,len(possible_conv_layer_length[layer_idx]))
rand_choose_conv_height = np.random.randint(0,len(possible_conv_layer_height[layer_idx]))
conv_kernels_sizes.append([possible_conv_layer_height[layer_idx][rand_choose_conv_height],possible_conv_layer_length[layer_idx][rand_choose_conv_length]])
return num_conv_kernels, convstrides, poolstrides, conv_kernels_sizes
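# Usage sketch (illustrative search-space values, not the ones used for the published networks):
#   depth_range = [4, 6]
#   per_layer = lambda opts: [opts] * depth_range[1]   # one options list per possible conv layer
#   num_kernels, conv_strides, pool_strides, kernel_sizes = uniformly_select_conv_layers(
#       depth_range,
#       per_layer([4, 8, 16]),   # possible_conv_layer_length (kernel widths)
#       per_layer([1, 2, 3]),    # possible_conv_layer_height (kernel heights)
#       per_layer([32, 64]),     # possible_conv_layer_nums (kernels per layer)
#       per_layer([1, 2, 4]),    # possible_pooling_strides_length
#       per_layer([1, 2]),       # possible_pooling_strides_height
#       per_layer([1]),          # possible_conv_strides_length
#       per_layer([1]))          # possible_conv_strides_height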
<file_sep># BinauralLocalizationCNN
Code to create networks that localize sound sources in 3D environments.
Network weights can be downloaded at: https://www.dropbox.com/sh/af6vaotxt41i7pe/AACfTzMxMLfv-Edmn33S4gTpa?dl=0
The main training/testing Python script is `call_model_training_valid_pad_francl.py`.
Before running, change the model save folder to point to the directory containing the model architecture config file. A sketch of an example invocation is shown below.
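As a rough sketch (not a documented interface), the script is driven by positional `sys.argv` arguments; the order below mirrors the argument parsing in one of the `sys.argv`-driven test scripts in this repository, and every value is a placeholder:

```
python call_model_training_valid_pad_francl.py \
    <arch_ID> <init> <regularizer-or-None> \
    '<bkgd_train_path_pattern>' '<train_path_pattern>' \
    <model_version(s)> <model_path> <SNR_max> <SNR_min> \
    <manually_added> <freq_label> <sam_tones> <transposed_tones> <precedence_effect> \
    <narrowband_noise> <stacked_channel> <all_positions_bkgd> <background_textures> <testing>
```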
<file_sep>def generate():
import random
import numpy as np
import numpy.random
min_layers=5
max_layers=7
num_layers=numpy.random.randint(min_layers,max_layers) #number of total layers
freq_stride=2
time_stride=1
freq_pool=1
time_pool=4
act=32
gpu_num=0 #tracks gpu number
dim=512
count=0 #tracks total number of convolutions
#create lists
config_array=[]
gpu_0=[]
gpu_1=[]
#lists of variable values for first layer
k_width=numpy.random.randint((2**(5-count))/2,((2**(5-count))+4*num_layers)/2)
k_width*=2
k_heighth=numpy.random.randint(1,2)
k_heighth*=2
# freq_stride=numpy.random.randint(1,2)
stride=[freq_stride,time_stride]
kernal=[k_heighth,k_width,act]
pool_size=[freq_pool,time_pool]
gpu=["/gpu:{}".format(gpu_num)]
gpu_num+=1
#generate layers and add to first gpu
conv=['conv',kernal,stride]
bn=['bn']
relu=['relu']
pool=['pool',pool_size]
num_layers-=1
count+=1
gpu_0.append(gpu)
gpu_0.append(conv)
gpu_0.append(pool)
gpu_0.append(relu)
gpu_0.append(bn)
#Make sure there is one 2x1 stride on the first gpu
# if freq_stride == 1:
# freq_stride = 2
# else:
# freq_stride = 1
#Generate layer on first gpu. Before the first pool no more than 2
#convolutions allowed. First layer has to be a convolutional layer.
#Next layer has a 50% chance of being another convolution.
#Convolution includes convolution, bias, batch norm, and relu.
# act*=2
# i=0 #tracks number of convolutions per layer
# while num_layers != 0:
# if i<2:
# a=random.random()
# if a >= .5:
# k_heighth=numpy.random.randint((2**count)/2,((2**count)+2*count)/2)
# k_heighth*=2
# k_width=numpy.random.randint((2**(5-count))/2,((2**(5-count))+2*num_layers)/2)
# k_width*=2
# stride=[freq_stride,time_stride]
# kernal=[k_heighth,k_width,act]
# conv=['conv',kernal,stride]
# bn=['bn']
# relu=['relu']
# gpu_0.append(conv)
# gpu_0.append(bn)
# gpu_0.append(relu)
# count+=1
# i+=1
# else:
# if i==0:
# k_heighth=numpy.random.randint((2**count)/2,((2**count)+2*count)/2)
# k_heighth*=2
# k_width=numpy.random.randint((2**(5-count))/2,((2**(5-count))+2*num_layers)/2)
# k_width*=2
# stride=[freq_stride,time_stride]
# kernal=[k_heighth,k_width,act]
# conv=['conv',kernal,stride]
# bn=['bn']
# relu=['relu']
# gpu_0.append(conv)
# gpu_0.append(bn)
# gpu_0.append(relu)
# count+=1
# i+=1
# else:
# gpu_0.append(pool)
# num_layers-=1
# act*=2
# break
# else:
# gpu_0.append(pool)
# num_layers-=1
# act*=2
# break
gpu=["/gpu:{}".format(gpu_num)]
gpu_1.append(gpu)
#Second layer has one convolution, batch norm, and relu before pooling.
#Kernel sizes are variable
freq_stride=1
act*=2
k_heighth=numpy.random.randint((2**count)/2,((2**count)+2*count)/2)
k_heighth*=2
k_width=numpy.random.randint((2**(5-count))/2,((2**(5-count))+2*num_layers)/2)
k_width*=2
stride=[freq_stride,time_stride]
kernal=[k_heighth,k_width,act]
conv=['conv',kernal,stride]
bn=['bn']
relu=['relu']
count+=1
gpu_0.append(conv)
gpu_0.append(pool)
gpu_0.append(relu)
gpu_0.append(bn)
num_layers-=1
act*=2
#Generate layers on second gpu. No more than 3 convolutions
#allowed before pooling. First layer has to be a convolutional layer.
#Next layer has a 50% chance of being another convolution.
#Convolution includes convolution, bias, batch norm, and relu.
i=0 #tracks number of convolutions per layer
while num_layers != 0:
if i<3:
a=random.random()
if a >= .5:
k_width=numpy.random.randint(3,5)
k_width*=2
k_heighth=k_width
stride=[freq_stride,time_stride]
kernal=[k_heighth,k_width,act]
conv=['conv',kernal,stride]
bn=['bn']
relu=['relu']
gpu_1.append(conv)
gpu_1.append(relu)
gpu_1.append(bn)
count+=1
i+=1
else:
if i==0:
k_width=numpy.random.randint(3,5)
k_width*=2
k_heighth=k_width
stride=[freq_stride,time_stride]
kernal=[k_heighth,k_width,act]
conv=['conv',kernal,stride]
bn=['bn']
relu=['relu']
gpu_1.append(conv)
gpu_1.append(relu)
gpu_1.append(bn)
count+=1
i+=1
else:
gpu_1.insert(-2,pool)
num_layers-=1
if act < 512:
act*=2
i=0
else:
gpu_1.insert(-2,pool)
num_layers-=1
if act < 512:
act*=2
i=0
#Make last layer before fully connected layers a pool
#if gpu_1[-1]==bn:
# gpu_1.append(pool)
#Generate fully connected layers and add to second gpu
num_fc=1
while num_fc != 0 :
fc=['fc',dim]
fc_bn=['fc_bn']
fc_relu=['fc_relu']
gpu_1.append(fc)
gpu_1.append(fc_relu)
gpu_1.append(fc_bn)
num_fc-=1
#Add dropout and out to second gpu
dropout=['dropout']
out=['out']
gpu_1.append(dropout)
gpu_1.append(out)
#Append lists gpu_0 and gpu_1 to config.array
config_array.append(gpu_0)
config_array.append(gpu_1)
return config_array
<file_sep>from tf_record_CNN_spherical_gradcheckpoint_branched import tf_record_CNN_spherical
import os
import glob
import numpy as np
from layer_generator import generate
import sys
tone_version=False
itd_tones=False
ild_tones=False
manually_added=False
freq_label=False
all_positions_bkgd=True
testing=False
branched=True
zero_padded=True
stacked_channel = False
model_version=20000
num_epochs=None
#paths to stimuli and background subbands
bkgd_train_path_pattern = '/om/scratch/Tue/francl/bkgdRecords_vary_env/train*.tfrecords'
train_path_pattern = '/nobackup/scratch/Tue/francl/speechRecords_specfilt_2ord2octvfilt_upsampled_convolvedHRIRdist100/train*.tfrecords'
#arch_ID=int(sys.argv[1])
arch_ID=38
newpath='/om2/user/francl/branchpoint_search/branchpoint_1/arch_number_'+str(arch_ID)
if not os.path.exists(newpath):
os.mkdir(newpath)
if not os.path.isfile(newpath+'/config_array.npy'):
print("GENERATING NEW CONFIG!")
config_array = generate()
np.save(newpath+'/config_array.npy',config_array)
else:
config_array=np.load(newpath+'/config_array.npy')
files=(glob.glob(newpath+'/*'))
num_files=len(files)
if os.path.isfile(newpath+'/curve_no_resample_w_cutoff_vary_loc.json'):
testing = True
if not testing:
train=tf_record_CNN_spherical(tone_version,itd_tones,ild_tones,manually_added,freq_label,all_positions_bkgd,testing,branched,zero_padded,stacked_channel,model_version,num_epochs,train_path_pattern,bkgd_train_path_pattern,arch_ID,config_array,files,num_files,newpath)
<file_sep>from tf_record_CNN_spherical_gradcheckpoint_valid_pad import tf_record_CNN_spherical
import os
import glob
import numpy as np
from layer_generator_new import generate
import sys
tone_version=False
itd_tones=False
ild_tones=False
sam_tones=False
transposed_tones=False
precedence_effect=False
narrowband_noise=False
manually_added=False
freq_label=False
all_positions_bkgd=False
background_textures = True
testing=False
branched=False
zero_padded=True
stacked_channel = True
model_version=20000
num_epochs=None
#paths to stimuli and background subbands
bkgd_train_path_pattern = '/om/scratch/Wed/francl/bkgdRecords_textures_sparse_sampled_same_texture_expanded_set_44.1kHz_stackedCH_upsampled/train*.tfrecords'
train_path_pattern = '/nobackup/scratch/Wed/francl/stimRecords_convolved_oldHRIRdist140_no_hanning_stackedCH_upsampled/train*.tfrecords'
#arch_ID=int(sys.argv[1])
arch_ID=103
init_num = 0
newpath='/om2/user/francl/localization_runs/arch_search_stackedCH_upsampled_sparse_textures_oldHRIR140/arch_number_'+str(arch_ID)
if not os.path.exists(newpath):
os.mkdir(newpath)
if not os.path.isfile(newpath+'/config_array.npy'):
print("GENERATING NEW CONFIG!")
config_array = generate()
np.save(newpath+'/config_array.npy',config_array)
else:
config_array=np.load(newpath+'/config_array.npy')
files=(glob.glob(newpath+'/*'))
num_files=len(files)
if os.path.isfile(newpath+'/curve_no_resample_w_cutoff_vary_loc.json'):
testing = True
if not testing:
train=tf_record_CNN_spherical(tone_version,itd_tones,ild_tones,manually_added,
freq_label,sam_tones,transposed_tones,precedence_effect,
narrowband_noise,all_positions_bkgd,background_textures,
testing,branched,zero_padded,stacked_channel,model_version,
num_epochs,train_path_pattern,bkgd_train_path_pattern,
arch_ID,config_array,files,num_files,newpath)
<file_sep>#!/cm/shared/openmind/anaconda/2.5.0/bin/python
import numpy as np
import sys
print(np.load(sys.stdin.read().rstrip()))
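# Usage sketch (assumption: this file is saved as an executable script, e.g. print_npy.py):
#   echo /path/to/config_array.npy | ./print_npy.py
# It reads a .npy file path from stdin and prints the loaded array.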
<file_sep>from tf_record_CNN_spherical_gradcheckpoint_valid_pad import tf_record_CNN_spherical
import os
import glob
import numpy as np
from layer_generator_new import generate
import sys
tone_version=False
itd_tones=False
ild_tones=False
sam_tones=True
transposed_tones=False
precedence_effect=False
narrowband_noise=False
manually_added=False
freq_label=True
all_positions_bkgd=False
background_textures = True
testing=True
branched=False
zero_padded=True
stacked_channel = True
model_version=[35000,85000]
num_epochs=None
regularizer = None
#paths to stimuli and background subbands
bkgd_train_path_pattern = '/om/scratch/Wed/francl/bkgdRecords_textures_sparse_sampled_same_texture_expanded_set_44.1kHz_stackedCH_upsampled/train*.tfrecords'
train_path_pattern = '/nobackup/scratch/Wed/francl/samToneRecords_fulllengthTime_jitteredCarrier_jitterSlice_stackedCH_upsampled/train*.tfrecords'
#arch_ID=int(sys.argv[1])
arch_ID=518
init = 0
newpath='/om2/user/francl/localization_runs/arch_search_stackedCH_upsampled_sparse_textures_oldHRIR140/arch_number_'+str(arch_ID)
if not os.path.exists(newpath):
os.mkdir(newpath)
if not os.path.isfile(newpath+'/config_array.npy'):
print("GENERATING NEW CONFIG!")
config_array = generate()
np.save(newpath+'/config_array.npy',config_array)
else:
config_array=np.load(newpath+'/config_array.npy')
files=(glob.glob(newpath+'/*'))
num_files=len(files)
if os.path.isfile(newpath+'/curve_no_resample_w_cutoff_vary_loc.json'):
testing = True
if not testing:
train=tf_record_CNN_spherical(tone_version,itd_tones,ild_tones,manually_added,freq_label,sam_tones,transposed_tones,precedence_effect,narrowband_noise,all_positions_bkgd,background_textures,testing,branched,zero_padded,stacked_channel,model_version,num_epochs,train_path_pattern,bkgd_train_path_pattern,arch_ID,config_array,files,num_files,newpath,regularizer)
<file_sep>from tf_record_CNN_spherical_gradcheckpoint import tf_record_CNN_spherical
import os
import glob
import numpy as np
from layer_generator import generate
import sys
tone_version=False
itd_tones=False
ild_tones=False
#divide azim/elev label by 10 if false
manually_added=True
all_positions_bkgd=True
testing=True
#Sends Net builder signals to create a branched network, calculates both
#localization and recognition loss
branched=False
#Sets stim size to 30000 in length
zero_padded=True
#Parses record expecting frequency label if True
freq_label = True
#Parses record expecting [N,M,2] format instead of interleaved [2N,M] format if True
stacked_channel = False
model_version=70000
num_epochs=None
#paths to stimuli and background subbands
bkgd_train_path_pattern = '/om/scratch/Mon/francl/bkgdRecords_pinknoise_vary_env/train*.tfrecords'
train_path_pattern = '/nobackup/scratch/Mon/francl/noiseRecords_1octv_man-added-ITD_wide_range_no_hanning/train*.tfrecords'
#arch_ID=int(sys.argv[1])
arch_ID=38
newpath='/om2/user/francl/localization_runs/old_hrirs_no_hanning_window/arch_number_'+str(arch_ID)
if not os.path.exists(newpath):
os.mkdir(newpath)
if not os.path.isfile(newpath+'/config_array.npy'):
config_array = generate()
np.save(newpath+'/config_array.npy',config_array)
else:
config_array=np.load(newpath+'/config_array.npy')
files=(glob.glob(newpath+'/*'))
num_files=len(files)
if os.path.isfile(newpath+'/curve_no_resample_w_cutoff_vary_loc.json'):
testing = True
if testing:
test=tf_record_CNN_spherical(tone_version,itd_tones,ild_tones,manually_added,freq_label,all_positions_bkgd,testing,branched,zero_padded,stacked_channel,model_version,num_epochs,train_path_pattern,bkgd_train_path_pattern,arch_ID,config_array,files,num_files,newpath)
<file_sep>from tf_record_CNN_spherical_gradcheckpoint_valid_pad import tf_record_CNN_spherical
import tensorflow as tf
import os
import glob
import numpy as np
from layer_generator import generate
import sys
tone_version=False
itd_tones=False
ild_tones=False
#divide azim/elev label by 10 if false
manually_added=False
all_positions_bkgd=False
background_textures = True
testing=False
#Sends Net builder signals to create a branched network, calculates both
#localization and recognition loss
branched=False
#Sets stim size to 30000 in length
zero_padded=True
#Parses record expecting frequency label if True
freq_label = False
#Parses SAM tones and associated labels
sam_tones = False
#Parses transposed tones and associated labels
transposed_tones = False
#Parses spatialized clicks and associated labels for precedence effect
precedence_effect = False
#Parses narrowband noise for psychoacoustic experiments
narrowband_noise = False
#Parses record expecting [N,M,2] format instead of interleaved [2N,M] format if True
stacked_channel = True
SNR_max=80
SNR_min=80
#model_version=85000
num_epochs=None
#paths to stimuli and background subbands
#bkgd_train_path_pattern = '/om/scratch/Sat/francl/bkgdRecords_textures_sparse_sampled_same_texture_expanded_set_44.1kHz_stackedCH_upsampled/train*.tfrecords'
#train_path_pattern ='/nobackup/scratch/Sat/francl/stimRecords_convolved_oldHRIRdist140_no_hanning_stackedCH_upsampled/testset/train*.tfrecords'
arch_ID=int(sys.argv[1])
init = int(sys.argv[2])
regularizer=str(sys.argv[3])
exec("regularizer = "+ regularizer)
bkgd_train_path_pattern = str(sys.argv[4])
train_path_pattern = str(sys.argv[5])
model_version=[]
model_version = list(map(int,list((str(sys.argv[6]).split(',')))))
#newpath='/om2/user/francl/localization_runs/old_hrirs_no_hanning_window_valid_padding/arch_number_'+str(arch_ID)+'_init_'+str(init)
if regularizer is None:
newpath='/om2/user/francl/new_task_archs/new_task_archs_no_background_noise_80dBSNR_training/arch_number_'+str(arch_ID)+'_init_'+str(init)
else:
newpath='/om2/user/francl/new_task_archs/new_task_archs_no_background_noise_80dBSNR_training/arch_number_'+str(arch_ID)+'_init_'+str(init)+'_reg'
if not os.path.exists(newpath):
os.mkdir(newpath)
if not os.path.isfile(newpath+'/config_array.npy'):
config_array = generate()
np.save(newpath+'/config_array.npy',config_array)
else:
config_array=np.load(newpath+'/config_array.npy')
files=(glob.glob(newpath+'/*'))
num_files=len(files)
if os.path.isfile(newpath+'/curve_no_resample_w_cutoff_vary_loc.json'):
testing = True
test=tf_record_CNN_spherical(tone_version,itd_tones,ild_tones,manually_added,freq_label,sam_tones,transposed_tones,precedence_effect,narrowband_noise,all_positions_bkgd,background_textures,testing,branched,zero_padded,stacked_channel,model_version,num_epochs,train_path_pattern,bkgd_train_path_pattern,arch_ID,config_array,files,num_files,newpath,regularizer,SNR_max,SNR_min)
|
554834afb4f6bf8b514405bff48f14b89b0c27ab
|
[
"Markdown",
"Python"
] | 15
|
Python
|
Liu-sp/BinauralLocalizationCNN
|
864a58b9fa7e73ea74c832d6dad575aef424b8ac
|
7fec3528d94f5eb719feb0b0a1874a7abd89197f
|
refs/heads/master
|
<file_sep>//
// MovieViewCell.swift
// Cine
//
// Created by <NAME> on 9/25/19.
// Copyright © 2019 sgh. All rights reserved.
//
import UIKit
class MovieViewCell: UICollectionViewCell {
@IBOutlet weak var billboard: UIImageView!
}
<file_sep>//
// MovieCollectionViewCell.swift
// Cine
//
// Created by <NAME> on 10/2/19.
// Copyright © 2019 sgh. All rights reserved.
//
import UIKit
class MovieCollectionViewCell: UICollectionViewCell {
@IBOutlet weak var billboard: UIImageView!
}
<file_sep>//
// Movie.swift
// Cine
//
// Created by <NAME> on 9/25/19.
// Copyright © 2019 sgh. All rights reserved.
//
import UIKit
struct Movie: Codable{
var id: String
var movie_name: String
var rating: String
var duration: Int
var seats: [Seat]
}
struct Seat: Codable{
var seat_number: Int
var taken: Bool
}
<file_sep>//
// MoviesCollectionViewController.swift
// Cine
//
// Created by <NAME> on 10/2/19.
// Copyright © 2019 sgh. All rights reserved.
//
import UIKit
import FirebaseFirestore
import MobileCoreServices
import FirebaseStorage
import FirebaseUI
private let reuseIdentifier = "Cell"
class MoviesCollectionViewController: UICollectionViewController {
var movies = [Movie]()
var ref: DocumentReference!
var getRef: Firestore!
var storageReference: StorageReference!
override func viewDidLoad() {
super.viewDidLoad()
getRef = Firestore.firestore()
storageReference = Storage.storage().reference()
// Register cell classes
self.collectionView!.register(MovieCollectionViewCell.self, forCellWithReuseIdentifier: reuseIdentifier)
getMovies()
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
// MARK: UICollectionViewDataSource
override func numberOfSections(in collectionView: UICollectionView) -> Int {
// #warning Incomplete implementation, return the number of sections
return 1
}
override func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
// #warning Incomplete implementation, return the number of items
return movies.count
}
override func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(withReuseIdentifier: reuseIdentifier, for: indexPath) as! MovieCollectionViewCell
print(movies[indexPath.row].movie_name)
let placeHolder = UIImage(named: "Placeholder")
let userImageRef = storageReference.child("/billboards").child(movies[indexPath.row].id + ".jpg")
userImageRef.downloadURL { (url, error) in
if let error = error {
print(error.localizedDescription)
}else{
URLSession.shared.dataTask(with: url!) { (data, _, _) in
guard let data = data else{ return }
DispatchQueue.main.async{
cell.billboard.image = UIImage(data: data)
}
}.resume()
print(String(describing: url!))
}
}
return cell
}
// MARK: UICollectionViewDelegate
/*
// Uncomment this method to specify if the specified item should be highlighted during tracking
override func collectionView(_ collectionView: UICollectionView, shouldHighlightItemAt indexPath: IndexPath) -> Bool {
return true
}
*/
/*
// Uncomment this method to specify if the specified item should be selected
override func collectionView(_ collectionView: UICollectionView, shouldSelectItemAt indexPath: IndexPath) -> Bool {
return true
}
*/
/*
// Uncomment these methods to specify if an action menu should be displayed for the specified item, and react to actions performed on the item
override func collectionView(_ collectionView: UICollectionView, shouldShowMenuForItemAt indexPath: IndexPath) -> Bool {
return false
}
override func collectionView(_ collectionView: UICollectionView, canPerformAction action: Selector, forItemAt indexPath: IndexPath, withSender sender: Any?) -> Bool {
return false
}
override func collectionView(_ collectionView: UICollectionView, performAction action: Selector, forItemAt indexPath: IndexPath, withSender sender: Any?) {
}
*/
func getMovies(){
getRef.collection("movies").addSnapshotListener { (querySnapshot, error) in
if let error = error{
print(error.localizedDescription)
return
}else{
self.movies = [Movie]()
self.movies.removeAll()
for document in querySnapshot!.documents{
let id = document.documentID
let values = document.data()
let name = values["movie_name"] as? String ?? "pelicula"
let rating = values["rating"] as? String ?? "rate"
let duration = values["duration"] as? Int ?? 1
let dummySeat = Seat(seat_number: 1, taken: false)
let seats = values["seats"] as? [Seat] ?? [dummySeat]
let movie = Movie(id: id, movie_name: name, rating: rating, duration: duration, seats: seats)
self.movies.append(movie)
}
self.collectionView.reloadData()
}
}
}
}
|
32c6af73dc91cdc6f7b1b8743d9af6f323955800
|
[
"Swift"
] | 4
|
Swift
|
luluchc89/cinema
|
fc56ba83fc161e446c114f64d2f300e731163946
|
cb8faa6bf9d50b6dd59a766a1b147337a6f681b4
|
refs/heads/master
|
<repo_name>sscien/MAESTER_WDL<file_sep>/AllRScripts/Dockerfile
# Filename: Dockerfile
# Author: <EMAIL>
# Used for https://github.com/vangalenlab/MAESTER-2021
# - briansha/mitochondrial:4.1.0 - deprecated.
# - briansha/maester:4.1.0
FROM ubuntu:18.04
WORKDIR /cromwell_root/
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
zip \
git \
unzip \
gzip \
g++ \
make \
gfortran \
zlib1g-dev \
libgfortran4 \
liblapacke-dev \
libopenblas-dev \
libbz2-dev \
liblzma-dev \
libcurl4-openssl-dev \
libpng-dev \
libjpeg-dev \
zlib1g-dev \
libxml2-dev \
libssl-dev \
libfontconfig1-dev \
libgdal-dev \
libharfbuzz-dev \
libfribidi-dev \
libcairo2-dev \
libxt-dev \
gdebi-core \
ca-certificates
# R 4.1.0
RUN curl -O https://cdn.rstudio.com/r/ubuntu-1804/pkgs/r-4.1.0_1_amd64.deb
RUN DEBIAN_FRONTEND=noninteractive gdebi -n r-4.1.0_1_amd64.deb
RUN ln -s /opt/R/4.1.0/bin/R /usr/local/bin/R
RUN ln -s /opt/R/4.1.0/bin/Rscript /usr/local/bin/Rscript
# HTSLib
ADD https://github.com/samtools/htslib/releases/download/1.12/htslib-1.12.tar.bz2 .
RUN tar -xvf htslib-1.12.tar.bz2 && rm htslib-1.12.tar.bz2
WORKDIR /cromwell_root/htslib-1.12
RUN ./configure --prefix=/usr/local
RUN make
RUN make install
WORKDIR /cromwell_root/
RUN rm -r htslib-1.12
# R packages using a script
#COPY mitochondrial.R .
#RUN Rscript mitochondrial.R
# R packages
RUN R -e "install.packages('tidyverse', dependencies=TRUE, repos = 'http://cran.us.r-project.org')"
RUN R -e "install.packages('Matrix', dependencies=TRUE, repos = 'http://cran.us.r-project.org')"
RUN R -e "install.packages('ggforce', dependencies=TRUE, repos = 'http://cran.us.r-project.org')"
RUN R -e "install.packages('data.table', dependencies=TRUE, repos = 'http://cran.us.r-project.org')"
RUN R -e "install.packages('readxl', dependencies=TRUE, repos = 'http://cran.us.r-project.org')"
RUN R -e "install.packages('Seurat', dependencies=TRUE, repos = 'http://cran.us.r-project.org')"
RUN R -e "install.packages('ggrastr', dependencies=TRUE, repos = 'http://cran.us.r-project.org')"
RUN R -e "if (!requireNamespace('BiocManager', quietly = TRUE)) install.packages('BiocManager', repos = 'http://cran.us.r-project.org'); library(BiocManager); BiocManager::install('ShortRead')"
RUN R -e "if (!requireNamespace('BiocManager', quietly = TRUE)) install.packages('BiocManager', repos = 'http://cran.us.r-project.org'); library(BiocManager); BiocManager::install('SummarizedExperiment')"
RUN R -e "if (!requireNamespace('BiocManager', quietly = TRUE)) install.packages('BiocManager', repos = 'http://cran.us.r-project.org'); library(BiocManager); BiocManager::install('ComplexHeatmap')"
<file_sep>/TrimWithHomer/Dockerfile
# Filename: Dockerfile
# Author: <EMAIL>
# Used for https://github.com/vangalenlab/MAESTER-2021
# - briansha/maester_homer:4.11
FROM ubuntu:18.04
WORKDIR /HOMER/
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
wget \
zip \
git \
unzip \
gzip \
g++ \
make \
gfortran \
zlib1g-dev \
libgfortran4 \
liblapacke-dev \
libopenblas-dev \
libbz2-dev \
liblzma-dev \
libcurl4-openssl-dev \
libpng-dev \
libjpeg-dev \
zlib1g-dev \
gdebi-core \
ca-certificates
# HOMER - http://homer.ucsd.edu/homer/download.html
RUN curl -O http://homer.ucsd.edu/homer/configureHomer.pl
RUN perl configureHomer.pl -install
RUN cp -r bin /usr/local
<file_sep>/TagCbUmi/Dockerfile
# Filename: Dockerfile
# Author: <EMAIL>
# Used for https://github.com/vangalenlab/MAESTER-2021
# - briansha/maester_samtools:1.13
FROM ubuntu:18.04
WORKDIR /cromwell_root/
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
zip \
git \
unzip \
gzip \
g++ \
make \
gfortran \
zlib1g-dev \
libgfortran4 \
liblapacke-dev \
libopenblas-dev \
libbz2-dev \
liblzma-dev \
libcurl4-openssl-dev \
libpng-dev \
libjpeg-dev \
zlib1g-dev \
libncurses5-dev \
gdebi-core \
ca-certificates
# samtools
ADD https://github.com/samtools/samtools/releases/download/1.13/samtools-1.13.tar.bz2 .
RUN tar -xvf samtools-1.13.tar.bz2 && rm samtools-1.13.tar.bz2
WORKDIR /cromwell_root/samtools-1.13
RUN ./configure --prefix=/usr/local
RUN make
RUN make install
WORKDIR /cromwell_root/
RUN rm -r samtools-1.13
<file_sep>/AllRScripts/MtCoverage/210215_FunctionsGeneral.R
# <NAME>, 210215
# General functions for analyses in the MAESTER project
# General
message("cutf()")
cutf <- function(x, f=1, d="/") sapply(strsplit(x, d), function(i) paste(i[f], collapse=d))
# Function that computes all heteroplasmic variants from MAEGATK output (from Caleb Lareau). Rows represents a position along the mitochondrial genome and the three possible disagreements with the reference (except 3107 has four possible disagreements because the reference is N)
message("computeAFMutMatrix()")
computeAFMutMatrix <- function(SE){
cov <- assays(SE)[["coverage"]]+ 0.000001
ref_allele <- as.character(rowRanges(SE)$refAllele)
getMutMatrix <- function(letter){
mat <- (assays(SE)[[paste0(letter, "_counts_fw")]] + assays(SE)[[paste0(letter, "_counts_rev")]]) / cov
rownames(mat) <- paste0(as.character(1:dim(mat)[1]), "_", toupper(ref_allele), ">", letter)
return(mat[toupper(ref_allele) != letter,])
}
rbind(getMutMatrix("A"), getMutMatrix("C"), getMutMatrix("G"), getMutMatrix("T"))
}
<file_sep>/Maegatk/Dockerfile
# Filename: Dockerfile
# Author: <EMAIL>
# Used for https://github.com/vangalenlab/MAESTER-2021
# - briansha/maester_maegatk:v01
FROM ubuntu:18.04
WORKDIR /cromwell_root/
# Install Python3, Java, and other packages.
RUN apt-get update && apt-get install -y --no-install-recommends \
wget \
curl \
zip \
git \
unzip \
gzip \
g++ \
make \
zlib1g-dev \
libgfortran4 \
liblapacke-dev \
libopenblas-dev \
libbz2-dev \
liblzma-dev \
libcurl4-openssl-dev \
libpng-dev \
libjpeg-dev \
zlib1g-dev \
gdebi-core \
ca-certificates \
python3 \
python3-pip \
python3-setuptools \
python3-dev \
default-jre \
libncurses5-dev
# Maegatk seems to attempt to run some things using the python command, when it actually needs to run python3.
# Alias python3 as python.
# https://stackoverflow.com/questions/36388465/how-to-set-bash-aliases-for-docker-containers-in-dockerfile
# Dockerfile - installation of packages uses a non-interactive shell
# WDL command section - using 'alias python=python3' did not work.
# - The symbolic link at the bottom of this script below (ln -s) does.
# Many scripts use the python command often in their installation, such as bedtools using the python command in its make script.
# However, in maegatk's case, it needs to run python3 whenever it wants to run python. (As some packages it wants to use are only available for python3.)
# The RUN command below works, but only for installation of packages onto the docker image.
# - In the WDL command section, it is prone to some errors:
# - /usr/bin/python: 1: /usr/bin/python: -e: not found
# - python3
#RUN echo 'alias python=python3' >> ~/.bashrc
RUN echo -e '#!/bin/bash\necho python3' > /usr/bin/python && \
chmod +x /usr/bin/python
# bwa
RUN git clone https://github.com/lh3/bwa.git
WORKDIR /cromwell_root/bwa
RUN make
RUN cp bwa /usr/local/bin
WORKDIR /cromwell_root/
# bedtools
RUN wget https://github.com/arq5x/bedtools2/releases/download/v2.29.1/bedtools-2.29.1.tar.gz
RUN tar -zxvf bedtools-2.29.1.tar.gz
WORKDIR /cromwell_root/bedtools2
RUN make
WORKDIR /cromwell_root/bedtools2/bin
RUN cp * /usr/local/bin
WORKDIR /cromwell_root/
# maegatk - https://github.com/caleblareau/maegatk
RUN pip3 install maegatk
# The two below need to be in the WDL command section for maegatk to run properly.
# export LC_ALL=C.UTF-8
# export LANG=C.UTF-8
# Libraries maegatk needs - pysam, R 4.1.0, BiocManager (SummarizedExperiment), data.table, setuptools, ruamel_yaml, samtools
# - These are found out not in the documention, but rather when trying to run an analysis...and it fails.
RUN curl -O https://cdn.rstudio.com/r/ubuntu-1804/pkgs/r-4.1.0_1_amd64.deb
RUN DEBIAN_FRONTEND=noninteractive gdebi -n r-4.1.0_1_amd64.deb
RUN ln -s /opt/R/4.1.0/bin/R /usr/local/bin/R
RUN ln -s /opt/R/4.1.0/bin/Rscript /usr/local/bin/Rscript
RUN pip3 install pysam
RUN R -e "install.packages('dplyr', dependencies=TRUE, repos = 'http://cran.us.r-project.org')"
RUN R -e "if (!requireNamespace('BiocManager', quietly = TRUE)) install.packages('BiocManager', repos = 'http://cran.us.r-project.org')"
RUN R -e "BiocManager::install('SummarizedExperiment')"
RUN R -e "install.packages('data.table', dependencies=TRUE, repos = 'http://cran.us.r-project.org')"
RUN pip3 install setuptools
RUN pip3 install ruamel_yaml
ADD https://github.com/samtools/samtools/releases/download/1.13/samtools-1.13.tar.bz2 .
RUN tar -xvf samtools-1.13.tar.bz2 && rm samtools-1.13.tar.bz2
WORKDIR /cromwell_root/samtools-1.13
RUN ./configure --prefix=/usr/local
RUN make
RUN make install
WORKDIR /cromwell_root/
RUN rm -r samtools-1.13
# Remove the python alias - make a symbolic link instead - this will stick.
RUN rm /usr/bin/python
RUN ln -s /usr/bin/python3 /usr/bin/python
<file_sep>/AllRScripts/MtCoverage/1.2_MT_Coverage.R
# <NAME>, 210924
# Plot coverage of chrM from glioblastoma maegatk data objects
# Prerequisites -----------------------------------------------------------------------------------
options(stringsAsFactors = FALSE)
options(scipen = 999)
library(tidyverse)
library(Matrix)
library(ggforce)
library(SummarizedExperiment)
library(dplyr)
#install.packages("data.table")
#library(data.table)
rm(list=ls())
#setwd("~/DropboxPartners/Projects/Maester/AnalysisPeter/1_MT_Coverage")
### Arguments to be provided when executing script
r_script_source <- commandArgs(trailingOnly=TRUE)[1] # Custom function
experiment_name <- commandArgs(trailingOnly=TRUE)[2] # Experiment name.
maegatk_full <- commandArgs(trailingOnly=TRUE)[3] # RDS file from maegatk.
metadata_df <- commandArgs(trailingOnly=TRUE)[4] # Metadata from scRNA-Seq.
# Functions (available at https://github.com/vangalenlab/MAESTER-2021)
source(r_script_source)
# Load locally saved maegatk data (choose one) ----------------------------------------------------
# This data is available at https://vangalenlab.bwh.harvard.edu/maester-2021/
#experiment.name <- "SW_MGH252_A"
#maegatk.full <- readRDS(file = "SW_MGH252_A_mr3_maegatk.rds")
#metadata.df <- read.table("MGH252_NoM_A_C_PBMC_MetaData.txt")
#cellMerge4 <- rownames(metadata.df)
#experiment.name <- "SW_MGH252_C"
#maegatk.full <- readRDS(file = "SW_MGH252_C_mr3_maegatk.rds")
#metadata.df <- read.table("../8_Glioblastoma/Chadi Slack 210819/MGH252_NoM_A_C_PBMC_MetaData.txt")
#cellMerge4 <- rownames(metadata.df)
#experiment.name <- "SW_MGH252_PBMC"
#maegatk.full <- readRDS(file = "SW_MGH252_PBMC_mr3_maegatk.rds")
#metadata.df <- read.table("../8_Glioblastoma/Chadi Slack 210819/MGH252_NoM_A_C_PBMC_MetaData.txt")
#cellMerge4 <- rownames(metadata.df)
experiment.name <- experiment_name
maegatk.full <- readRDS(file = maegatk_full)
metadata.df <- read.table(metadata_df)
cellMerge4 <- rownames(metadata.df)
# Use common cell barcodes for RNAseq and maegatk. This is not done in 1.2_Compare_MT_coverage.R --
# Only keep cells with a cellMerge id that occurs once, intersect, plot
#cellMerge3 <- tibble(cell = cellMerge4) %>% group_by(cell) %>% filter(n()==1) %>% .[,"cell"] %>% cutf(d = "\\.", f=2)
#testing area
#cellMerge3 <- tibble(cell = cellMerge4) %>% group_by(cell) %>% filter(n()==1) %>% .[,"cell"]
#cutf <- function(x, f=1, d="/") sapply(strsplit(x, d), function(i) paste(i[f], collapse=d))
#new_list <- x %>% cutf(d="e", f=1) # If x is "asfef" - grabs everything before the e - "asf"
#x %>% cutf(d="e", f=2) # If x is "asfef" - grabs everything after the e - "f"
#strsplit(paste(c("a", "b", "c"), collapse="what"), split="#") # "awhatbwhatc"
#test <- strsplit(paste(c("a", "b", "c"), collapse="what"), split="what") # "a" "b" "c"
#cellMerge3 <- cellMerge3[,"cell"] %>% cutf(d = "\\.", f=2) # Error - non-character argument supplied
#cellMerge3 <- cellMerge3$cell %>% cutf(d = "\\.", f=2) # No error - completes
#end testing area
# Original
#cellMerge3 <- tibble(cell = cellMerge4) %>% group_by(cell) %>% filter(n()==1) %>% .$cell %>% cutf(d = "\\.", f=2)
# 6180 - ours
cellMerge3 <- tibble(cell = cellMerge4) %>% group_by(cell) %>% filter(n()==1) %>% .$cell
# Original - Adds a "-1" - AACAACATGAA-1
#cellMerge2 <- str_c(cellMerge3, "-1")
# 6180 - ours
cellMerge2 <- cellMerge3
cellMerge1 <- intersect(cellMerge2, colnames(maegatk.full))
# Barplot
pdf(file = paste0(experiment.name, "_plots.pdf"))
barplot_name <- paste(experiment.name, "barplot.png", sep="_")
#png(barplot_name)
barplot(c(length(cellMerge3), length(cellMerge2), length(cellMerge1)), ylim = c(0, length(cellMerge3)), ylab = "cell number")
axis(side = 1, at = c(0.7, 1.9, 3.1), labels = c("RNAseq_all", "occur_once", "common"))
#dev.off()
# Subset maegatk data for high quality cells
maegatk <- maegatk.full[,cellMerge1]
ncol(maegatk.full); ncol(maegatk)
# Plot coverage per position ----------------------------------------------------------------------
# Set y axis parameters
ymax <- 200
# Gene locations
GenePos.tib <- tibble(Names = c("MT.ATP6", "MT.ATP8", "MT.CO1", "MT.CO2", "MT.CO3", "MT.CYB", "MT.ND1", "MT.ND2", "MT.ND3",
"MT.ND4", "MT.ND4L", "MT.ND5", "MT.ND6", "MT.RNR1", "MT.RNR2"),
start = c(8527, 8366, 5904, 7586, 9207, 14747, 3307, 4470, 10059, 10760, 10470, 12337, 14149, 648, 1671),
end = c(9207, 8572, 7445, 8269, 9990, 15887, 4262, 5511, 10404, 12137, 10766, 14148, 14673, 1601, 3229))
GenePos.tib <- GenePos.tib %>% arrange(start) %>%
mutate(mid = round((end-start)/2+start,0), ycoord = rep(c(ymax*1.2,ymax*1.1), length.out = 15))
# Plot
base.tib <- tibble(base = 1:16569, depth = rowMeans(assays(maegatk)[["coverage"]]))
mean_coverage_name <- paste(experiment.name, "mean_coverage.png", sep="_")
#png(mean_coverage_name)
print(
base.tib %>% ggplot() +
geom_bar(aes(x = base, y = ifelse(depth > 1, yes = depth, no = NA)), stat = "identity", fill = "#64b53b", width = 1) +
#geom_bar(aes(x = base, y = ifelse(rnaseq_depth > 1, yes = rnaseq_depth, no = NA)), stat = "identity", fill = "#fdcb25", width = 1) +
coord_cartesian(ylim = c(1, ymax), xlim = c(700, 15900)) +
scale_y_continuous(trans = "log10") +
geom_segment(data = GenePos.tib, aes(x = start, y = ycoord, xend = end, yend = ycoord)) +
geom_text(data = GenePos.tib, aes(x = mid, y = ycoord-ymax*0.2, label = cutf(Names, d = "\\.", f = 2)), size = 3) +
ylab("Mean coverage per cell") + xlab("Position along chrM") +
theme_classic() +
theme(aspect.ratio = 0.5)
)
#dev.off()
# Plot mean coverage for top 500 cells ------------------------------------------------------------
cells.tib <- tibble(cell = colnames(maegatk),
depth = maegatk$depth)
topcells.tib <- cells.tib %>% slice_max(order_by = depth, n = 500)
mean_coverage_top_500_name <- paste(experiment.name, "mean_coverage_top_500.png", sep="_")
#png(mean_coverage_top_500_name)
print(
ggplot(topcells.tib, aes(x = 1, y = depth)) +
geom_violin() +
geom_sina(size = 0.3) +
coord_cartesian(ylim = c(0.1, 800)) +
scale_y_continuous(trans = "log10") +
ylab("Mean coverage per cell") + xlab("") +
annotate("text", x = 1, y = max(topcells.tib$depth)*1.5,
label = round(mean(topcells.tib$depth), 2)) +
theme_classic() +
theme(aspect.ratio = 2, plot.title = element_text(hjust = 0.5)) +
ggtitle("Mean coverage of top 500 cells")
)
#dev.off()
# Plot mean depth for top 5000 bases --------------------------------------------------------------
top.tib <- base.tib %>% arrange(desc(depth)) %>% mutate(key = row_number(), .before = 1) %>% filter(key %in% 1:5000)
mean_depth_top_5000_name <- paste(experiment.name, "mean_depth_top_5000.png", sep="_")
#png(mean_depth_top_5000_name)
print(
ggplot(top.tib) +
geom_bar(aes(x = key, y = depth), stat = "identity", fill = "#64b53b", width = 1) +
coord_cartesian(ylim = c(1, ymax)) +
scale_y_continuous(trans = "log10") +
geom_label(data = data.frame(), aes(x = 2500, y = mean(top.tib$depth), label = round(mean(top.tib$depth), 2)),
fill = "#64b53b") +
ylab("Mean coverage per cell") + xlab("Rank sorted position") +
ggtitle("Mean coverage of top 5000 bases") +
theme_classic() +
theme(aspect.ratio = 2)
)
dev.off()
<file_sep>/TagCbUmi/Tag_CB_UMI.sh
#!/bin/bash
# <NAME>, 191004
# Move cell barcode (CB) and unique molecular identifier (UMI) from read identifier to sam tags.
# Resulting bam file will have tags for cell barcode (CB) and UMI (UB) as per 10X convention, https://support.10xgenomics.com/single-cell-gene-expression/software/pipelines/latest/output/bam
# These tags are required to run maegatk software
# Example execution:
# Tag_CB_UMI.PvG191004.sh <bam>
# use -q Samtools
# First variable is bam file to convert
INPUT=$1
# Second variable is bam file to write (automatically named)
OUTPUT="$(echo "${INPUT/bam/10x.bam}")"
echo "Converting $INPUT into $OUTPUT..."
samtools view -h $INPUT | awk 'BEGIN{FS="\t"; OFS="\t"} {
if (substr($1,1,1) == "@") {
print $0
} else {
split($1, a, "_")
$1=""
print a[1]"_"a[2]$0"\tCB:Z:"a[3]"-1\tUB:Z:"a[4]
} }' | samtools view -bh > $OUTPUT
echo "Done!"
date
exit 0
<file_sep>/STAR/Dockerfile
# Filename: Dockerfile
# Author: <EMAIL>
# Used for https://github.com/vangalenlab/MAESTER-2021
# - briansha/star:2.7.9
FROM ubuntu:18.04
WORKDIR /cromwell_root/
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
zip \
git \
unzip \
gzip \
g++ \
make \
zlib1g-dev \
libgfortran4 \
liblapacke-dev \
libopenblas-dev \
libbz2-dev \
liblzma-dev \
libcurl4-openssl-dev \
libpng-dev \
libjpeg-dev \
zlib1g-dev \
ca-certificates
# STAR
RUN git clone https://github.com/alexdobin/STAR.git
WORKDIR /cromwell_root/STAR/source
RUN make STAR
RUN cp STAR /usr/local/bin
WORKDIR /cromwell_root/
<file_sep>/README.md
# MAESTER WDL
This is a workflow for MAESTER.
Written using WDL - https://github.com/openwdl/wdl/blob/main/versions/development/SPEC.md#scatter
MAESTER: https://github.com/vangalenlab/MAESTER-2021
Dockerfiles are provided.
|
452282e7656a053744c302f2e684ea68a42d364e
|
[
"Markdown",
"R",
"Dockerfile",
"Shell"
] | 9
|
Dockerfile
|
sscien/MAESTER_WDL
|
ef6f56cb50b8a4fbd13ee06c70281c8dc24b4b38
|
1f04254485a21e1efa6579931ced55ea94ec904b
|
refs/heads/master
|
<file_sep>package com.app.hr.dao;
import com.app.hr.model.Employee;
import org.springframework.data.jpa.datatables.repository.DataTablesRepository;
import org.springframework.transaction.annotation.Transactional;
/**
* @Author fz
* @Date 2017-08-16 16:30
*/
@Transactional
public interface EmployeeDao extends DataTablesRepository<Employee, Long> {
}
<file_sep>package com.app.hr.model;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;
import javax.persistence.*;
import java.io.Serializable;
import java.util.Date;
/**
* @Author fz
* @Date 2017-08-14 18:30
*/
@Entity
public class PersonInfo implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy= GenerationType.AUTO)
private int personInfoId;
/** Name **/
@Column
private String name;
/** Nickname **/
@Column
private String nickname;
/** Gender **/
@Column
private int sex;
/** Birthday **/
@Column
private Date birthday;
/** Lunar-calendar birthday **/
@Column
private Date lunarBirthday;
/** Age **/
@Column
private int age;
/** Education level **/
@Column
private int degree;
/** Marital status **/
@Column
private int marriage;
/** Mobile phone number **/
@Column
private int mobile;
/** Email **/
@Column
private String email;
/** Current address **/
@Column
private String address;
/** ID card number **/
@Column
private String idNumber;
/** Graduating school **/
@Column
private String graduateSchool;
/** Political affiliation **/
@Column
private String politicalExpierence;
/** Registered residence (hukou) address **/
@Column
private String censusRegisterAddress;
/** Household registration type **/
@Column
private int censusRegisterType;
/** Tags **/
@Column
private String tags;
/** QQ number **/
@Column
private String qqNum;
/** WeChat ID **/
@Column
private String weichatNum;
/** Date of starting work **/
@Column
private Date startWorkDate;
/** Employee type **/
@Column
private int type;
/** Status **/
@Column
private int status;
@CreationTimestamp
private Date recCreateTime;
@UpdateTimestamp
private Date recCreateUser;
public int getPersonInfoId() {
return personInfoId;
}
public void setPersonInfoId(int personInfoId) {
this.personInfoId = personInfoId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getNickname() {
return nickname;
}
public void setNickname(String nickname) {
this.nickname = nickname;
}
public int getSex() {
return sex;
}
public void setSex(int sex) {
this.sex = sex;
}
public Date getBirthday() {
return birthday;
}
public void setBirthday(Date birthday) {
this.birthday = birthday;
}
public Date getLunarBirthday() {
return lunarBirthday;
}
public void setLunarBirthday(Date lunarBirthday) {
this.lunarBirthday = lunarBirthday;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public int getDegree() {
return degree;
}
public void setDegree(int degree) {
this.degree = degree;
}
public int getMarriage() {
return marriage;
}
public void setMarriage(int marriage) {
this.marriage = marriage;
}
public int getMobile() {
return mobile;
}
public void setMobile(int mobile) {
this.mobile = mobile;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public String getIdNumber() {
return idNumber;
}
public void setIdNumber(String idNumber) {
this.idNumber = idNumber;
}
public String getGraduateSchool() {
return graduateSchool;
}
public void setGraduateSchool(String graduateSchool) {
this.graduateSchool = graduateSchool;
}
public String getPoliticalExpierence() {
return politicalExpierence;
}
public void setPoliticalExpierence(String politicalExpierence) {
this.politicalExpierence = politicalExpierence;
}
public String getCensusRegisterAddress() {
return censusRegisterAddress;
}
public void setCensusRegisterAddress(String censusRegisterAddress) {
this.censusRegisterAddress = censusRegisterAddress;
}
public int getCensusRegisterType() {
return censusRegisterType;
}
public void setCensusRegisterType(int censusRegisterType) {
this.censusRegisterType = censusRegisterType;
}
public String getTags() {
return tags;
}
public void setTags(String tags) {
this.tags = tags;
}
public String getQqNum() {
return qqNum;
}
public void setQqNum(String qqNum) {
this.qqNum = qqNum;
}
public String getWeichatNum() {
return weichatNum;
}
public void setWeichatNum(String weichatNum) {
this.weichatNum = weichatNum;
}
public Date getStartWorkDate() {
return startWorkDate;
}
public void setStartWorkDate(Date startWorkDate) {
this.startWorkDate = startWorkDate;
}
public int getType() {
return type;
}
public void setType(int type) {
this.type = type;
}
public int getStatus() {
return status;
}
public void setStatus(int status) {
this.status = status;
}
}
<file_sep>package com.app.hr.controller;
import com.app.hr.dao.PersonInfoDao;
import com.app.hr.model.PersonInfo;
import com.fasterxml.jackson.annotation.JsonView;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.jpa.datatables.mapping.DataTablesInput;
import org.springframework.data.jpa.datatables.mapping.DataTablesOutput;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import javax.validation.Valid;
import java.util.List;
/**
* @Author fz
* @Date 2017-08-14 20:02
*/
@Controller
public class PersonInfoController {
@Autowired
private PersonInfoDao personInfoDao;
@RequestMapping("/getbyname")
@ResponseBody
public String getByName(String name) {
// String userId;
// PersonInfo user = personInfoDao.findByName(name);
// if (user != null) {
// userId = String.valueOf(user.getId());
// return "The user id is: " + userId;
// }
// return "user " + name + " is not exist.";
return null;
}
@RequestMapping("/addpersion")
@ResponseBody
public String getPerson() {
PersonInfo user = new PersonInfo();
user.setName("张三");
PersonInfo user2 = personInfoDao.save(user);
if (user2 != null) {
return "The user id is: " + user2.getPersonInfoId();
}
return "user id is not exist.";
}
// @RequestMapping("/article-list")
// public String listArticle(ModelMap map) {
// List<PersonInfo> test = personInfoDao.findByName("张三");
// map.addAttribute("persons", test);
// return "article-list";
// }
@RequestMapping("/index")
public String index(ModelMap map) {
return "index";
}
@ResponseBody
@RequestMapping("/all")
public DataTablesOutput<PersonInfo> querytest(@Valid DataTablesInput input) {
PersonInfo test = personInfoDao.findByName("cc");
return null;
}
}
<file_sep>package com.app.hr.model;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
/**
* Department (organizational unit)
*
* @author fz
* @date 2015-11-3
*/
@Entity
public class Department implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
@Column(name = "departmentId", nullable = false)
private int departmentId;
// Department code
@Column(name = "deptCode")
private String deptCode;
// Department name
@Column(name = "deptName")
private String deptName;
// Parent department
@Column(name = "parentDeptCode")
private String parentDeptCode;
// Hierarchy level
@Column(name = "level")
private String level;
// Active flag
@Column(name = "aliveFlag")
private boolean aliveFlag;
// Flag
@Column(name = "flag")
private String flag;
// Flag 1
@Column(name = "flag1")
private String flag1;
// Flag 2
@Column(name = "flag2")
private String flag2;
// Flag 3
@Column(name = "flag3")
private String flag3;
// Remark
@Column(name = "remark")
private String remark;
// Remark 1
@Column(name = "remark1")
private String remark1;
// Remark 2
@Column(name = "remark2")
private String remark2;
// Remark 3
@Column(name = "remark3")
private String remark3;
// Remark 4
@Column(name = "remark4")
private String remark4;
@CreationTimestamp
private Date recCreateTime;
@UpdateTimestamp
private Date recCreateUser;
// @ManyToOne
// @JoinColumn(name = "parentId")
// private Department parent;
//
// @Transient
// @OneToMany(mappedBy = "parent",fetch = FetchType.LAZY)
// private Set<Department> children;
public int getDepartmentId() {
return departmentId;
}
public void setDepartmentId(int departmentId) {
this.departmentId = departmentId;
}
public String getDeptCode() {
return deptCode;
}
public void setDeptCode(String deptCode) {
this.deptCode = deptCode;
}
public String getDeptName() {
return deptName;
}
public void setDeptName(String deptName) {
this.deptName = deptName;
}
public String getParentDeptCode() {
return parentDeptCode;
}
public void setParentDeptCode(String parentDeptCode) {
this.parentDeptCode = parentDeptCode;
}
public String getLevel() {
return level;
}
public void setLevel(String level) {
this.level = level;
}
public boolean isAliveFlag() {
return aliveFlag;
}
public void setAliveFlag(boolean aliveFlag) {
this.aliveFlag = aliveFlag;
}
public String getFlag() {
return flag;
}
public void setFlag(String flag) {
this.flag = flag;
}
public String getFlag1() {
return flag1;
}
public void setFlag1(String flag1) {
this.flag1 = flag1;
}
public String getFlag2() {
return flag2;
}
public void setFlag2(String flag2) {
this.flag2 = flag2;
}
public String getFlag3() {
return flag3;
}
public void setFlag3(String flag3) {
this.flag3 = flag3;
}
public String getRemark() {
return remark;
}
public void setRemark(String remark) {
this.remark = remark;
}
public String getRemark1() {
return remark1;
}
public void setRemark1(String remark1) {
this.remark1 = remark1;
}
public String getRemark2() {
return remark2;
}
public void setRemark2(String remark2) {
this.remark2 = remark2;
}
public String getRemark3() {
return remark3;
}
public void setRemark3(String remark3) {
this.remark3 = remark3;
}
public String getRemark4() {
return remark4;
}
public void setRemark4(String remark4) {
this.remark4 = remark4;
}
}
|
7af8d0eb91153363905490f11afc41491a63cfec
|
[
"Java"
] | 4
|
Java
|
jsnjfz/MyHR
|
49ab732cd644ca868cf15332b385845e5fbfc5f7
|
986628b1f37b6ed2cfc8e2672c7833e455098187
|
refs/heads/master
|
<file_sep># lego
Simple library to create Lego panels from images.
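## Usage

A minimal usage sketch based on the exported API in `lego.go`. The import path is assumed from the repository name, and the file names and parameter values (`input.png`, `panel.png`, width 48, scale 10) are only illustrative:

```go
package main

import (
	"fmt"
	"image"
	_ "image/jpeg" // register the JPEG decoder for image.Decode
	"image/png"    // PNG decoder/encoder
	"log"
	"os"

	"github.com/mrbubble/lego" // assumed import path
)

func main() {
	// Decode the source image.
	in, err := os.Open("input.png")
	if err != nil {
		log.Fatal(err)
	}
	defer in.Close()
	img, _, err := image.Decode(in)
	if err != nil {
		log.Fatal(err)
	}

	// Build a 48-stud-wide panel from the basic brick set, with dithering.
	panel := lego.NewPanel(img, &lego.Options{
		Width:  48,
		Bricks: lego.BASIC_BRICKS,
		Dither: true,
	})

	// Render a preview at 10 pixels per stud, with brick outlines, and save it.
	out, err := os.Create("panel.png")
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()
	if err := png.Encode(out, panel.Draw(10, true)); err != nil {
		log.Fatal(err)
	}

	// Print the list of bricks needed to build the panel.
	for brick, count := range panel.CountBricks() {
		fmt.Printf("%d x %s\n", count, brick)
	}
}
```

Passing `lego.ALL_BRICKS` instead of `lego.BASIC_BRICKS` enables the extended colour palette.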
<file_sep>// Copyright 2014 Leonardo "Bubble" Mesquita
package lego
import (
"fmt"
"github.com/nfnt/resize"
"image"
"image/color"
"image/draw"
)
type Color struct {
name string
color color.Color
}
var (
// Names and values retrieved from:
// http://www.peeron.com/cgi-bin/invcgis/colorguide.cgi
// Selected colors from http://shop.lego.com that are available for
// 1x1 bricks, so any images are doable.
WHITE = Color{"White (#1)", color.NRGBA{242, 243, 242, 255}}
BRIGHT_RED = Color{"Bright red (#21)", color.NRGBA{196, 40, 27, 255}}
BRIGHT_BLUE = Color{"Bright blue (#23)", color.NRGBA{13, 105, 171, 255}}
BLACK = Color{"Black (#26)", color.NRGBA{27, 42, 52, 255}}
DARK_GREEN = Color{"Dark green (#28)", color.NRGBA{40, 127, 70, 255}}
BRIGHT_YELLOW = Color{"Bright yellow (#24)", color.NRGBA{245, 205, 47, 255}}
BRICK_YELLOW = Color{"Brick yellow (#5)", color.NRGBA{215, 197, 153, 255}}
BRIGHT_ORANGE = Color{"Bright orange (#106)", color.NRGBA{218, 133, 64, 255}}
MEDIUM_BLUE = Color{"Medium blue (#102)", color.NRGBA{110, 153, 201, 255}}
DARK_STONE_GREY = Color{"Dark stone grey (#199)", color.NRGBA{99, 95, 97, 255}}
REDDISH_BROWN = Color{"Reddish brown (#192)", color.NRGBA{105, 64, 39, 255}}
MEDIUM_STONE_GREY = Color{"Medium stone grey (#194)", color.NRGBA{163, 162, 164, 255}}
BRIGHT_YELLOWISH_GREEN = Color{"Bright yellowish green (#119)", color.NRGBA{164, 189, 70, 255}}
LIGHT_PURPLE = Color{"Light purple (#222)", color.NRGBA{228, 173, 200, 255}}
BRIGHT_REDDISH_VIOLET = Color{"Bright reddish violet (#124)", color.NRGBA{146, 57, 120, 255}}
)
func (c *Color) Name() string {
return c.name
}
func (c *Color) Color() color.Color {
return c.color
}
type Brick struct {
Size image.Point
Color Color
}
func generateBricks(shapes []image.Point, colors ...Color) []*Brick {
var result []*Brick
for _, color := range colors {
for _, shape := range shapes {
result = append(result, &Brick{shape, color})
}
}
return result
}
var (
basicShapes = []image.Point{
{1, 1}, {1, 2}, {1, 4}, {2, 2}, {2, 4},
}
BASIC_BRICKS = generateBricks(basicShapes, WHITE, BRIGHT_RED, BRIGHT_BLUE,
BLACK, DARK_GREEN, BRIGHT_YELLOW, BRICK_YELLOW, BRIGHT_ORANGE,
)
ADVANCED_BRICKS = append(
generateBricks(basicShapes, DARK_STONE_GREY, REDDISH_BROWN,
MEDIUM_STONE_GREY, BRIGHT_YELLOWISH_GREEN, LIGHT_PURPLE),
append(generateBricks([]image.Point{{1, 1}, {1, 2}, {1, 4}}, MEDIUM_BLUE),
generateBricks([]image.Point{{1, 1}, {1, 2}}, BRIGHT_REDDISH_VIOLET)...,
)...,
)
ALL_BRICKS = append(BASIC_BRICKS, ADVANCED_BRICKS...)
)
func (b Brick) String() string {
return fmt.Sprintf("%dx%d %s", b.Size.X, b.Size.Y, b.Color.name)
}
func (b Brick) canonical() Brick {
if b.Size.X <= b.Size.Y {
return b
}
return Brick{image.Point{b.Size.Y, b.Size.X}, b.Color}
}
type Panel struct {
bricks map[image.Point]*Brick
bounds image.Rectangle
}
type Options struct {
Width uint
Bricks []*Brick
Dither bool
}
type helper struct {
visited map[image.Point]bool
panel *Panel
bricks map[Brick]bool
img image.Image
}
func newHelper(bricks []*Brick, img image.Image, p *Panel) *helper {
ret := &helper{
visited: make(map[image.Point]bool),
panel: p,
bricks: make(map[Brick]bool),
img: img,
}
for _, brick := range bricks {
ret.bricks[*brick] = true
}
return ret
}
func (h *helper) fit(p image.Point, brick Brick) bool {
for y := 0; y < brick.Size.Y; y++ {
for x := 0; x < brick.Size.X; x++ {
pt := p.Add(image.Point{x, y})
if h.visited[pt] {
return false
}
if h.img.At(pt.X, pt.Y) != brick.Color.color {
return false
}
}
}
return true
}
func (h *helper) placeBrick(p image.Point, color Color) {
if h.visited[p] {
return
}
for i := range basicShapes {
shape := basicShapes[len(basicShapes)-1-i]
brick := Brick{shape, color}
if !h.bricks[brick] {
continue
}
if !h.fit(p, brick) {
if shape.X == shape.Y {
continue
}
brick = Brick{image.Point{shape.Y, shape.X}, color}
if !h.fit(p, brick) {
continue
}
}
for y := 0; y < brick.Size.Y; y++ {
for x := 0; x < brick.Size.X; x++ {
h.visited[p.Add(image.Point{x, y})] = true
}
}
h.panel.bricks[p] = &brick
return
}
panic("Impossible fit")
}
func NewPanel(img image.Image, opt *Options) *Panel {
scale := float64(opt.Width) / float64(img.Bounds().Dx())
height := uint(scale * float64(img.Bounds().Dy()))
var palette color.Palette
m := make(map[color.Color]Color)
for _, brick := range opt.Bricks {
if _, ok := m[brick.Color.color]; !ok {
m[brick.Color.color] = brick.Color
palette = append(palette, brick.Color.color)
}
}
src := resize.Resize(opt.Width, height, img, resize.Lanczos3)
dst := image.NewPaletted(src.Bounds(), palette)
if opt.Dither {
draw.FloydSteinberg.Draw(dst, dst.Bounds(), src, src.Bounds().Min)
} else {
draw.Draw(dst, dst.Bounds(), src, src.Bounds().Min, draw.Src)
}
ret := &Panel{make(map[image.Point]*Brick), dst.Bounds()}
helper := newHelper(opt.Bricks, dst, ret)
for y := dst.Bounds().Min.Y; y < dst.Bounds().Max.Y; y++ {
for x := dst.Bounds().Min.X; x < dst.Bounds().Max.X; x++ {
helper.placeBrick(image.Point{x, y}, m[dst.At(x, y)])
}
}
return ret
}
func (p *Panel) Draw(scale int, outline bool) image.Image {
out := image.NewNRGBA(image.Rectangle{image.ZP, p.bounds.Size().Mul(scale)})
draw.Draw(out, out.Bounds(), &image.Uniform{color.White}, image.ZP, draw.Src)
for pos, brick := range p.bricks {
min := pos.Mul(scale)
max := min.Add(brick.Size.Mul(scale))
if outline {
draw.Draw(out, image.Rectangle{min, max}, &image.Uniform{color.NRGBA{0, 0, 0, 255}},
image.ZP, draw.Src)
min = min.Add(image.Point{1, 1})
max = max.Sub(image.Point{1, 1})
draw.Draw(out, image.Rectangle{min, max}, &image.Uniform{color.NRGBA{255, 255, 255, 255}},
image.ZP, draw.Src)
min = min.Add(image.Point{1, 1})
max = max.Sub(image.Point{1, 1})
}
draw.Draw(out, image.Rectangle{min, max}, &image.Uniform{brick.Color.color},
image.ZP, draw.Src)
}
return out
}
func (p *Panel) Size() image.Point {
return p.bounds.Size()
}
func (p *Panel) CountBricks() map[Brick]int {
result := make(map[Brick]int)
for _, brick := range p.bricks {
result[brick.canonical()] += 1
}
return result
}
|
8485109f2bfede86d9e198f8d193404ee1275149
|
[
"Markdown",
"Go"
] | 2
|
Markdown
|
mrbubble/lego
|
f9f36cb48b17568f0d18524cc8e52502c180b152
|
61e8aadc8f3df8b923a1670173a54482bf93aa2b
|
refs/heads/master
|
<file_sep>#include<stdio.h>
int main(void)
{
long ns, nt, nl; // number of spaces, tabs, and newlines
int c;           // int (not char) so that EOF can be detected reliably
ns = nt = nl = 0;
while ((c = getchar()) != EOF)
{
if (c == ' ')
{
ns++;
}
else if (c == '\t')
{
nt++;
}
else if (c == '\n')
{
nl++;
}
}
printf("new space = %ld, new line = %ld, new tab = %ld", ns, nl, nt);
return 0;
}<file_sep>#include<stdio.h>
#define OUT 0
#define IN 1
int main(void)
{
int c, state, nc, nw, nl;
nc = nw = nl = 0;
state = OUT;
while ((c = getchar()) != EOF)
{
nc++;
if (c == '\n')
{
nl++;
}
if (c == ' ' || c == '\t' || c == '\n')
{
if (state == IN)
{
printf("\n");
state = OUT;
}
}
else if (state == OUT)
{
state = IN;
nw++;
putchar(c);
}
else
{
putchar(c);
}
}
return 0;
}<file_sep>#include<stdio.h>
#define LOWER 0
#define UPPER 300
#define STEP 20
int main(void)
{
int fahr;
for (fahr = LOWER; fahr <= UPPER; fahr += STEP)
{
printf("%6d %6.1f\n", fahr, (5.0 / 9.0) * (fahr - 32));
}
return 0;
}<file_sep>## The C Programming Language ##
Demos and exercises written while reading *The C Programming Language*.
<file_sep>// The first C program
#include<stdio.h>
int main(void)
{
printf("hello world\n");
// A second way of writing it
printf("hello, ");
printf("world");
printf("\n");
return 0;
}
<file_sep>/* strcat: append the string from[] onto the end of to[] */
void strcat(char from[], char to[])
{
    int i = 0, j = 0;
    while (to[i] != '\0')                   /* find the end of to[] */
        i++;
    while ((to[i++] = from[j++]) != '\0')   /* copy from[], including the closing '\0' */
        ;
}<file_sep>#include<stdio.h>
#define STEP 20
#define UPPER 300
#define LOWER 0
int convert();
int main(void)
{
convert();
return 0;
}
int convert()
{
double fahr;
for (fahr = LOWER; fahr <= UPPER; fahr += STEP)
{
printf("%6.1f %6.1f\n", fahr, (5.0 / 9.0) * (fahr - 32));
}
return 0;
}
<file_sep>#include<stdio.h>
int main(void)
{
int prec = 0, c;
while ((c = getchar()) != EOF)
{
if (prec == ' ' && c == prec)
{
continue;
}
else
{
prec = c;
putchar(c);
}
}
return 0;
}<file_sep>// Exercise: print all input lines longer than 80 characters (skeleton, not yet implemented)
#include<stdio.h>
int getLin(char s[]);
int main(void)
{
    /* TODO: read lines with getLin() and print those longer than 80 characters */
    return 0;
}
int getLin(char s[])
{
    (void)s;      /* TODO: read a line into s and return its length */
    return 0;
}<file_sep>#include<stdio.h>
#define MAXLINE 1000
int main(void)
{
char s[MAXLINE];
for(int i = 0; i < MAXLINE-1; i++)
{
scanf("%c", &s[i]);
}
return 0;
}
// strlen function
int strlen(char s[])
{
int i = 0;
while(s[i] != '\0')
{
i++;
}
return i;
}<file_sep>// Solution to the error: count the characters that are not blanks, tabs, or newlines
#include<stdio.h>
#define IN 1
#define OUT 0
int main(void)
{
int c, state = OUT;
int nc = 0;
while ((c = getchar()) != EOF)
{
if (c == '\n' || c == ' ' || c == '\t')
{
state = OUT;
}
else if (state == OUT)
{
state = IN;
}
if (state == IN)
{
nc++;
}
}
printf("%d\n", nc);   /* characters that are not blanks, tabs, or newlines */
return 0;
}<file_sep>// Print a Celsius-to-Fahrenheit conversion table
#include<stdio.h>
int main(void)
{
float cel, fahr;
float lower, upper, step;
lower = 0;
upper = 300;
step = 20;
cel = lower;
while (cel <= 300)
{
fahr = cel * 9.0 / 5.0 + 32;
printf("%6.1f %6.1f\n", cel, fahr);
cel += step;
}
return 0;
}<file_sep>#include<string.h>
void reverse(char s[])
{
    /* reverse: reverse string s in place */
    int c, i, j;
    for (i = 0, j = strlen(s) - 1; i < j; i++, j--)
    {
        c = s[i];
        s[i] = s[j];
        s[j] = c;
    }
}<file_sep>/*
Print a Fahrenheit-Celsius conversion table for fahr = 0, 20, ..., 300
*/
#include<stdio.h>
/*
int main(void)
{
int fahr, celsius;
int lower, upper, step;
lower = 0;
upper = 300;
step = 20;
fahr = lower;
while (fahr <= upper)
{
celsius = 5 * (fahr - 32) / 9;
//printf("%d\t%d\n", fahr, celsius);
printf("%3d %6d\n", fahr, celsius);//输出数字右对齐
fahr = fahr + step;
}
return 0;
}*/
// The version above is not rigorous because it uses int variables; the improved version follows
int main(void)
{
float fahr, celsius;
float lower, upper, step;
lower = 0;
upper = 300;
step = 20;
fahr = lower;
while (fahr <= upper)
{
celsius = (5.0 / 9.0) * (fahr - 32);// 5.0 / 9.0 is floating-point division, so the result is not truncated
printf("%3.0f %6.1f\n", fahr, celsius);//输出数字右对齐
fahr = fahr + step;
}
return 0;
}<file_sep>#include<stdio.h>
/* atoi: convert the leading decimal digits of s to an int */
int atoi(char s[])
{
    int n = 0;
    for (int i = 0; s[i] >= '0' && s[i] <= '9'; i++)
    {
        n = 10 * n + (s[i] - '0');
    }
    return n;
}
int main(void)
{
    char s[10];
    scanf("%9s", s);            /* limit input so s cannot overflow */
    printf("%d\n", atoi(s));
    return 0;
}<file_sep>#include<stdio.h>
int power(int base, int n);
int power2(int base, int n);
int main(void)
{
for (int i = 0; i < 10; i++)
{
printf("%6d %6d %6d\n", i, power(2, i), power(-3, i));
}
printf("-------------------------------------------\n");
for (int i = 0; i < 10; i++)
{
printf("%6d %6d %6d\n", i, power2(2, i), power2(-3, i));
}
return 0;
}
// Version 1
int power(int base, int n)
{
int result = 1;
if (n == 0)
{
return 1;
}
else
{
for (int i = 0; i < n; i++)
{
result *= base;
}
}
return result;
}
// Version 2
int power2(int base, int n)
{
int result;
if (n == 0)
{
return 1;
}
else
{
for (result = 1; n > 0; --n)
{
result *= base;
}
}
return result;
}
}<file_sep>/* binsearch: find target in sorted v[0] <= v[1] <= ... <= v[n-1]; return its index, or -1 if absent */
int binsearch(int target, int v[], int n)
{
    int low = 0, high = n - 1;
    while (low <= high)
    {
        int mid = (low + high) / 2;
        if (target < v[mid])
            high = mid - 1;
        else if (target > v[mid])
            low = mid + 1;
        else
            return mid;
    }
    return -1;
}<file_sep>#include<stdio.h>
int main(void)
{
float fahr, celsius;
float lower, upper, step;
lower = 0;
upper = 300;
step = 20;
fahr = lower;
printf("convert C° to F°\n");
while (fahr <= upper)
{
celsius = (5.0 / 9.0) * (fahr - 32);// 5.0 / 9.0 is floating-point division, so the result is not truncated
printf("%3.0f %6.1f\n", fahr, celsius);//输出数字右对齐
fahr = fahr + step;
}
return 0;
}
|
9c039b21574b479efcaf8939a99c6b4449c60ecf
|
[
"Markdown",
"C"
] | 18
|
C
|
debugyes/The-c-programming-language-src
|
61619ed537caa5f9276bd5f04564f0decdd75f34
|
8b1852f640100534a00f4f06738ec3968e3abb95
|
refs/heads/master
|
<file_sep>## one-dimensional-array
One-Dimensional array ADT written in C++.
## Motivation
This project was created for further developing my understanding of the mechanics of a STL vector.
<file_sep>#include "Exception.h"
Exception::Exception()
: m_msg(nullptr)
{
}
Exception::Exception(const char * msg)
{
m_msg = new char[strlen(msg) + 1];
strcpy(m_msg, msg);
}
Exception::Exception(const Exception & copy)
{
this->m_msg = new char[strlen(copy.m_msg) + 1];
strcpy(this->m_msg, copy.m_msg);
}
Exception::~Exception()
{
if (m_msg != nullptr)
delete[] m_msg;
}
Exception & Exception::operator= (const Exception & rhs)
{
if (this != &rhs)
{
if (this->m_msg != nullptr)
delete[] this->m_msg;
this->m_msg = new char[strlen(rhs.m_msg) + 1];
strcpy(this->m_msg, rhs.m_msg);
}
return *this;
}
const char * Exception::getMessage()
{
return this->m_msg;
}
void Exception::setMessage(const char * msg)
{
if (this->m_msg != nullptr)
delete[] this->m_msg;
this->m_msg = new char[strlen(msg) + 1];
strcpy(this->m_msg, msg);
}
ostream & operator<< (ostream & stream, const Exception & except)
{
stream << except.m_msg << '\n';
return stream;
}<file_sep>#include "pch.h"
#include "CppUnitTest.h"
#include "../One_Dimensional_Array/Array.h"
#include "../One_Dimensional_Array/Exception.h"
using namespace Microsoft::VisualStudio::CppUnitTestFramework;
namespace ArrayADTTests
{
TEST_CLASS(ArrayADTTests)
{
public:
TEST_METHOD(TestDefaultArrayConstructor)
{
int expectedLength = 0;
int expectedStartIndex = 0;
Array<int> ra;
Assert::AreEqual(expectedLength, ra.getLength());
Assert::AreEqual(expectedStartIndex, ra.getStartIndex());
}
TEST_METHOD(TestOneArgConstructorValidLength)
{
int expectedLength = 5;
int expectedStartIndex = 0;
Array<int> ra(expectedLength);
Assert::AreEqual(expectedLength, ra.getLength());
Assert::AreEqual(expectedStartIndex, ra.getStartIndex());
}
void CreateInvalidLengthArray()
{
int expectedLength = -5;
Array<int> ra(expectedLength, 0);
}
TEST_METHOD(TestOneArgConstructorInvalidLength)
{
// Syntax for lambda expression
auto func = [this] { CreateInvalidLengthArray(); };
Assert::ExpectException<Exception>(func);
}
TEST_METHOD(TestNonZeroPositiveStartingIndex)
{
int expectedLength = 5;
int expectedStartingIndex = 10;
int expectedValue = 25;
Array<int> ra(expectedLength, expectedStartingIndex);
ra[expectedStartingIndex] = expectedValue;
Assert::AreEqual(expectedStartingIndex, ra.getStartIndex());
Assert::AreEqual(expectedValue, ra[expectedStartingIndex]);
}
TEST_METHOD(TestNonZeroNegativeStartingIndex)
{
int expectedLength = 5;
int expectedStartingIndex = -10;
int expectedValue = 25;
Array<int> ra(expectedLength, expectedStartingIndex);
ra[expectedStartingIndex] = expectedValue;
Assert::AreEqual(expectedStartingIndex, ra.getStartIndex());
Assert::AreEqual(expectedValue, ra[expectedStartingIndex]);
}
TEST_METHOD(TestUpdatedStartingIndex)
{
int expectedLength = 5;
int expectedStartingIndex = 5;
int expectedValue = 25;
Array<int> ra(expectedLength, expectedStartingIndex);
ra[expectedStartingIndex] = expectedValue;
int expectedNewStartIndex = 10;
ra.setStartIndex(expectedNewStartIndex);
Assert::AreEqual(expectedLength, ra.getLength());
Assert::AreEqual(expectedNewStartIndex, ra.getStartIndex());
Assert::AreEqual(expectedValue, ra[expectedNewStartIndex]);
}
TEST_METHOD(TestCopyConstructor)
{
int expectedLength = 5;
int expectedStartingIndex = 10;
int expectedValue = 25;
Array<int> ra(expectedLength, expectedStartingIndex);
ra[expectedStartingIndex] = expectedValue;
Array<int> copyOfRa(ra);
Assert::AreEqual(expectedLength, copyOfRa.getLength());
Assert::AreEqual(expectedStartingIndex, copyOfRa.getStartIndex());
Assert::AreEqual(expectedValue, copyOfRa[expectedStartingIndex]);
}
TEST_METHOD(TestOverloadedAssignmentOperator)
{
int expectedLength = 5;
int expectedStartingIndex = 10;
int expectedValue = 25;
Array<int> ra(expectedLength, expectedStartingIndex);
ra[expectedStartingIndex] = expectedValue;
Array<int> raAssignment = ra;
Assert::AreEqual(expectedLength, raAssignment.getLength());
Assert::AreEqual(expectedStartingIndex, raAssignment.getStartIndex());
Assert::AreEqual(expectedValue, raAssignment[expectedStartingIndex]);
}
void TriggerLowerOutOfBoundsException()
{
int expectedLength = 5;
Array<int> ra(expectedLength);
ra[-3];
}
TEST_METHOD(TestOutOfLowerBoundsIndexException)
{
auto func = [this] { TriggerLowerOutOfBoundsException(); };
Assert::ExpectException<Exception>(func);
}
void TriggerUpperOutOfBoundsException()
{
int expectedLength = 5;
Array<int> ra(expectedLength);
ra[expectedLength + 1];
}
TEST_METHOD(TestOutOfUpperBoundsException)
{
auto func = [this] { TriggerUpperOutOfBoundsException(); };
Assert::ExpectException<Exception>(func);
}
TEST_METHOD(TestResizeArrayToSmallerSize)
{
int expectedLength = 10;
int expectedStartIndex = 0;
int expectedValue = 25;
Array<int> ra(expectedLength);
ra[4] = expectedValue;
int expectedNewLength = 5;
ra.setLength(expectedNewLength);
Assert::AreEqual(expectedNewLength, ra.getLength());
Assert::AreEqual(expectedStartIndex, ra.getStartIndex());
Assert::AreEqual(expectedValue, ra[expectedNewLength - 1]);
}
void TriggerOutOfBoundsExceptionAfterResize()
{
int expectedLength = 10;
int expectedStartIndex = 0;
int expectedValue = 25;
Array<int> ra(expectedLength, expectedStartIndex);
ra[4] = expectedValue;
int expectedNewLength = 5;
ra.setLength(expectedNewLength);
ra[expectedLength - 1];
}
TEST_METHOD(TestResizeArrayToSmallerSizeWithUpperOutOfBoundException)
{
auto func = [this] { TriggerOutOfBoundsExceptionAfterResize(); };
Assert::ExpectException<Exception>(func);
}
TEST_METHOD(TestConstantArrayMethods)
{
int expectedLength = 5;
int expectedStartingIndex = 10;
int expectedValue = 25;
Array<int> ra(expectedLength, expectedStartingIndex);
ra[expectedStartingIndex] = expectedValue;
const Array<int> raAssignment = ra;
Assert::AreEqual(expectedLength, raAssignment.getLength());
Assert::AreEqual(expectedStartingIndex, raAssignment.getStartIndex());
Assert::AreEqual(expectedValue, raAssignment[expectedStartingIndex]);
}
};
}
<file_sep>#include <crtdbg.h>
#include <Windows.h>
#include <cassert>
#include <iostream>
#include "Array.h"
#include "Exception.h"
#include <string>
using std::string;
#define _CRTDBG_MAP_ALLOC
const size_t GREEN = 10;
const size_t RED = 12;
const size_t WHITE = 23;
static HANDLE hConsole = GetStdHandle(STD_OUTPUT_HANDLE);
void RunTests();
void PrintTestHeader(const char * testName);
void PrintTestPass(const char * test);
void PrintTestFail(const char * test, const char * message);
void TestConstructor();
void TestCopyConstructor();
void TestAssignmentOperator();
void TestingIndexer();
void TestingConst();
int main()
{
RunTests();
_CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_FILE);
_CrtSetReportFile(_CRT_WARN, _CRTDBG_FILE_STDOUT);
_CrtDumpMemoryLeaks();
system("pause");
return 0;
}
void RunTests()
{
TestConstructor();
TestCopyConstructor();
TestAssignmentOperator();
TestingIndexer();
TestingConst();
}
void PrintTestHeader(const char * testName)
{
std::cout << "\n********** " << testName << " **********" << std::endl;
}
void PrintTestPass(const char * test)
{
SetConsoleTextAttribute(hConsole, GREEN);
std::cout << '\t' << test << ": TEST PASS" << std::endl;
SetConsoleTextAttribute(hConsole, WHITE);
}
void PrintTestFail(const char * test, const char * message)
{
SetConsoleTextAttribute(hConsole, RED);
std::cerr << message << std::endl;
std::cerr << '\t' << test << ": TEST FAIL" << std::endl;
SetConsoleTextAttribute(hConsole, WHITE);
}
void TestConstructor()
{
const char * test = "Constructor";
PrintTestHeader(test);
try
{
Array<int> array1;
assert(array1.getLength() == 0);
assert(array1.getStartIndex() == 0);
PrintTestPass(test);
}
catch (Exception exception)
{
PrintTestFail(test, exception.getMessage());
}
}
void TestCopyConstructor()
{
const char * test = "Copy Constructor";
PrintTestHeader(test);
try
{
Array<int> array1;
array1.setLength(1);
array1[0] = 5;
Array<int> array2(array1);
assert(array2.getLength() == 1);
assert(array2[0] == 5);
PrintTestPass(test);
}
catch (Exception exception)
{
PrintTestFail(test, exception.getMessage());
}
}
void TestAssignmentOperator()
{
const char * test = "Assignment Operator";
PrintTestHeader(test);
try
{
int length = 5;
int start_index = 0;
Array<int> array1(length, start_index);
Array<int> array2;
size_t values[] = { 5, 15, 25, 35, 45 };
for (size_t i = 0; i < 5; ++i)
array1[i] = values[i];
array2 = array1;
for (size_t i = 0; i < 5; ++i)
{
assert(array2[i] == values[i]);
}
PrintTestPass(test);
}
catch (Exception exception)
{
PrintTestFail(test, exception.getMessage());
}
}
void TestingIndexer()
{
const char * test = "Indexer";
PrintTestHeader(test);
try
{
int length = 5;
int start_index = 0;
Array<int> array1(length, start_index);
size_t values[] = { 5, 15, 25, 35, 45 };
for (size_t i = 0; i < 5; ++i)
array1[i] = values[i];
for (size_t i = 0; i < 5; ++i)
{
assert(array1[i] == values[i]);
}
PrintTestPass(test);
}
catch (Exception exception)
{
PrintTestFail(test, exception.getMessage());
}
}
void TestingConst()
{
const char * test = "Testing const: Expecting an exception";
PrintTestHeader(test);
try
{
const Array<int> array1;
const int value = array1[0];
}
catch (Exception msg)
{
std::cerr << msg.getMessage();
PrintTestPass(test);
}
}<file_sep>#ifndef EXCEPTION_H
#define EXCEPTION_H
#include <iostream>
using std::ostream;
class Exception
{
public:
Exception();
Exception(const char * msg);
Exception(const Exception & copy);
~Exception();
Exception & operator= (const Exception & rhs);
const char * getMessage();
void setMessage(const char * msg);
friend ostream & operator<< (ostream & stream, const Exception & except);
private:
char * m_msg;
};
#endif<file_sep>#ifndef ARRAY_H
#define ARRAY_H
#include "Exception.h"
template <typename T>
class Array
{
public:
Array();
Array(int length, int start_index = 0);
Array(const Array & copy);
~Array();
Array & operator= (const Array & rhs);
T & operator[] (int index);
const T & operator[] (int index) const;
int getStartIndex();
int getStartIndex() const;
void setStartIndex(int start_index);
int getLength();
int getLength() const;
void setLength(int length);
private:
T * m_array;
int m_length;
int m_start_index;
};
template <typename T>
Array<T>::Array()
: m_array(nullptr), m_length(0), m_start_index(0)
{
}
template <typename T>
Array<T>::Array(int length, int start_index)   // default argument is supplied only in the class declaration
: m_length(length), m_start_index(start_index)
{
if (m_length < 0)
throw Exception("Error: Array length cannot be negative.");
m_array = new T[length];
}
template <typename T>
Array<T>::Array(const Array<T> & copy)
: m_length(copy.m_length), m_start_index(copy.m_start_index)
{
this->m_array = new T[copy.m_length];
for (int cur_index = 0; cur_index < copy.m_length; cur_index++)
this->m_array[cur_index] = copy.m_array[cur_index];
}
template <typename T>
Array<T>::~Array()
{
if (m_array != nullptr)
{
delete[] m_array;
m_array = nullptr;
m_length = 0;
m_start_index = 0;
}
}
template <typename T>
Array<T> & Array<T>::operator= (const Array<T> & rhs)
{
if (this != &rhs)
{
if (this->m_array != nullptr)
delete[] this->m_array;
this->m_array = new T[rhs.m_length];
for (int cur_index = 0; cur_index < rhs.m_length; cur_index++)
this->m_array[cur_index] = rhs.m_array[cur_index];
this->m_length = rhs.m_length;
this->m_start_index = rhs.m_start_index;
}
return *this;
}
template <typename T>
T & Array<T>::operator[] (int index)
{
if (index >= m_start_index && index < m_start_index + m_length)
return m_array[index - m_start_index];
throw Exception("Error: Out of range access");
}
template <typename T>
const T & Array<T>::operator[] (int index) const
{
if (index >= m_start_index && index < m_start_index + m_length)
return m_array[index - m_start_index];
throw Exception("Error: Out of range access");
}
template <typename T>
int Array<T>::getStartIndex()
{
return m_start_index;
}
template <typename T>
int Array<T>::getStartIndex() const
{
return m_start_index;
}
template <typename T>
void Array<T>::setStartIndex(int start_index)
{
m_start_index = start_index;
}
template <typename T>
int Array<T>::getLength()
{
return m_length;
}
template <typename T>
int Array<T>::getLength() const
{
return m_length;
}
template <typename T>
void Array<T>::setLength(int length)
{
if (length > 0)
{
T * new_array = new T[length];
int copy_length = length <= m_length ? length : m_length;
for (int cur_index = 0; cur_index < copy_length; cur_index++)
new_array[cur_index] = this->m_array[cur_index];
if (m_array != nullptr)
delete[] m_array;
m_array = new_array;
m_length = length;
}
else
throw Exception("Error: Array length must be a positive value");
}
#endif
|
c3415534aad326b8d024a7ec37cf0cb1e1d91b7c
|
[
"Markdown",
"C++"
] | 6
|
Markdown
|
LewisSanchez/one-dimensional-array
|
9d1b1f31d3bc9d250d35e39d25c8235303140646
|
d7c7279288b64c59cb70d53559456c4a05bde51d
|
refs/heads/master
|
<repo_name>farmstudio/slugcharmap<file_sep>/README.md
# SlugCharMap
Craft CMS allows custom character transliteration to ASCII by setting the `customAsciiCharMappings` value in `general.php`:
http://buildwithcraft.com/docs/config-settings#customAsciiCharMappings
For instance, in Hungarian, I can set `ü` to map to `u` instead of `ue` (which is a German convention). Unfortunately, this setting won't affect the slug generation mapping in the control panel, which is hardcoded in craft.js, see:
http://craftcms.stackexchange.com/questions/9110/trouble-with-transliteration-in-slug-caused-by-hardcoded-charmap-in-craft-js
This plugin will allow the `customAsciiCharMappings` setting to work while generating slugs.
# Installation:
1. Install the plugin under `craft/plugins/slugcharmap`, so that you end up with:
`craft/plugins/slugcharmap/SlugCharMapPlugin.php`
2. Enable the plugin in the control panel
3. Define the `customAsciiCharMappings` setting in `general.php`, see documentation for guidance:
http://buildwithcraft.com/docs/config-settings#customAsciiCharMappings
<file_sep>/SlugCharMapPlugin.php
<?php namespace Craft;
class SlugCharMapPlugin extends BasePlugin {
protected $_version = '1.0',
$_pluginName = 'SlugCharMap',
$_pluginUrl = 'https://github.com/farmstudio/slugcharmap',
$_developer = 'Farm Studio',
$_developerUrl = 'http://farm.co.hu';
public function getName() {
return $this->_pluginName;
}
public function getVersion() {
return $this->_version;
}
public function getDeveloper() {
return $this->_developer;
}
public function getDeveloperUrl() {
return $this->_developerUrl;
}
public function getPluginUrl() {
return $this->_pluginUrl;
}
public function init() {
parent::init();
$request = craft()->request;
if (craft()->request->isCpRequest()) {
$this->addJavascript();
}
}
protected function addJavascript() {
$charMap = json_encode(StringHelper::getAsciiCharMap());
craft()->templates->includeJs(
"$(document).ready(function() {
Craft.asciiCharMap = $charMap;
});",
TRUE
);
}
}
|
15f3888bd8298bcc3a52085d6f7f1024ef96e1cc
|
[
"Markdown",
"PHP"
] | 2
|
Markdown
|
farmstudio/slugcharmap
|
3ec3547f037df2d3e9e276c3cafa7b6ea8473deb
|
2e75190dc0c57902f298e1e1f5a0f5e54041544e
|
refs/heads/main
|
<repo_name>rai-mond/ese2_MetPet<file_sep>/script.js
window.onload = () => {
    // getCurrentPosition is asynchronous and returns nothing; read the
    // coordinates from position.coords inside the callback.
    navigator.geolocation.getCurrentPosition(function (position) {
        const latitude = position.coords.latitude;
        const longitude = position.coords.longitude;
        let places = staticLoadPlaces(latitude, longitude);
        renderPlaces(places);
    });
};
function staticLoadPlaces(latitude,longitude) {
return [
{
name: 'MiaoMiao',
location: {
lat: latitude,
lng: longitude,
}
},
];
}
function renderPlaces(places) {
let scene = document.querySelector('a-scene');
places.forEach((place) => {
let latitude = place.location.lat;
let longitude = place.location.lng;
let model = document.createElement('a-entity');
model.setAttribute('gps-entity-place', `latitude: ${latitude}; longitude: ${longitude};`);
model.setAttribute('gltf-model', 'assets/miaoglb/Cat_male_animations_exported.glb');
model.setAttribute('rotation', '0 180 0');
model.setAttribute('scale', '0.5 0.5 0.5');
model.addEventListener('loaded', () => {
window.dispatchEvent(new CustomEvent('gps-entity-place-loaded'))
});
scene.appendChild(model);
});
}
|
8853b62cda93407ce213871e4d111b82082d1154
|
[
"JavaScript"
] | 1
|
JavaScript
|
rai-mond/ese2_MetPet
|
712e9783a52bed38b677ce566215514fda0cb278
|
47d1f58144ae4b32e67211f2254bf170ddf6e4f2
|
refs/heads/master
|
<file_sep>EXECUTABLE_NAME=game
CC=g++
OBJ_DIR=./obj
SRCS= $(wildcard *.cpp)
OBJS= $(patsubst %.cpp,$(OBJ_DIR)/%.o,$(SRCS))
CFLAGS+=-I./include
CFLAGS+=-I/Users/dhaksitha.malavisuriya/Downloads/SFML-2.5.1-macos-clang/include
LDFLAGS+=-L/Users/dhaksitha.malavisuriya/Downloads/SFML-2.5.1-macos-clang/lib
CFLAGS+=-std=c++11 -Wall
LDFLAGS+=-lsfml-graphics -lsfml-window -lsfml-system -rpath /Users/dhaksitha.malavisuriya/Downloads/SFML-2.5.1-macos-clang/lib
all: $(EXECUTABLE_NAME)
$(EXECUTABLE_NAME): $(OBJS)
$(CC) $(LDFLAGS) -o $@ $^
$(OBJ_DIR)/%.o: %.cpp
$(CC) $(CFLAGS) -c $< -o $@
.PHONY: clean
clean:
rm $(OBJ_DIR)/*.o $(EXECUTABLE_NAME)
<file_sep>
#include <SFML/Graphics.hpp>
#include <chrono>
#include <iostream>
#include <cmath>
#include <random>
#define FPS 60
/* //Draw circle using midCircle algorithm
void DrawCircle(SDL_Renderer * renderer, int32_t centreX, int32_t centreY, int32_t radius)
{
const int32_t diameter = (radius * 2);
int32_t x = (radius - 1);
int32_t y = 0;
int32_t tx = 1;
int32_t ty = 1;
int32_t error = (tx - diameter);
while (x >= y)
{
// Each of the following renders an octant of the circle
SDL_RenderDrawCircle(renderer, centreX + x, centreY - y);
SDL_RenderDrawCircle(renderer, centreX + x, centreY + y);
SDL_RenderDrawCircle(renderer, centreX - x, centreY - y);
SDL_RenderDrawCircle(renderer, centreX - x, centreY + y);
SDL_RenderDrawCircle(renderer, centreX + y, centreY - x);
SDL_RenderDrawCircle(renderer, centreX + y, centreY + x);
SDL_RenderDrawCircle(renderer, centreX - y, centreY - x);
SDL_RenderDrawCircle(renderer, centreX - y, centreY + x);
if (error <= 0)
{
++y;
error += ty;
ty += 2;
}
if (error > 0)
{
--x;
tx += 2;
error += (tx - diameter);
}
}
} */
struct Circle
{
sf::Vector2f position;
sf::Vector2f oldPosition;
sf::Vector2f acceleration;
float radius;
};
void printCircles(Circle* Circles)
{
for (int i = 0; i < 2; i++)
{
Circle& P = Circles[i];
std::cout << i << std::endl;
std::cout << "Pos: " << P.position.x << " " << P.position.y << std::endl;
std::cout << "old_Pos: " << P.oldPosition.x << " " << P.oldPosition.y << std::endl;
std::cout << "accel: " << P.acceleration.x << " " << P.acceleration.y << std::endl;
std::cout << "=====================================" << std::endl;
}
}
void updateVerlet(Circle* Circles)
{
float timeStep = 1.0f / FPS;
float timeStep2 = timeStep * timeStep;
for (int i = 0; i < 1; i++)
{
Circle& P = Circles[i];
sf::Vector2f Temp = P.position;
//pos += (pos - old_pos) + (accel * timestep ^ 2) -- Fixed timestep at 60fps (1/60s)
P.position += (P.position - P.oldPosition) + P.acceleration * timeStep2;
P.oldPosition = Temp;
std::cout << "Pos: " << P.position.x << " " << P.position.y << std::endl;
}
}
//counter clockwise perpendicular vector...
sf::Vector2f vectorPerpendicular(sf::Vector2f v)
{
return sf::Vector2f((-1 * v.y), v.x);
}
float dotProduct2(sf::Vector2f v1, sf::Vector2f v2)
{
return (v1.x * v2.x) + (v1.y * v2.y);
}
float vectorMagnitude(sf::Vector2f v)
{
return std::sqrt((v.x * v.x) + (v.y * v.y));
}
sf::Vector2f getReflectionVec(sf::Vector2f vec, sf::Vector2f norm)
{
float dot = dotProduct2(vec, norm);
sf::Vector2f tmp = vec - ((2 * dot) * norm);
return tmp;
}
sf::Vector2f normalize(sf::Vector2f vec)
{
float mag = vectorMagnitude(vec);
return (vec / mag);
}
void drawCircles(Circle* Circles, sf::RenderWindow& window)
{
for (int i = 0; i < 3; i++)
{
Circle& P = Circles[i];
sf::CircleShape shape;
shape.setRadius(P.radius);
shape.setOrigin(P.radius,P.radius);
shape.setFillColor(sf::Color::Green);
shape.setPosition(P.position);
window.draw(shape);
}
}
bool checkPointCirlceCol(sf::Vector2f& p, Circle& c)
{
sf::Vector2f dist = c.position - p;
float distance = vectorMagnitude(dist);
if (distance <= c.radius)
{
return true;
}
else
{
return false;
}
}
bool checkCircleLineCol(sf::Vector2f&& p1, sf::Vector2f&& p2, Circle& c)
{
    if (checkPointCircleCol(p1, c) || checkPointCircleCol(p2, c))
{
return true;
}
sf::Vector2f line = p2 - p1;
float lineLen = vectorMagnitude(line);
sf::Vector2f lineDir = line / lineLen;
sf::Vector2f circleToLine = c.position - p1;
float scalarProj = dotProduct2(line, circleToLine) / (lineLen * lineLen);
sf::Vector2f proj = line * scalarProj;
sf::Vector2f closestPoint = proj + p1;
float d1 = vectorMagnitude(closestPoint - p1);
float d2 = vectorMagnitude(closestPoint - p2);
if (d1 + d2 > lineLen)
{
return false;
}
sf::Vector2f closestLine = closestPoint - c.position;
float distClosePoint = vectorMagnitude(closestLine);
if (distClosePoint <= c.radius)
{
std::cout << std::endl << "Pos: " << c.position.x << " " << c.position.y << std::endl;
std::cout << "old_Pos: " << c.oldPosition.x << " " << c.oldPosition.y << std::endl;
std::cout << "accel: " << c.acceleration.x << " " << c.acceleration.y << std::endl;
std::cout << "=====================================" << std::endl;
sf::Vector2f velocity_vec = c.position - c.oldPosition;
sf::Vector2f ref = getReflectionVec(velocity_vec, vectorPerpendicular(lineDir));
c.oldPosition = c.position;
c.position = c.position + ref;
std::cout << "Pos: " << c.position.x << " " << c.position.y << std::endl;
std::cout << "old_Pos: " << c.oldPosition.x << " " << c.oldPosition.y << std::endl;
std::cout << "accel: " << c.acceleration.x << " " << c.acceleration.y << std::endl;
std::cout << "=====================================" << std::endl << std::endl << std::endl;
return true;
}
else
{
return false;
}
}
bool checkCircleCircleCol(Circle& c1, Circle& c2)
{
float comRad = c1.radius + c2.radius;
sf::Vector2f dist = c1.position - c2.position;
float distance = vectorMagnitude(dist);
if (distance <= comRad)
{
if (distance > 0)
{
c1.position = c1.position + (dist / distance);
}
return true;
}
else
{
return false;
}
}
void createCircles(Circle* circles)
{
std::random_device dev;
std::mt19937 rng(dev());
std::uniform_real_distribution<float> x_pos(50.0f, 950.0f);
std::uniform_real_distribution<float> y_pos(50.0f, 900.0f);
std::uniform_real_distribution<float> x_accel(0.0f, 5.0f);
std::uniform_real_distribution<float> y_accel(0.0f, 5.0f);
for (int i = 0; i < 2; i++)
{
Circle& c = circles[i];
sf::Vector2f pos = sf::Vector2f(x_pos(rng), y_pos(rng));
c.acceleration = sf::Vector2f(x_accel(rng), y_accel(rng));
c.position = pos;
c.oldPosition = pos;
c.radius = 10.0f;
}
}
int main()
{
sf::RenderWindow window(sf::VideoMode(1000, 960), "SFML works!");
window.setMouseCursorVisible(false);
Circle Circles[3];
createCircles(Circles);
Circles[2].acceleration = sf::Vector2f(9.8f, 1.0f);
Circles[2].oldPosition = sf::Vector2f(0.0f, 0.0f);
Circles[2].position = sf::Vector2f(300.0f, 900.0f);
Circles[2].radius = 50.0f;
auto startTime = std::chrono::steady_clock::now();
float lag = 0.0f;
float timeStep_s = (1.0f / FPS);
while (window.isOpen())
{
sf::Event event;
while (window.pollEvent(event))
{
if (event.type == sf::Event::Closed)
window.close();
}
if (lag >= timeStep_s)
{
updateVerlet(Circles);
Circles[2].position = sf::Vector2f(sf::Mouse::getPosition(window));
lag -= timeStep_s;
}
checkCircleCircleCol(Circles[0], Circles[2]);
checkCircleLineCol(sf::Vector2f(0,960), sf::Vector2f(1000,960), Circles[0]);
checkCircleLineCol(sf::Vector2f(1000,0), sf::Vector2f(1000,960), Circles[0]);
checkCircleLineCol(sf::Vector2f(0,0), sf::Vector2f(0,960), Circles[0]);
checkCircleLineCol(sf::Vector2f(0,0), sf::Vector2f(1000,0), Circles[0]);
auto endTime = std::chrono::steady_clock::now();
std::chrono::duration<float> elapsedTime = endTime - startTime;
startTime = endTime;
lag += elapsedTime.count();
window.clear();
drawCircles(Circles, window);
window.display();
}
return 0;
}
<file_sep>cmake_minimum_required(VERSION 3.15)
project(sdl_Test)
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_BINARY_DIR ${CMAKE_SOURCE_DIR}/build/)
set(EXECUTABLE_OUTPUT_PATH ${CMAKE_BINARY_DIR})
set(LIBRARY_OUTPUT_PATH ${CMAKE_BINARY_DIR})
# Add source files
file(GLOB SOURCE_FILES
${CMAKE_SOURCE_DIR}/*.cpp)
# Add header files
file(GLOB_RECURSE HEADER_FILES
${CMAKE_SOURCE_DIR}/include/*.h
${CMAKE_SOURCE_DIR}/include/*.hpp)
find_package(SFML 2.5 COMPONENTS graphics audio REQUIRED)
find_package(OpenGL REQUIRED)
include_directories(${OPENGL_INCLUDE_DIRS} ${CMAKE_SOURCE_DIR}/include/)
add_executable(${PROJECT_NAME} ${SOURCE_FILES} ${HEADER_FILES})
target_link_libraries(${PROJECT_NAME} sfml-graphics sfml-audio ${OPENGL_LIBRARIES} ${CMAKE_DL_LIBS})
|
eb7f4f0181d017fe5e72043d090e5f4efc08e617
|
[
"CMake",
"Makefile",
"C++"
] | 3
|
Makefile
|
doladolaben/sfmlTest
|
00b9b135211e2a3ef4c49da0321bce0c31242e76
|
10c0b5db22bfb7872b48ea8fc2848d0c4455eddb
|
refs/heads/master
|
<repo_name>basima25z/POS-tagger<file_sep>/regexTest.py
import re
# string = "Congress/NNP july/NN"
# pattern = re.compile(r'(.*)/(.*)')
# matches = pattern.finditer(string)
# trainDict={}
# for match in matches:
# #print(match.group(1))
# #print(match.group(2))
# if match:
# key = match.group(1)
# value = match.group(2)
# trainDict[key]=value
# print(trainDict)
pattern = r"(.*)/(.*)"
stringPar = "old/JJ ,/, will/MD join/VB "
res = stringPar.split()
trainDict={}
for i in res:
match = re.search(pattern,i)
if match:
key = match.group(1)
value = match.group(2)
trainDict[key]=value
print(trainDict)
# matches=re.finditer(pattern,res)
# for matchNum, match in enumerate(matches, start=1):
# if match:
# key = match.group(1)
# value = match.group(2)
# trainDict[key]=value
# print(trainDict)
<file_sep>/tagger.py
import os
import sys
import re
import csv
import itertools
from collections import Counter
################################################################
# <NAME>
# CMSC: 416 - Natural Language Processing
# March 16th, 2021
# Programming Assignment 3
# This is a POS tagger program. Its purpose is to train the POS tagger with the
# training file and then apply what the training file has taught it to the testing file.
# The testing file only contains words, and our goal is to predict each word's POS based on the highest probability; this acts as a baseline
# The input of the command line requires two files: tagger.py pos-train.txt pos-test.txt
# The output uses STDOUT, so in the command line, following the two files, use '>' along with the filename.txt that you would like to output to
# To run this program in the terminal the command is: python3.8 tagger.py pos-train.txt pos-test.txt > pos-test-with-tags.txt
# It is not necessary to add the 3.8 after python unless your IDE defaults to the Python 2.7 interpreter
################################################################
################################################################
# Function: Main
# Parameter: command line arguments
# The purpose of the main method is to read two files from the command line
# The program outputs to STDOUT, hence when the '>' is typed in following a filename.txt anything that
# prints in the program will output directly to that file (within the same directory)
# After the files are read, the method removeBrackets() is called to remove the brackets of the training file
# After the brackets are removed, the method scrape is called where our frequency tables are created
###############################################################
def main(argv):
trainFile = os.path.basename(sys.argv[1])
testingFile = os.path.basename(sys.argv[2])
openTrainFile = open(trainFile, "r")
contentsTrain = openTrainFile.read().lower()
openTestFile = open(testingFile, "r")
contentsTest = openTestFile.read().lower()
#removeBrackets
contentsTrain = removeBrackets(contentsTrain)
scrape(contentsTrain, contentsTest)
##############################################################
# Function: removeBrackets
# Parameter: file
# The method traverses through each word (or index) in the training file, and if it has a bracket, it
# removes it by using the replace method
# It then returns the file without brackets
#############################################################
def removeBrackets(trainFile):
punc = '''[]'''
for p in trainFile:
if p in punc:
trainFile=trainFile.replace(p,"")
return trainFile
####################################################################
# Function: scrape
# Parameter: file, file
# The purpose of this function is to first use regex to split the word/pos into two parts and append it
# to a sublist of lists. It also appends just the key(word) to a list called JustKeys
# After the lists of lists is made and the justKeys list is made, I use counter to create the first
# frequency table which holds the word, pos and the number of occurrences
# For example, the listOfLists could look like this: [[veto,nn],[veto,nn],[apart,nn]]
# After using Counter, it would look like this: (veto,nn):2, (apart,nn):1
# The second frequency table is created with justKeys,
# justKeys could look like this: (veto, veto, apart) and using the frequency method after, it would look
# like: {veto:2, apart:1}
# After both the tables are created, testFile is sent to removeBrackets prior to being sent over to be tagged
#####################################################################
def scrape(trainFile, testFile):
#use regex to get two groups
pattern = '(.*)/(.*)'
posReg = '(.*)|(.*)'
splitFileList = trainFile.split()
#uses regex to split it into two parts and adds to a list
listOfLists = []
justKeys = []
for i in splitFileList: #for each jumpled word (word/pos) or each index
match=re.search(pattern,i) #search if the pattern exist
if match: #if it does exist
key=match.group(1) #key is equal to the first match, in this case the word
value=match.group(2) #value is equal to the second match, in this case the POS
posRegMatch=re.search(posReg,value) #This check if the value (pos) has a '|' if it does
if posRegMatch:
value = posRegMatch.group(1) #only takes the first part of the pos
listOfLists.append([key,value]) #appends key,value to the list of lists
justKeys.append(key) #appends only keys to list
#creation of the first frequency matrix
count = Counter(map(tuple,listOfLists))
#creating of second frequency table --> just words and freq
frequency ={}
frequency= freq(justKeys)
#calls removeBrackets method and sends it testFile, have to do this here because right after it is sent to tag
testFile=removeBrackets(testFile)
###########################################################################
# Tags is the baseline method where the tag is based off of the highest probability of it occurring
# Tags1 correlates to rule 1
# Tags2 correlates to rule 2
# Tags3 correlates to rule 3
# Tags4 correlates to rule 4
# Tags5 correlates to rule 5
# As you can see, as the rules became more ambiguous, the accuracy degraded
####################################################################################
tags(trainFile,testFile,count,frequency) #Accuracy: 83
#tags1(trainFile,testFile,count,frequency) #Accuracy: 80
#tags2(trainFile,testFile,count,frequency) #Accuracy: 78
#tags3(trainFile,testFile,count,frequency) #Accuracy: 79
#tags4(trainFile,testFile,count,frequency) #Accuracy: 79
#tags5(trainFile,testFile,count,frequency) #Accuracy:78
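###################################################################################
# The helper below is only an illustrative sketch of the two frequency tables
# described in the scrape comments above; it is never called, and the sample
# tokens are assumed purely for demonstration
###################################################################################
def frequencyTableExample():
    sample = "veto/nn veto/nn apart/nn"
    listOfLists = []
    justKeys = []
    for token in sample.split():
        word, pos = token.split("/")
        listOfLists.append([word, pos])
        justKeys.append(word)
    pairCounts = Counter(map(tuple, listOfLists)) #first table: {('veto', 'nn'): 2, ('apart', 'nn'): 1}
    wordCounts = freq(justKeys)                   #second table: {'veto': 2, 'apart': 1}
    return pairCounts, wordCounts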
###############################################
# Function: Freq
# Parameter: list (justKeys)
# The purpose of this method is to count the frequency of each word occurring and to return it in a dictionary
###############################################
def freq(justKeys):
wordfreq = [justKeys.count(p) for p in justKeys]
return dict(list(zip(justKeys,wordfreq)))
####################################################
# Function: find_pos
# Parameter: str, counter, dict
# The purpose of this method is to find the pos that correlates to the word being sent over from the tags method
# The premise is that a word in the test file has already been matched to a word in the training file
# That word is then sent over as the first parameter
# It then traverses through the counter which looks like this: (veto,nn):2, (apart,nn):1
# If the word sent in (variable w) is equal to a word in count, then it creates a mini dictionary of
# words and the pos that match the word
# For example, if the word that matched was 'no' the mini dictionary would have every option POS of no
# It would look like this: (no,dt):50, (no,rb):2
# It then finds the max frequency occurrence and then, since our key is embedded like this: (no,dt)
# I then just sent the first index back which correlates to the pos
# The pos is returned
# The reason for using max frequency instead of probability is that, for a fixed word, whatever has the
# highest frequency will also have the highest probability (the denominator is the same for every tag)
########################################################################
def find_pos(w,count,frequency):
matchingDict ={} #creation of a mini dictionary
for(word,pos),v in count.items(): #goes through count
if w==word: #if word sent it (that we know matches) matches a word in count
matchingDict[word,pos]=v #embedds the word,pos as key and the value (frequency) as v
max_key = max(matchingDict, key =matchingDict.get) #baseline, gets you the key with the max frequency
pos = max_key[1] #GETS YOU POS THAT WE NEED TO RETURN, since the key is embedded (looks like (veto,nn)), by doing max_key[1] we only get the POS
return pos #return POS
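###################################################################################
# Sketch only, never called: for a fixed word w, P(tag | w) = count(w, tag) / count(w)
# The denominator count(w) is identical for every candidate tag, so the tag with the
# highest raw frequency is also the tag with the highest probability, which is why
# find_pos above can work directly with frequencies; the counts below are assumed
###################################################################################
def argmaxEquivalenceExample():
    counts = {('no', 'dt'): 50, ('no', 'rb'): 2}                          #count(w, tag)
    total = sum(counts.values())                                          #count(w)
    probabilities = {key: value / total for key, value in counts.items()} #P(tag | w)
    bestByCount = max(counts, key=counts.get)
    bestByProbability = max(probabilities, key=probabilities.get)
    return bestByCount == bestByProbability                               #True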
###############################################################################
# Function: tags
# Parameters: file, file, counter, dict
# The purpose of this method is to see if any word in the testing file matches a word in the training file; if one does
# it calls the find_pos method which returns the pos
# Once the pos is returned it appends it to the word and prints it to STDOUT
###############################################################################
def tags(trainFile, testFile, count, frequency):
tagTestWords =[]
splitFileTest = testFile.split()
#The testFile only contains the words, the brackets are removed prior to the file being sent here
# as an argument, so the method first splits in on whitespace, then it traverses through a for loop
# and appends each word to a list called tagTestWords
for i in splitFileTest:
tagTestWords.append(i)
#This is a for loop to see if a word in the tagTestWords matches a word in count
#found acts as a flag, so it initally is set to False
#If there is a match, it sends the word to find_pos which returns the pos associated with the word
#It then appends the word to the tag with a '/' in between and prints it to STDOUT and sets found = True
#When there isn't a match between the train and test, found = False and it appends the word + /nn and prints it
for word in tagTestWords:
found=False
for (w,pos),v in count.items():
if (word == w): #if a word from the tagTestWords (testFile) matches a word found in count
tag=find_pos(word,count, frequency) #baseline rule - it sends the matches word, count, and freq dict to find_pos which returns the pos
wordFin = word + "/" + tag #concatenates the word matches + '/' + pos that is returned from find_pos
print(wordFin + "\n") #prints to STDOUT
found=True #found is set to true, so then it will continue in this for loop until found = False
break
if found==False: #if found == False, this means that there is no word that matches from the train and test
wordNoPosFound= word + "/nn" #it concatenates the word + /nn
print(wordNoPosFound + "\n") #prints to STDOUT
##############################################################################################
# Method: rule1
# Parameters: (str, count, dict)
# tag1 is associated with rule1
# each rule has its own method and tags method
# Rule 1 is if the pos is 'vbd' then set pos = 'vbn' instead and return that as your pos
# The accuracy achieved with rule1 is = 80
# This was found by having each rule run one at a time to print out to the file
# and running that file individually through scorer.py
################################################################################################
def rule1(w,count,frequency):
matchingDict ={}
for(word,pos),v in count.items():
if w==word:
matchingDict[word,pos]=v
max_key = max(matchingDict, key =matchingDict.get)
pos = max_key[1]
if(pos=='vbd'):
pos='vbn'
return pos
############################################################
# Method: tags1
# Parameter: (file,file,count,dict)
# This is similar to tags (baseline), but instead of calling find_pos, it calls rule1
############################################################
def tags1(trainFile, testFile, count, frequency):
tagTestWords =[]
splitFileTest = testFile.split()
for i in splitFileTest:
tagTestWords.append(i)
for word in tagTestWords:
found=False
for (w,pos),v in count.items():
if (word == w):
tag=rule1(word,count, frequency)
wordFin = word + "/" + tag
print(wordFin + "\n")
found=True
break
if found==False:
wordNoPosFound= word + "/nn"
print(wordNoPosFound + "\n")
##############################################################################################
# Method: rule2
# Parameters: (str, count, dict)
# tag2 is associated with rule2
# Rule 2 is if the pos is 'nnp' then set pos = 'nn' instead and return that as your pos
# The accuracy achieved with rule2 is = 78
# This was found by having each rule run one at a time to print out to the file
# and running that file individually through scocer.py
################################################################################################
def rule2(w,count,frequency):
matchingDict ={}
for(word,pos),v in count.items():
if w==word:
matchingDict[word,pos]=v
max_key = max(matchingDict, key =matchingDict.get)
pos = max_key[1]
if(pos=='nnp'):
pos='nn'
return pos
############################################################
# Method: tags2
# Parameter: (file,file,count,dict)
# This is similar to tags (baseline), but instead of calling find_pos, it calls rule2
############################################################
def tags2(trainFile, testFile, count, frequency):
tagTestWords =[]
splitFileTest = testFile.split()
for i in splitFileTest:
tagTestWords.append(i)
for word in tagTestWords:
found=False
for (w,pos),v in count.items():
if (word == w):
tag=rule2(word,count, frequency)
wordFin = word + "/" + tag
print(wordFin + "\n")
found=True
break
if found==False:
wordNoPosFound= word + "/nn"
print(wordNoPosFound + "\n")
##############################################################################################
# Method: rule3
# Parameters: (str, count, dict)
# tag3 is associated with rule3
# Rule 3 is if the pos is 'jj' then set pos = 'nnp' instead and return that as your pos
# The accuracy achieved with rule3 is = 79
# This was found by having each rule run one at a time to print out to the file
# and running that file individually through scocer.py
################################################################################################
def rule3(w,count,frequency):
matchingDict ={}
for(word,pos),v in count.items():
if w==word:
matchingDict[word,pos]=v
max_key = max(matchingDict, key =matchingDict.get)
pos = max_key[1]
if(pos=='jj'):
pos='nnp'
return pos
############################################################
# Method: tags3
# Parameter: (file,file,count,dict)
# This is similar to tags (baseline), but instead of calling find_pos, it calls rule3
############################################################
def tags3(trainFile, testFile, count, frequency):
tagTestWords =[]
splitFileTest = testFile.split()
for i in splitFileTest:
tagTestWords.append(i)
for word in tagTestWords:
found=False
for (w,pos),v in count.items():
if (word == w):
tag=rule3(word,count, frequency)
wordFin = word + "/" + tag
print(wordFin + "\n")
found=True
break
if found==False:
wordNoPosFound= word + "/nn"
print(wordNoPosFound + "\n")
##############################################################################################
# Method: rule4
# Parameters: (str, count, dict)
# tag4 is associated with rule4
# Rule 4 is if the pos is 'to' then set pos = 'in' instead and return that as your pos
# The accuracy achieved with rule4 is = 79
# This was found by having each rule run one at a time to print out to the file
# and running that file individually through scocer.py
################################################################################################
def rule4(w,count,frequency):
matchingDict ={}
for(word,pos),v in count.items():
if w==word:
matchingDict[word,pos]=v
max_key = max(matchingDict, key =matchingDict.get)
pos = max_key[1]
if(pos=='to'):
pos='in'
return pos
############################################################
# Method: tags4
# Parameter: (file,file,count,dict)
# This is similar to tags (baseline), but instead of calling find_pos, it calls rule4
############################################################
def tags4(trainFile, testFile, count, frequency):
tagTestWords =[]
splitFileTest = testFile.split()
for i in splitFileTest:
tagTestWords.append(i)
for word in tagTestWords:
found=False
for (w,pos),v in count.items():
if (word == w):
tag=rule4(word,count, frequency)
wordFin = word + "/" + tag
print(wordFin + "\n")
found=True
break
if found==False:
wordNoPosFound= word + "/nn"
print(wordNoPosFound + "\n")
##############################################################################################
# Method: rule5
# Parameters: (str, count, dict)
# tag5 is associated with rule5
# Rule 5 is if the pos is 'nns' then set pos = 'rb' instead and return that as your pos
# The accuracy achieved with rule5 is = 78
# This was found by having each rule run one at a time to print out to the file
# and running that file individually through scocer.py
################################################################################################
def rule5(w,count,frequency):
matchingDict ={}
for(word,pos),v in count.items():
if w==word:
matchingDict[word,pos]=v
max_key = max(matchingDict, key =matchingDict.get)
pos = max_key[1]
if(pos=='nns'):
pos='rb'
return pos
############################################################
# Method: tags5
# Parameter: (file,file,count,dict)
# This is similar to tags (baseline), but instead of calling find_pos, it calls rule5
# Overall, as rules were individually added and tested, the accuracy would degrade
# If the rules worked in conjunction, the accuracy would be considerably low
############################################################
def tags5(trainFile, testFile, count, frequency):
tagTestWords =[]
splitFileTest = testFile.split()
for i in splitFileTest:
tagTestWords.append(i)
for word in tagTestWords:
found=False
for (w,pos),v in count.items():
if (word == w):
tag=rule5(word,count, frequency)
wordFin = word + "/" + tag
print(wordFin + "\n")
found=True
break
if found==False:
wordNoPosFound= word + "/nn"
print(wordNoPosFound + "\n")
if __name__ == "__main__":
#print('---------------------------------------------------------------------------------------------')
#print('<NAME>')
main(sys.argv)<file_sep>/scorer.py
#from sklearn.metrics import confusion_matrix
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
import pandas as pd
import numpy as np
#import sklearn
import os
import sys
import re
###########################################################
# <NAME>
# CMSC: 416 - Natural Language Processing
# March 16th, 2021
# Programming Assignment 3
# This is a utility program that compares the output from tagger.py to the key provided to us
# The purpose of this program is to see how accurate tagger.py is at classifying a word to a pos
# We can see accuracy and precision with the confusion matrix and the accuracy score
# The input through the command line requires two files: pos-test-with-tags.txt and pos-test-key.txt
# The first file is the result of tagger.py
# The output file will be utilizing STDOUT, so the > symbol along with a file name is needed in the command line,
# so that the output knows where to be printed
# To run this program in the command line: python3.8 scorer.py pos-test-with-tags.txt pos-test-key.txt > pos-tagging-report.txt
# It is not necessary to add the 3.8 after python unless your IDE defaults to the Python 2.7 interpreter
############################################################
########################################################
# Function: Main
# Parameters: argv
# The main method takes in two files, the first being the output file from tagger.py and the second being the test-key provided
# It opens up the two files and removes the brackets by calling the removeBrackets method and sending it the file
# It then splits the two files and uses regex to just obtain the POS from both the test and the key provided
#########################################################
def main(argv):
testFile = os.path.basename(sys.argv[1])
keyFile = os.path.basename(sys.argv[2])
openTestFile = open(testFile, "r")
contentsTest = openTestFile.read().lower()
openKeyFile = open(keyFile, "r")
contentsKey = openKeyFile.read().lower()
contentsTest= removeBrackets(contentsTest)
contentsKey=removeBrackets(contentsKey)
splitFileKey = contentsKey.split()
splitFileTest = contentsTest.split()
y_pred = splitFileTest #this is used if we want to compare word/pos from test to word/pos in key
y_act = splitFileKey #word/pos in key
###########################################################################
# Below is used to create the Confusion Matrix with just POS
# Using regex to just obtain the pos by first looking at each index (word/pos) in the testFile
# If there is a match of the word/pos in the testFile to word/pos in the keyFile, then it appends the second group matched
# (which is the pos) to a list called posTest
# The regex also checks to see if the pos matched contains a '|', if it does, it just appends the first group (ignoring the second pos attached)
# This algorithm is used first to obtain the pos from the testFile and then again to obtain pos from the keyFile
#################################################################
posTest =[]
pattern = '(.*)/(.*)'
posReg = '(.*)|(.*)'
for i in splitFileTest:
match=re.search(pattern,i)
if match:
key=match.group(1)
pos=match.group(2)
posRegMatch=re.search(posReg,pos)
if posRegMatch:
pos= posRegMatch.group(1)
posTest.append(pos)
posKey =[]
pattern = '(.*)/(.*)'
posReg = '(.*)|(.*)'
for i in splitFileKey:
match=re.search(pattern,i)
if match:
key=match.group(1)
pos=match.group(2)
posRegMatch=re.search(posReg,pos)
if posRegMatch:
pos = posRegMatch.group(1)
posKey.append(pos)
##############Confusion Matrix of POS only####################################
# Using pandas to create a confusion matrix for pos only
# Assigning the list posKey and posTest list that was created above and set it to Series (1-d Array)
# Labeling the y_actKey as Actual and y_predTest as Predicted
# Using crosstab method to create a confusion matrix
# Using accuracy_score method to find the accuracy and multiplying it by 100 to get a whole number
###############################################################################
y_actKey = pd.Series(posKey, name='Actual')
y_predTest= pd.Series(posTest, name='Predicted')
df_conf = pd.crosstab(y_actKey, y_predTest)
pd.set_option("expand_frame_repr", False)
print("\n%s" % df_conf)
acc = accuracy_score(y_actKey, y_predTest)
print("Accuracy: ", acc *100)
#############Confusion Matrix of word/pos#############
#The assignment wasn't clear on how the confusion matrix should look, whether it was just comparing pos
# or if it was word/pos, so I made this prior to doing POS only; however, based on the number of columns and rows,
# I assumed this isn't what was required, so I left it here just in case; if you'd like to see it, just uncomment the print line
####################################################
y_actWord = pd.Series(y_act, name='Actual')
y_predWord= pd.Series(y_pred, name='Predicted')
df_conf = pd.crosstab(y_actWord, y_predWord)
pd.set_option("expand_frame_repr", False)
#print("\n%s" % df_conf)
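#################################################################################
# Minimal sketch (never called) of what the crosstab and accuracy_score calls in
# main produce; the toy tag lists are assumed purely for illustration
#################################################################################
def toyConfusionMatrixExample():
    actual = pd.Series(['nn', 'nn', 'vb', 'dt'], name='Actual')
    predicted = pd.Series(['nn', 'vb', 'vb', 'dt'], name='Predicted')
    matrix = pd.crosstab(actual, predicted)      #rows = actual tags, columns = predicted tags
    accuracy = accuracy_score(actual, predicted) #3 of 4 tags agree -> 0.75
    return matrix, accuracy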
##############################################################
# Function: removeBrackets
# Parameter: file
# The method traverses through each word (or index) in the training file, and if it has a bracket, it
# removes it by using the replace method
# It then returns the file without brackets
#############################################################
def removeBrackets(trainFile):
punc = '''[]'''
for p in trainFile:
if p in punc:
trainFile=trainFile.replace(p,"")
return trainFile
if __name__ == "__main__":
main(sys.argv)
|
ea4324db555f3d15a260217fc264e29a57577fd0
|
[
"Python"
] | 3
|
Python
|
basima25z/POS-tagger
|
99f3837c5bf6bb5da5b21cc1467717f5ebc1bbd0
|
b2036f2c7bb282cb76969585a2690051648d7f43
|
refs/heads/master
|
<repo_name>yuuzer/Project-Lannister<file_sep>/project-lannister-core/src/test/java/hu/yuuzer/lannister/core/services/InfoServiceIntTest.java
package hu.yuuzer.lannister.core.services;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Created by yuuzer on 6/5/2015.
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("/META-INF/services-ctx.xml")
public class InfoServiceIntTest {
@Autowired
private InfoService infoService;
@Test
public void shouldReadVersionFromProperty() throws Exception {
String version = infoService.getVersion();
assertThat(version, equalTo("1.0"));
}
}
<file_sep>/project-lannister-deploy/pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>project-lannister</artifactId>
<groupId>hu.yuuzer.lannister</groupId>
<version>1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>project-lannister-deploy</artifactId>
<packaging>war</packaging>
<properties>
<war.warName>project-lannister</war.warName>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<version>2.6</version>
<configuration>
<webXml>../project-lannister-rest/src/main/resources/WEB-INF/web.xml</webXml>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.tomcat.maven</groupId>
<artifactId>tomcat7-maven-plugin</artifactId>
<version>2.2</version>
<configuration>
<path>/${war.warName}</path>
<warFile>${project.build.directory}/${war.warName}.war</warFile>
<update>true</update>
<url>http://localhost:8080/manager/text</url>
<username>tomcat</username>
<password><PASSWORD></password>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>hu.yuuzer.lannister</groupId>
<artifactId>project-lannister-core</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>hu.yuuzer.lannister</groupId>
<artifactId>project-lannister-rest</artifactId>
<version>1.0-SNAPSHOT</version>
</dependency>
</dependencies>
</project><file_sep>/project-lannister-core/src/test/java/hu/yuuzer/lannister/core/services/InfoServiceTest.java
package hu.yuuzer.lannister.core.services;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* Created by yuuzer on 6/5/2015.
*/
public class InfoServiceTest {
private static final String TEST_VERSION = "test version";
@Test
public void shouldTellTheActualVersion() throws Exception {
InfoService infoService = new InfoService();
infoService.setVersion(TEST_VERSION);
String version = infoService.getVersion();
assertThat(version, equalTo(TEST_VERSION));
}
}
|
9f1234542bd546d1027aa11af2b9aa49ced9e3df
|
[
"Java",
"Maven POM"
] | 3
|
Java
|
yuuzer/Project-Lannister
|
532b18f34649ff1d5a9fd216b199e71af4c2d09b
|
0ffefe661fb7c40aa2bb25840733457622184240
|
refs/heads/master
|
<file_sep>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { SignupComponent } from './signup.component';
import { ReactiveFormsModule } from '@angular/forms';
import { HttpClientModule } from '@angular/common/http';
import { RouterTestingModule } from '@angular/router/testing';
describe('SignupComponent', () => {
let component: SignupComponent;
let fixture: ComponentFixture<SignupComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ SignupComponent ],
imports : [ ReactiveFormsModule, HttpClientModule, RouterTestingModule ],
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(SignupComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
  it('is form invalid', () => {
    let username = component.registerForm.controls['username']
    username.setValue('bdbhd')
    let email = component.registerForm.controls['email']
    email.setValue('bdbhd')
    let password = component.registerForm.controls['password']
    password.setValue('<PASSWORD>')
    expect(component.registerForm.valid).toBeFalsy();
  });
  it('is form invalid', () => {
    let username = component.registerForm.controls['username']
    username.setValue('bdbhd')
    let email = component.registerForm.controls['email']
    email.setValue('bdbhd')
    let password = component.registerForm.controls['password']
    password.setValue('12')
    expect(component.registerForm.valid).toBeFalsy();
  });
  it('email field is empty', () => {
    let username = component.registerForm.controls['username']
    username.setValue('bdbhd')
    let email = component.registerForm.controls['email']
    email.setValue('')
    let password = component.registerForm.controls['password']
    password.setValue("<PASSWORD>")
    expect(component.registerForm.valid).toBeFalsy();
  });
  it('password field is empty', () => {
    let username = component.registerForm.controls['username']
    username.setValue('bdbhd')
    let email = component.registerForm.controls['email']
    email.setValue('<EMAIL>')
    let password = component.registerForm.controls['password']
    password.setValue("")
    expect(component.registerForm.valid).toBeFalsy();
  });
  it('username is empty', () => {
    let username = component.registerForm.controls['username']
    username.setValue('')
    let email = component.registerForm.controls['email']
    email.setValue('<EMAIL>')
    let password = component.registerForm.controls['password']
    password.setValue("<PASSWORD>")
    expect(component.registerForm.valid).toBeFalsy();
  });
});
<file_sep>import { Component, OnInit } from '@angular/core';
import { FormGroup, FormBuilder, Validators, ReactiveFormsModule, FormControl } from '@angular/forms';
import { LoginInfo } from '../login-info';
import { TokenstorageService } from '../tokenstorage.service';
import { HttpClient } from '@angular/common/http';
import { Observable } from 'rxjs';
import { JwtResponse } from '../jwt-response';
import { Router } from '@angular/router';
import { LoginServiceService } from '../login-service.service';
@Component({
selector: 'app-login',
templateUrl: './login.component.html',
styleUrls: ['./login.component.css']
})
export class LoginComponent implements OnInit {
submitted:boolean=false ;
signInForm: FormGroup;
isLoggedIn = false;
isLoginFailed = false;
errorMessage = '';
roles: string[] = [];
private loginInfo: LoginInfo;
private loginUrl = 'http://localhost:45000/NewsFeedSearch/api/auth/signin';
constructor(private fb:FormBuilder, private tokenStorage: TokenstorageService,
private http: HttpClient,private router:Router,private loginservice:LoginServiceService) { }
ngOnInit() {
this.signInForm=this.fb.group({
username:['',[Validators.required,Validators.minLength(2)]],
password:['',[Validators.required,Validators.minLength(6)]]
})
if (this.tokenStorage.getToken()) {
this.isLoggedIn = true;
this.loginservice.isLoggedIn=true;
this.roles = this.tokenStorage.getAuthorities();
}
}
onSubmit(){
this.submitted=true;
console.log(this.signInForm.value);
if(this.signInForm.invalid)
return;
else{
this.attemptAuth(this.signInForm.value).subscribe(
data=>{
this.tokenStorage.saveToken(data.accessToken);
this.tokenStorage.saveUsername(data.username);
this.tokenStorage.saveAuthorities(data.authorities);
this.isLoginFailed = false;
this.isLoggedIn = true;
console.log()
this.roles = this.tokenStorage.getAuthorities();
alert("Success Fully Logged In!")
console.log(this.roles[0])
        if(this.roles[0]==="ROLE_USER"){
this.reloadPage();
}else{
this.router.navigateByUrl('mic/admin');
}
},
error=>{
console.log(error);
this.errorMessage = error.error.message;
console.log(this.errorMessage);
        if(error.status==400)
alert(this.errorMessage);
if(error.status==0||error.status==504)
alert("Unknown error");
this.isLoginFailed = true;
this.errorMessage="";
}
);
}
}
reloadPage() {
this.router.navigateByUrl('/news');
}
attemptAuth(credentials: LoginInfo): Observable<JwtResponse> {
return this.http.post<JwtResponse>(this.loginUrl, credentials);
}
get f()
{
return this.signInForm.controls;
}
// signup1(){
// console.log("signup clicked");
// this.router.navigate(['signup']);
// }
}
<file_sep>import { browser, by, element } from 'protractor';
export class SignUpPage{
navigateTo(){
return browser.get('/signup');
}
getTitleText(){
return element(by.css('h3')).getText();
}
setUsername(username:string){
element(by.id('username')).clear();
element(by.id('username')).sendKeys(username);
}
setEmail(email:string){
element(by.id('email')).clear();
element(by.id('email')).sendKeys(email);
}
setPassword(password:string){
element(by.id('password')).clear();
element(by.id('password')).sendKeys(password);
}
clickSignup(){
element(by.id('signup')).click(); }
}<file_sep>import { browser, element, by } from 'protractor';
export class LoginPage{
navigateTo(){
browser.get('login');
}
getTitleText(){
return element(by.css('h3')).getText();
}
setUsername(username:string){
return element(by.id('username')).sendKeys(username);
}
setPasswprd(password:string){
return element(by.id('password')).sendKeys(password);
}
clickLogin(){
element(by.id('login')).click();
}
clickSearchHistory(){
element(by.id('searchHistory')).click();
}
clickSearchNews()
{
element(by.id('searchNutton')).click();
}
clickLogout()
{
element(by.id('logout')).click();
}
getTable()
{
return element(by.css('table'));
}
getTableRows()
{
return this.getTable().all(by.css('tr')).first();
}
getTableCell()
{
return this.getTableRows().all(by.css('td'));
}
clickTableButton()
{
this.getTableCell().all(by.css('button')).click();
}
}<file_sep>import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { SignupComponent } from './signup/signup.component';
import { LoginComponent } from './login/login.component';
import { NewsApiComponent } from './news-api/news-api.component';
import { SearchHistoryComponent } from './search-history/search-history.component';
import { AdminComponent } from './admin/admin.component';
import { AuthGuard } from './auth.guard';
const routes: Routes = [
{
path: 'signup',
component: SignupComponent
},
{
path: '',
component: LoginComponent
},
{
path: 'news',
component: NewsApiComponent,
canActivate: [AuthGuard]
},
{
path: 'login',
component: LoginComponent
}, {
path: 'searchHistory',
component: SearchHistoryComponent,
canActivate: [AuthGuard]
},
{
path: 'mic/admin',
component: AdminComponent
}
];
@NgModule({
imports: [RouterModule.forRoot(routes)],
exports: [RouterModule]
})
export class AppRoutingModule { }
<file_sep>import { Component, OnInit } from '@angular/core';
import { TokenstorageService } from '../tokenstorage.service';
import { KeywordSearch } from '../keyword-search';
import { HttpClient } from '@angular/common/http';
import { map } from 'rxjs/operators';
import { ResourceLoader } from '@angular/compiler';
@Component({
selector: 'app-search-history',
templateUrl: './search-history.component.html',
styleUrls: ['./search-history.component.css']
})
export class SearchHistoryComponent implements OnInit {
private deleteKeywordURL = "http://localhost:45000/NewsFeedSearch/api/user/deleteSearchKeyword/"
keywordSearch:any;
private getAllKeywordUrl:string="http://localhost:45000/NewsFeedSearch/api/user/getAllSearchKeywords/";
constructor(private tokenStorage:TokenstorageService,
private _http:HttpClient) { }
errorMessage:string="";
ngOnInit() {
this.getDataObservable(this.getAllKeywordUrl+this.tokenStorage.getUsername()).subscribe(
data => {
this.keywordSearch = data;
console.log("I CANT SEE DATA HERE: ", this.keywordSearch);
}
);
}
deleteKeyword(id:number){
console.log(id);
console.log(this.deleteKeywordURL+id)
this._http.delete(this.deleteKeywordURL+id).subscribe(
result => console.log(result),
error => this.errorMessage = error
);
console.log("Delete Clicked");
this.reload();
}
getDataObservable(url:string) {
return this._http.get(url);
}
reload(){
this.getDataObservable(this.getAllKeywordUrl+this.tokenStorage.getUsername()).subscribe(
data => {
this.keywordSearch = data;
console.log("I CANT SEE DATA HERE: ", this.keywordSearch);
}
);
}
// getAllKeyword(){
// this.http.get(this.getAllKeywordUrl+this.tokenStorage.getUsername())
// .subscribe((data:KeywordSearch)=>this.keywordSearch={
// keyword:data['keyword'],
// username:data['username']
// })
// console.log(this.keywordSearch.keyword);
// }
}
<file_sep>export class KeywordSearch {
keyword:any;
username:string;
constructor(keyword:any,username:string){
this.keyword=keyword;
this.username=username;
}
}
<file_sep>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { LoginComponent } from './login.component';
import { ReactiveFormsModule, FormGroup, FormsModule } from '@angular/forms';
import { HttpClientModule, HttpClient } from '@angular/common/http';
import { RouterTestingModule } from '@angular/router/testing';
import { LoginServiceService } from '../login-service.service';
import { TokenstorageService } from '../tokenstorage.service';
import { By } from '@angular/platform-browser';
describe('LoginComponent', () => {
let component: LoginComponent;
let fixture: ComponentFixture<LoginComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ LoginComponent],
imports : [ ReactiveFormsModule, HttpClientModule, RouterTestingModule ,FormsModule],
providers:[LoginServiceService,TokenstorageService,HttpClient]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(LoginComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
it('should create form',()=>{
expect(component.signInForm.contains('username')).toBeTruthy();
expect(component.signInForm.contains('password')).toBeTruthy();
});
it('should create form controls',()=>{
let name=component.signInForm.get('username');
name.setValue('');
expect(name.valid).toBeFalsy();
name.setValue('user123');
expect(name.valid).toBeTruthy();
let password=component.signInForm.get('password');
password.setValue('');
expect(password.valid).toBeFalsy();
password.setValue('<PASSWORD>');
expect(password.valid).toBeTruthy();
});
it('form submit test',()=>{
let service=TestBed.get(LoginServiceService);
// let spy=spyOn(service,'attemptAuth').and.returnValue({subscribe:()=>{return 'Success'}});
let name=component.signInForm.get('username');
name.setValue('user123');
let password=component.signInForm.get('password');
password.setValue('<PASSWORD>');
let form=fixture.debugElement.query(By.css('form'));
form.triggerEventHandler('submit',null);
fixture.detectChanges();
// expect(spy).toHaveBeenCalled();
})
});
<file_sep>FROM mysql:5.5
ADD signup.sql docker-entrypoint-initdb.d<file_sep>package com.cts.repository;
import java.util.List;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
import com.cts.entity.Users;
@Repository
public interface UsersRepository extends CrudRepository<Users,Integer> {
Users findByUsername(String username);
Boolean existsByUsername(String username);
Boolean existsByEmail(String email);
}
<file_sep>spring.datasource.url=jdbc:mysql://db:3306/signup
spring.datasource.username=root
spring.datasource.password=<PASSWORD>
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
server.port=46001
spring.jpa.properties.hibernate.dialect = org.hibernate.dialect.MySQL5Dialect
spring.jpa.properties.hibernate.id.new_generator_mappings = false
spring.jpa.properties.hibernate.format_sql = true
spring.jpa.hibernate.ddl-auto = update
logging.level.org.hibernate.SQL=DEBUG<file_sep>import {by, browser,element} from 'protractor'<file_sep>import { Component, OnInit } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Signupinfo } from '../signupinfo';
import { ResourceLoader } from '@angular/compiler';
import { FormBuilder, FormControl, FormGroup } from '@angular/forms';
@Component({
selector: 'app-admin',
templateUrl: './admin.component.html',
styleUrls: ['./admin.component.css']
})
export class AdminComponent implements OnInit {
private userUrl="http://localhost:45000/NewsFeedSearch/api/user/getAllUsers"
private deleteUrl="http://localhost:45000/NewsFeedSearch/api/user/blockUserById/"
private searchUrl="http://localhost:45000/NewsFeedSearch/api/user/searchUser/"
private errorMessage="";
private searchValue1="";
constructor(private _http:HttpClient,private formBuilder: FormBuilder) { }
signUpInfo:any;
searchForm: FormGroup;
ngOnInit() {
this.searchForm = this.formBuilder.group({
search:new FormControl()
});
this.getDataObservable(this.userUrl).subscribe(
data => {
this.signUpInfo= data;
console.log("I CAN SEE DATA HERE: ", this.signUpInfo);
}
);
}
getDataObservable(url:string) {
return this._http.get(url);
}
search(searchValue:string){
console.log(searchValue);
this.searchValue1=""+searchValue.search
this.getDataObservable(this.searchUrl+searchValue.search).subscribe(
data => {
this.signUpInfo= data;
console.log("Serach Data ", this.signUpInfo);
}
);
this.reload();
}
reload(){
this.getDataObservable(this.userUrl).subscribe(
data => {
this.signUpInfo= data;
console.log("I CAN SEE DATA HERE: ", this.signUpInfo);
}
);
}
block(id:number){
this._http.get(this.deleteUrl+id).subscribe(
data => {
this.signUpInfo= data;})
console.log("After deleting", this.signUpInfo);
console.log("Block Clicked");
this.reload();
}
}
<file_sep>FROM tomcat:8-jre8-slim
COPY services/target/NewsFeedSearch.war /usr/local/tomcat/webapps
COPY angular/dist/NewsSearch/. /usr/local/tomcat/webapps/NewsSearch
<file_sep>package com.cts.entity;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
@Entity
public class Articles {
@Id
@GeneratedValue(strategy=GenerationType.AUTO)
private int articleId;
private String title;
private String description;
private String imageURL;
private String author;
private String publishedAt;
public Articles(){
}
public Articles(String title, String description, String imageURL, String author, String publishedAt) {
this.title = title;
this.description = description;
this.imageURL = imageURL;
this.author = author;
this.publishedAt = publishedAt;
}
public int getId() {
return articleId;
}
public void setId(int articleId) {
this.articleId = articleId;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getImageURL() {
return imageURL;
}
public void setImageURL(String imageURL) {
this.imageURL = imageURL;
}
public String getAuthor() {
return author;
}
public void setAuthor(String author) {
this.author = author;
}
public String getPublishedAt() {
return publishedAt;
}
public void setPublishedAt(String publishedAt) {
this.publishedAt = publishedAt;
}
}
<file_sep>package com.cts.security;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.AuthenticationEntryPoint;
import org.springframework.stereotype.Component;
@Component
public class JwtAuthEntrypoint implements AuthenticationEntryPoint {
private static final Logger LOGGER= LoggerFactory.getLogger(JwtAuthEntrypoint.class);
public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException e)
throws IOException, ServletException {
// TODO Auto-generated method stub
LOGGER.error("UNAUTHORISED MESSAGE",e.getMessage());
response.sendError(HttpServletResponse.SC_UNAUTHORIZED,"Error-.Unauthorized");
}
}
<file_sep>import { browser, protractor } from 'protractor';
import { SignUpPage } from './signup.po';
fdescribe('signup component',()=>{
let signUp:SignUpPage
beforeEach(()=>{
signUp=new SignUpPage();
signUp.navigateTo();
})
it('should display heading',()=>{
expect(signUp.getTitleText()).toEqual('Registration Form');
})
// it('sign up with new user data',()=>{
// signUp.setUsername('ygvtfcu');
// signUp.setPassword('<PASSWORD>');
// signUp.setEmail("<EMAIL>");
// signUp.clickSignup();
// browser.wait(protractor.ExpectedConditions.alertIsPresent(),20000)
// expect(browser.switchTo().alert().getText()).toEqual("Successfully Registered");
// browser.switchTo().alert().accept();
// // expect(browser.getCurrentUrl()).toEqual('http:localhost:4200/login');
// })
it('check register with already existing data',()=>
{
signUp.setUsername('rahul');
signUp.setEmail('<EMAIL>');
signUp.setPassword('<PASSWORD>');
signUp.clickSignup();
browser.wait(protractor.ExpectedConditions.alertIsPresent(),20000);
expect(browser.switchTo().alert().getText()).toEqual("Something went wrong! Please try again later.");
browser.switchTo().alert().accept();
expect(browser.getCurrentUrl()).toEqual('http://localhost:4200/signup');
})
it('user sign Up when server down',()=>{
signUp.setUsername('klbgfhfhbhhm');
signUp.setPassword('<PASSWORD>');
signUp.setEmail('<EMAIL>')
signUp.clickSignup()
browser.wait(protractor.ExpectedConditions.alertIsPresent(),1000);
expect(browser.switchTo().alert().getText()).toEqual("Unknown error");
browser.switchTo().alert().accept();
expect(browser.getCurrentUrl()).toEqual('http://localhost:4200/signup');
})
})
<file_sep>import { Component, OnInit } from '@angular/core';
import { FormGroup, FormBuilder, Validators, ReactiveFormsModule, FormControl } from '@angular/forms';
import { AuthServiceService } from '../auth-service.service';
import { Signupinfo } from '../signupinfo';
import { Observable } from 'rxjs';
import { HttpClient,HttpHeaders } from '@angular/common/http';
import { Router } from '@angular/router';
// const httpOptions = {
// headers: new HttpHeaders({ 'Content-Type': 'application/json' })
// };
@Component({
selector: 'app-signup',
templateUrl: './signup.component.html',
styleUrls: ['./signup.component.css']
})
export class SignupComponent implements OnInit {
submitted:boolean = false;
registerForm: FormGroup;
signupInfo: Signupinfo;
isSignedUp:boolean = false;
isSignUpFailed:boolean = false;
errorMessage:string = '';
role = new Array<String>();
private signupUrl = 'http://localhost:45000/NewsFeedSearch/api/auth/signup';
constructor(private authService: AuthServiceService,private formBuilder: FormBuilder,
private http: HttpClient,private router:Router) { }
ngOnInit() {
this.registerForm = this.formBuilder.group({
username: ['', [Validators.required, Validators.minLength(2)]],
email: ['', [Validators.required, Validators.email]],
password: ['', [Validators.required]],
})
}
get f()
{
return this.registerForm.controls;
}
onSubmit(){
this.submitted=true;
if(this.registerForm.invalid){
return;
}
this.signupInfo=new Signupinfo(
this.registerForm.get('username').value,this.registerForm.get('email').value,
this.registerForm.get('password').value);
console.log(this.registerForm.value);
this.signUp(this.signupInfo).subscribe(
data=>{
console.log(data);
alert('Successfully Registered')
this.isSignedUp = true;
this.isSignUpFailed = false;
this.router.navigateByUrl('/login');
},
error => {
console.log(error);
if(error.status == 400)
alert("Something went wrong! Please try again later.");
if(error.status==0||error.status==504)
alert("Unknown error");
this.errorMessage = error.error.message;
this.isSignUpFailed = true;
}
);
console.log(this.isSignedUp);
if(this.isSignedUp){
console.log("INSIGNEDUP")
}
}
signUp(info:Signupinfo): Observable<string> {
console.log(info+""+info.password);
return this.http.post<string>(this.signupUrl,info);
}
}
<file_sep>import { Component } from '@angular/core';
import { TokenstorageService } from './tokenstorage.service';
import { Route } from '@angular/compiler/src/core';
import { Router } from '@angular/router';
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent {
private roles: string[];
authority: string;
isLogged:boolean=false;
isNotLogged:boolean=false;
constructor(private tokenStorage: TokenstorageService,private router: Router) {
if(tokenStorage.getToken()){
this.isLogged=true;
this.isNotLogged=false;
}
if(tokenStorage.getToken()==null){
this.isNotLogged=true;
this.isLogged=false;
}
}
ngOnInit() {
if (this.tokenStorage.getToken()) {
this.roles = this.tokenStorage.getAuthorities();
this.roles.every(role => {
if (role === 'ROLE_ADMIN') {
this.authority = 'admin';
return false;
} else if (role === 'ROLE_PM') {
this.authority = 'pm';
return false;
}
this.authority = 'user';
return true;
});
}
}
logout(){
console.log("signed out")
this.tokenStorage.signOut();
this.router.navigateByUrl('/login');
}
check(){
if(localStorage.getItem('loggedIn')){
return true;
}else{
return false;
}
}
}
<file_sep>package com.cts.entity;
import java.time.LocalDateTime;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.validation.constraints.NotNull;
import org.hibernate.annotations.CreationTimestamp;
@Entity
public class KeywordSearch {
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
private int id;
@NotNull
private String keyword;
@NotNull
private String username;
@CreationTimestamp
private LocalDateTime localTime;
public KeywordSearch(){
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getKeyword() {
return keyword;
}
public void setKeyword(String keyword) {
this.keyword = keyword;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public LocalDateTime getLocalTime() {
return localTime;
}
public void setLocalTime(LocalDateTime localTime) {
this.localTime = localTime;
}
public KeywordSearch(String keyword, String username) {
this.keyword = keyword;
this.username = username;
}
}
<file_sep>import {LoginPage} from './login.po'
import {browser, protractor} from 'protractor'
describe('login component test',()=>{
let loginPage:LoginPage
beforeEach(()=>{
loginPage= new LoginPage();
loginPage.navigateTo();
})
it('check login heading',()=>{
expect(loginPage.getTitleText()).toEqual('Login');
})
it('check user with login data ',()=>{
loginPage.setUsername('lkjhgfdsa');
loginPage.setPasswprd('<PASSWORD>');
loginPage.clickLogin();
browser.wait(protractor.ExpectedConditions.alertIsPresent(),10000)
expect(browser.switchTo().alert().getText()).toEqual("Success Fully Logged In!")
browser.switchTo().alert().accept();
expect(browser.getCurrentUrl()).toEqual('http://localhost:4200/news');
loginPage.clickSearchHistory();
expect(browser.getCurrentUrl()).toEqual('http://localhost:4200/searchHistory');
expect(loginPage.getTable().isPresent()).toBeTruthy();
// loginPage.clickTableButton();
// browser.wait(protractor.ExpectedConditions.alertIsPresent(),1000)
// expect(browser.switchTo().alert().getText()).toEqual("Search deleted");
// browser.switchTo().alert().accept();
// expect(browser.getCurrentUrl()).toEqual('http://localhost:4200/searchHistory');
// loginPage.clickSearchNews();
// expect(browser.getCurrentUrl()).toEqual('http://localhost:4200/news');
})
it('check for blackListed user',()=>{
loginPage.setUsername('zxcvbnm');
loginPage.setPasswprd('<PASSWORD>');
loginPage.clickLogin();
browser.wait(protractor.ExpectedConditions.alertIsPresent(),10000);
expect(browser.switchTo().alert().getText()).toEqual("You are Blocked by admin");
browser.switchTo().alert().accept();
// expect(browser.getCurrentUrl()).toEqual('http://localhost:4200/login');
})
it('user sign in with invalid data',()=>{
loginPage.setUsername('klbgfhfhbhhm');
loginPage.setPasswprd('<PASSWORD>');
loginPage.clickLogin();
browser.wait(protractor.ExpectedConditions.alertIsPresent(),1000);
expect(browser.switchTo().alert().getText()).toEqual("No User Found");
// browser.switchTo().alert().accept();
// expect(browser.getCurrentUrl()).toEqual("http://localhost:4200/login");
})
it('successfully logged out',()=>{
loginPage.setUsername('lkjhgfdsa');
loginPage.setPasswprd('<PASSWORD>');
loginPage.clickLogin();
browser.wait(protractor.ExpectedConditions.alertIsPresent(),10000);
browser.switchTo().alert().accept();
expect(browser.getCurrentUrl()).toEqual('http://localhost:4200/news');
loginPage.clickLogout();
expect(browser.getCurrentUrl()).toEqual('http://localhost:4200/login');
})
it('user sign in when server down',()=>{
loginPage.setUsername('klbgfhfhbhhm');
loginPage.setPasswprd('<PASSWORD>');
loginPage.clickLogin();
browser.wait(protractor.ExpectedConditions.alertIsPresent(),1000);
expect(browser.switchTo().alert().getText()).toEqual("Unknown error");
browser.switchTo().alert().accept();
expect(browser.getCurrentUrl()).toEqual("http://localhost:4200/login");
})
})<file_sep>import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
@Injectable({
providedIn: 'root'
})
export class NewsApiService {
private keywordSaveURL="http://localhost:46001/NewsFeedSearch/api/user/saveKeyword"
api_key = '<KEY>';
pageSize:number=1;
constructor(private http:HttpClient) { }
initSources(){
return this.http.get('https://newsapi.org/v2/sources?language=en&apiKey='+this.api_key);
}
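// Note: pageSize is post-incremented inside the query string below, so each
// call to initArticles() requests the next page of top headlines.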
initArticles(){
return this.http.get('https://newsapi.org/v2/top-headlines?pageSize=8&page=' + (this.pageSize++) + '&sources=bbc-news,the-verge,abc-news&apiKey=' + this.api_key);
}
getArticlesByID(source: String){
return this.http.get('https://newsapi.org/v2/everything?' +
'q='+source+
'&from=2019-02-01&' +
'sortBy=popularity&apiKey='+this.api_key);
}
sendKeyword(){
}
}
<file_sep>version: '3'
services:
db:
build: ./database
restart: always
environment:
MYSQL_ROOT_PASSWORD: <PASSWORD>
MYSQL_DATABASE: signup
web:
build: .
restart: always
ports:
- "9095:8080"
links:
- "db"
depends_on:
- "db"
<file_sep>package com.cts.exception;
import java.util.List;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.InternalAuthenticationServiceException;
import org.springframework.validation.BindingResult;
import org.springframework.validation.FieldError;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseStatus;
import com.cts.entity.ResponseMessage;
public class ExeceptionHandle {
private static final Logger LOGGER = LoggerFactory.getLogger(ExeceptionHandle.class);
@ExceptionHandler(Exception.class)
@ResponseStatus(code=HttpStatus.BAD_REQUEST)
public ResponseEntity<ResponseMessage> handle(Exception e){
LOGGER.error(e.getMessage());
if(e instanceof MethodArgumentNotValidException){
MethodArgumentNotValidException ex =(MethodArgumentNotValidException) e;
BindingResult bindingResult = ex.getBindingResult();
LOGGER.error(""+ex.getBindingResult());
List<FieldError> fieldError = bindingResult.getFieldErrors();
// Build the message locally so repeated or concurrent requests do not accumulate old errors.
StringBuilder defaultMessage = new StringBuilder();
fieldError.forEach(err -> defaultMessage.append(err.getField()).append(" ").append(err.getDefaultMessage()).append(",\n"));
return new ResponseEntity<ResponseMessage>(new ResponseMessage(
defaultMessage.substring(0, defaultMessage.length() - 1)), HttpStatus.BAD_REQUEST);
}
if(e instanceof BadCredentialsException){
String defaultMessage1="error:";
BadCredentialsException ex =(BadCredentialsException) e;
return new ResponseEntity<ResponseMessage>(new ResponseMessage(
defaultMessage1.substring(0,defaultMessage1.length()-1)),HttpStatus.BAD_REQUEST);
}
if(e instanceof ConstraintViolationException){
ConstraintViolationException ex =(ConstraintViolationException) e;
Set<ConstraintViolation<?>> set = ex.getConstraintViolations();
String errorMessage="Validation Failed:";
for(ConstraintViolation<?> constratintViolation:set){
errorMessage+=constratintViolation.getMessageTemplate()+",";
}
return new ResponseEntity<ResponseMessage>(new ResponseMessage(
errorMessage.substring(0,errorMessage.length()-1)),HttpStatus.BAD_REQUEST);
}
if(e instanceof InternalAuthenticationServiceException){
InternalAuthenticationServiceException ex= (InternalAuthenticationServiceException) e;
String defaultMessage2=ex.getMessage();
return new ResponseEntity<ResponseMessage>(new ResponseMessage(
defaultMessage2),HttpStatus.BAD_REQUEST);
}
return new ResponseEntity<ResponseMessage>(new ResponseMessage(
e.getMessage()),HttpStatus.INTERNAL_SERVER_ERROR);
}
}<file_sep>import { Component, OnInit } from '@angular/core';
import { FormGroup, FormBuilder,FormControl} from '@angular/forms';
import { Router } from '@angular/router';
import { NewsApiService } from '../news-api.service';
import { HttpClient } from '@angular/common/http';
import { KeywordSearch } from '../keyword-search';
import { TokenstorageService } from '../tokenstorage.service';
import { Observable } from 'rxjs';
@Component({
selector: 'app-news-api',
templateUrl: './news-api.component.html',
styleUrls: ['./news-api.component.css']
})
export class NewsApiComponent implements OnInit {
searchH=false;
MainContent=true;
searchForm: FormGroup;
mArticles:Array<any>;
mSources:Array<any>;
page:number=1;
username:string;
errorMessage:string = '';
searchKeyword:KeywordSearch;
private keywordSaveURL="http://localhost:45000/NewsFeedSearch/api/user/saveKeyword"
constructor(private formBuilder: FormBuilder,
private router: Router, private newsapi:NewsApiService,private http:HttpClient,private tokenStorage:TokenstorageService) {
}
ngOnInit() {
//load articles
this.newsapi.initArticles().subscribe(data => this.mArticles = data['articles']);
//load news sources
this.newsapi.initSources().subscribe(data=> this.mSources = data['sources']);
this.searchForm = this.formBuilder.group({
search:new FormControl()
});
}
searchArticles(source:any){
console.log("selected source is: "+source.search);
this.username= this.tokenStorage.getUsername();
this.searchKeyword = new KeywordSearch(source.search,this.tokenStorage.getUsername());
this.sendKeyword(this.searchKeyword).subscribe(
data=>{
console.log(data);
},
error => {
console.log(error);
this.errorMessage = error.error.message;
}
);
console.log("clocked searchArticles");
this.newsapi.getArticlesByID(source.search).subscribe(data => this.mArticles = data['articles']);
}
SearchHistory() {
console.log("Search History clicked");
this.searchH=true;
this.MainContent=false;
this.router.navigateByUrl('searchHistory');
}
loadMore(){
console.log("load more clicked");
this.newsapi.initArticles().subscribe(data => this.mArticles = data['articles']);
this.newsapi.initSources().subscribe(data=> this.mSources = data['sources']);
}
sendKeyword(info:KeywordSearch): Observable<string> {
return this.http.post<string>(this.keywordSaveURL,info);
}
}
<file_sep>package com.cts.controller;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import com.cts.entity.Articles;
import com.cts.entity.KeywordSearch;
import com.cts.entity.Users;
import com.cts.exception.ExeceptionHandle;
import com.cts.repository.ArticleRepository;
import com.cts.service.UserService;
@CrossOrigin(origins="http://localhost:4200")
@Controller
@RequestMapping("api/user")
public class AccessController extends ExeceptionHandle{
@Autowired UserService userService;
@Autowired ArticleRepository articleRepo;
@GetMapping("/api/test/user/saveArticle")
@PreAuthorize("hasRole('ROLE_USER') ")
public boolean addToBookmarks(Users users){
Users user1 =userService.getUserByUsername(users.getUsername());
System.out.println(user1);
user1.getArticleList().add(users.getArticleList().get(0));
userService.saveArticle(users.getArticleList());
return true;
}
@GetMapping("/api/test/user/getArticles")
@PreAuthorize("hasRole('ROLE_USER') or hasRole('ROLE_ADMIN') or hasRole('ROLE_GUEST')")
public List<Articles> getAllArticlesByUsers(Users users ){
Users userArticle = userService.getArticleByUsername(users.getUsername());
return userArticle.getArticleList();
}
@PostMapping("saveKeyword")
@PreAuthorize("hasRole('ROLE_USER') or hasRole('ROLE_ADMIN') or hasRole('ROLE_GUEST')")
public ResponseEntity<?> saveSearchedWord(@RequestBody KeywordSearch keywordSearch){
return new ResponseEntity<Boolean>(userService.saveSearchword(keywordSearch),HttpStatus.OK);
}
@GetMapping("getAllSearchKeywords/{username}")
@PreAuthorize("hasRole('ROLE_USER') or hasRole('ROLE_ADMIN') or hasRole('ROLE_GUEST')")
public ResponseEntity<List<KeywordSearch>> getAllKeywords(@PathVariable String username){
return new ResponseEntity<List<KeywordSearch>>(userService.getAllSearchWordsByUsername(username)
,HttpStatus.OK);
}
@DeleteMapping("deleteSearchKeyword/{searchKeywordId}")
public ResponseEntity<Boolean> deleteSearchKeyword(@PathVariable Integer searchKeywordId){
System.out.println("IN delete");
return new ResponseEntity<Boolean>(userService.deleteKeyword(searchKeywordId),HttpStatus.OK);
}
@GetMapping("getAllUsers")
@PreAuthorize("hasRole('ROLE_USER') or hasRole('ROLE_ADMIN') or hasRole('ROLE_GUEST')")
public ResponseEntity<List<Users>> getAllUsers(){
return new ResponseEntity<List<Users>>(userService.getAllUsers(),HttpStatus.OK);
}
@GetMapping("blockUserById/{id}")
@PreAuthorize("hasRole('ROLE_USER') or hasRole('ROLE_ADMIN') or hasRole('ROLE_GUEST')")
public ResponseEntity<Boolean> blockUser(@PathVariable Integer id){
return new ResponseEntity<Boolean>(userService.blockUserById(id),HttpStatus.OK);
}
@GetMapping("searchUser/{username}")
@PreAuthorize("hasRole('ROLE_USER') or hasRole('ROLE_ADMIN') or hasRole('ROLE_GUEST')")
public ResponseEntity<Users> searchUser(@PathVariable String username){
return new ResponseEntity<Users>(userService.getUserByUsername(username),HttpStatus.OK);
}
}
<file_sep>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { NewsApiComponent } from './news-api.component';
import { ReactiveFormsModule, FormsModule } from '@angular/forms';
import { HttpClientModule, HttpClient } from '@angular/common/http';
import { RouterTestingModule } from '@angular/router/testing';
import { NewsApiService } from '../news-api.service';
import { AuthGuard } from '../auth.guard';
import { DebugElement } from '@angular/core';
import { By } from '@angular/platform-browser';
import { of } from 'rxjs';
describe('NewsApiComponent', () => {
let component: NewsApiComponent;
let fixture: ComponentFixture<NewsApiComponent>;
let debugElement:DebugElement;
let newsSevice: NewsApiService;
let spyService;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ NewsApiComponent ],
imports : [ ReactiveFormsModule, HttpClientModule, RouterTestingModule,FormsModule ],
providers:[NewsApiService,HttpClient,AuthGuard]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(NewsApiComponent);
component = fixture.componentInstance;
debugElement = fixture.debugElement;
newsSevice=debugElement.injector.get(NewsApiService);
spyService= spyOn(newsSevice,'initArticles').and.returnValue({ subscribe: () => {} });
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
it('call service',()=>{
let form = fixture.debugElement.query(By.css('form'));
let serachField= component.searchForm.controls['search'];
serachField.setValue('mahi');
form.triggerEventHandler('Search',null);
expect(spyService).toHaveBeenCalled();
})
it('should save search after fetch from api',()=>{
let data = {
keyword:'violin',
username:'abcnfdbmndfbmnb'
};
// spyOn(component,'sendKeyword').and.callFake(()=>{
// component.sendKeyword(data);
// });
let spy = spyOn(component,'sendKeyword').and.returnValue(of(true));
component.sendKeyword(data);
expect(spy).toHaveBeenCalled();
});
});
<file_sep>package com.cts.controller;
import javax.validation.Valid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.cts.entity.JwtResponse;
import com.cts.entity.ResponseMessage;
import com.cts.entity.Users;
import com.cts.exception.ExeceptionHandle;
import com.cts.repository.UsersRepository;
import com.cts.security.JwtProvider;
import com.cts.service.UserService;
@CrossOrigin(origins = "http://localhost:4200")
@RestController
@RequestMapping("api/auth")
public class MainController extends ExeceptionHandle{
@Autowired UserService userService;
@Autowired AuthenticationManager authenticationManager;
@Autowired JwtProvider jwtProvider;
@Autowired UsersRepository userRepo;
@Autowired PasswordEncoder passwordEncoder;
@PostMapping("/signup")
public ResponseEntity<?> registerUser(@Valid @RequestBody Users users) {
System.out.println("M"+users.getPassword());
System.out.println(users);
System.out.println(users.getEmail());
System.out.println(users.getPassword());
if (userRepo.existsByUsername(users.getUsername())) {
return new ResponseEntity<>(new ResponseMessage("Fail -> Username is already taken!"),
HttpStatus.BAD_REQUEST);
}
if (userRepo.existsByEmail(users.getEmail())) {
return new ResponseEntity<>(new ResponseMessage("Fail -> Email is already in use!"),
HttpStatus.BAD_REQUEST);
}
users.setPassword(passwordEncoder.encode(users.getPassword()));
userService.save(users);
return new ResponseEntity<>(new ResponseMessage("Message: Details saved Successfully"),HttpStatus.OK);
}
@PostMapping("/signin")
public ResponseEntity<?> authenticateUser( @RequestBody Users users ) {
System.out.println("in signin");
System.out.println(users.getUsername()+" "+users.getPassword());
Authentication authentication = authenticationManager.authenticate(
new UsernamePasswordAuthenticationToken(users.getUsername(),users.getPassword()));
System.out.println(users.getUsername()+" "+users.getPassword());
SecurityContextHolder.getContext().setAuthentication(authentication);
String jwt = jwtProvider.generateJwtToken(authentication);
UserDetails userDetails = (UserDetails) authentication.getPrincipal();
return ResponseEntity.ok(new JwtResponse(jwt, userDetails.getUsername(), userDetails.getAuthorities()));
}
}
<file_sep>package com.cts.entity;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
@Entity
public class Role {
@Id
@GeneratedValue(strategy=GenerationType.AUTO)
private int id;
private String roleName;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getRoleName() {
return roleName;
}
public void setRoleName(String roleName) {
this.roleName = roleName;
}
}
|
abab39d45c82d0b603b3c340739b914d74f19d45
|
[
"YAML",
"INI",
"Java",
"TypeScript",
"Dockerfile"
] | 29
|
TypeScript
|
micgogi/NewsFeed
|
5c09d5a10f5b32c851cdf6dbca842316d3775d07
|
78d1ad33682c2a6773226946ed9e71a45924ea0e
|
refs/heads/master
|
<file_sep>const navbar = document.getElementById("navbar");
const navbarToggle = navbar.querySelector(".navbar-toggle");
function openMobileNavbar() {
navbar.classList.add("opened");
navbarToggle.setAttribute("aria-label", "Close navigation menu.");
}
function closeMobileNavbar() {
navbar.classList.remove("opened");
navbarToggle.setAttribute("aria-label", "Open navigation menu.");
}
var slideIndex = 0;
showSlides();
var slides, dots;
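// showSlides() hides every slide, shows the one at slideIndex, highlights the matching
// dot, and re-schedules itself with setTimeout so the carousel advances automatically.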
function showSlides() {
var i;
slides = document.getElementsByClassName("mySlides");
dots = document.getElementsByClassName("dot");
for (i = 0; i < slides.length; i++) {
slides[i].style.display = "none";
}
slideIndex++;
if (slideIndex > slides.length) { slideIndex = 1 }
for (i = 0; i < dots.length; i++) {
dots[i].className = dots[i].className.replace(" active", "");
}
slides[slideIndex - 1].style.display = "block";
dots[slideIndex - 1].className += " active";
setTimeout(showSlides, 8000); // Change image every 8 seconds
}
function plusSlides(position) {
slideIndex += position;
if (slideIndex > slides.length) { slideIndex = 1 } else if (slideIndex < 1) { slideIndex = slides.length }
for (var i = 0; i < slides.length; i++) {
slides[i].style.display = "none";
}
for (var i = 0; i < dots.length; i++) {
dots[i].className = dots[i].className.replace(" active", "");
}
slides[slideIndex - 1].style.display = "block";
dots[slideIndex - 1].className += " active";
}
function currentSlide(index) {
if (index > slides.length) { index = 1 } else if (index < 1) { index = slides.length }
for (var i = 0; i < slides.length; i++) {
slides[i].style.display = "none";
}
for (var i = 0; i < dots.length; i++) {
dots[i].className = dots[i].className.replace(" active", "");
}
slides[index - 1].style.display = "block";
dots[index - 1].className += " active";
}
navbarToggle.addEventListener("click", () => {
if (navbar.classList.contains("opened")) {
closeMobileNavbar();
} else {
openMobileNavbar();
}
});
const navbarMenu = navbar.querySelector(".navbar-menu");
const navbarLinksContainer = navbar.querySelector(".navbar-links");
navbarLinksContainer.addEventListener("click", (clickEvent) => {
clickEvent.stopPropagation();
});
navbarMenu.addEventListener("click", closeMobileNavbar);
document
.getElementById("options")
.querySelectorAll("input[name='navtype']")
.forEach((option) => {
option.addEventListener("change", (e) => {
const navType = e.target.id.split("-").join(" ");
navbarMenu.classList = "navbar-menu " + navType;
});
});<file_sep># Web_project
Here is my responsive web page.
Please launch it with a Chromium-based browser; otherwise you may run into some issues
(during development I used the Brave browser).
If you have any issues with it, do not hesitate to contact me.
Hope you'll enjoy it. If you do, don't hesitate to leave a star ⭐
|
a9121944b15f180f5a6fedabdce9fa5c8740fb11
|
[
"JavaScript",
"Markdown"
] | 2
|
JavaScript
|
AnthonyRP05/Web_project
|
76746575eb22d452554142cea77de3b6f832aeed
|
860719f36aed0918b5c45e6e19548d289aa69cbb
|
refs/heads/master
|
<file_sep>class UserApiKey < ActiveRecord::Base
belongs_to :user
validates :user_id, presence: true
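# Callbacks: the access token and expiry are generated only when the record is
# first created; after every save we check that both values are present.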
before_save :generate_access_token!, if: :new_record?
before_save :set_expiration!, if: :new_record?
after_save :access_token_and_expires_at_exist?
def self.build_for_user(user)
user_api_key = UserApiKey.new
user_api_key.send(:set_user!, user)
return user_api_key
end
def self.time_to_live_from_now
90.days.from_now
end
private
def access_token_exists?
UserApiKey.exists?(access_token: access_token)
end
def generate_access_token!
assign_fresh_token! while access_token_exists?
end
def assign_fresh_token!
self.access_token = SecureRandom.hex
end
def set_expiration!
self.expires_at = UserApiKey.time_to_live_from_now
end
def set_user!(user)
self.user_id = user.id
end
def access_token_or_expires_at_nil?
expires_at.nil? || access_token.nil?
end
def access_token_and_expires_at_exist?
return !access_token_or_expires_at_nil?
end
end
<file_sep>require 'spec_helper'
describe UserApiKey do
let!(:user) { create(:user) }
let!(:user_api_key) { UserApiKey.build_for_user(user) }
[ :id, :access_token, :user_id, :expires_at ].each do |db_field|
it { should respond_to db_field }
end
[ :id, :access_token, :user_id, :expires_at ].each do |field|
it { should_not allow_mass_assignment_of field }
end
it { should belong_to :user }
[ :user_id ].each do |field|
it { should validate_presence_of field }
end
describe 'before create' do
it 'should call generate_access_token! once' do
user_api_key.should_receive(:generate_access_token!)
user_api_key.save
end
it 'should call set_expiration! once' do
user_api_key.should_receive(:set_expiration!)
user_api_key.save
end
end
describe 'before update' do
before(:each) { user_api_key.save }
it 'should NOT call generate_access_token!' do
user_api_key.should_receive(:generate_access_token!).never
user_api_key.save
end
it 'should NOT call set_expiration!' do
user_api_key.should_receive(:set_expiration!).never
user_api_key.save
end
end
describe 'after save' do
context 'when creating' do
it 'should call access_token_and_expires_at_exist? once' do
user_api_key.should_receive(:access_token_and_expires_at_exist?).once
user_api_key.save
end
end
context 'when updating' do
it 'should call access_token_and_expires_at_exist? once' do
user_api_key.save
user_api_key.should_receive(:access_token_and_expires_at_exist?).once
user_api_key.save
end
end
end
describe '.build_for_user(user)' do
let!(:user) { create(:user) }
let!(:fake_user_api_key) { build(:user_api_key) }
it 'should create a new user object' do
UserApiKey.should_receive(:new).once.and_return(fake_user_api_key)
fake_user_api_key.should_receive(:set_user!).with(user).and_call_original
UserApiKey.build_for_user(user).should == fake_user_api_key
end
end
describe '.time_to_live_from_now' do
it 'should return 90.days.from_now' do
fake_time_now = Time.parse("Jan 1 2014")
Time.stub(:now).and_return(fake_time_now)
UserApiKey.time_to_live_from_now.should == fake_time_now + 90.days
end
end
describe '#access_token_exists?' do
let(:user_api_key) { build(:user_api_key) }
it 'should check whether the access_token field is present in the database' do
[ true, false ].each do |fake_return_value|
UserApiKey.should_receive(:exists?).and_return(fake_return_value)
user_api_key.send(:access_token_exists?).should == fake_return_value
end
end
end
describe '#generate_access_token!' do
describe 'calling #access_token_exists? should happen as many times as it needs to' do
context 'when #access_token_exists? returns false right away' do
it 'should not call #assign_fresh_token! if #access_token_exists? returns false right away' do
user_api_key.should_receive(:access_token_exists?).and_return(false)
user_api_key.should_receive(:assign_fresh_token!).never
user_api_key.send(:generate_access_token!)
end
end
context 'when #access_token_exists? returns true and then false' do
it 'should call #assign_fresh_token! once' do
user_api_key.should_receive(:access_token_exists?).and_return(true, false)
user_api_key.should_receive(:assign_fresh_token!).once.and_return(true)
user_api_key.send(:generate_access_token!)
end
end
context 'when #access_token_exists? returns true, then true again, and then false' do
it 'should call #assign_fresh_token! twice' do
user_api_key.should_receive(:access_token_exists?).and_return(true, true, false)
user_api_key.should_receive(:assign_fresh_token!).twice.and_return(true)
user_api_key.send(:generate_access_token!)
end
end
end
end
describe '#assign_fresh_token!' do
let(:user_api_key) { build(:user_api_key) }
it 'should assign SecureRandom.hex' do
[ 'fake hex value', 'another hex value', 'one more hex value' ].each do |fake_hex_value|
SecureRandom.should_receive(:hex).and_return(fake_hex_value)
user_api_key.send(:assign_fresh_token!)
user_api_key.access_token.should == fake_hex_value
end
end
end
describe '#set_expiration!' do
let(:fake_time_to_live_value) { 30.days.from_now }
it 'should set the expires_at field to #time_to_live_from_now' do
UserApiKey.should_receive(:time_to_live_from_now).and_return(fake_time_to_live_value)
user_api_key.send(:set_expiration!)
user_api_key.expires_at.should == fake_time_to_live_value
end
end
describe '#set_user!(user)' do
let(:user_api_key) { UserApiKey.new }
it 'should set the user object from the parameters' do
user_api_key.send(:set_user!, user)
user_api_key.user.should == user
user_api_key.user_id.should == user.id
end
end
describe '#access_token_or_expires_at_nil?' do
before(:each) { user_api_key.save.should == true }
context 'when both access_token and expires_at are not nil' do
it 'should return false' do
user_api_key.stub(:access_token).and_return('some non-nil value')
user_api_key.stub(:expires_at).and_return('another non-nil value')
user_api_key.send(:access_token_or_expires_at_nil?).should == false
end
end
context 'when access_token is nil' do
it 'should return true' do
user_api_key.stub(:expires_at).and_return('some non-nil value')
user_api_key.stub(:access_token).and_return(nil)
user_api_key.send(:access_token_or_expires_at_nil?).should == true
end
end
context 'when expires_at is nil' do
it 'should return true' do
user_api_key.stub(:expires_at).and_return(nil)
user_api_key.stub(:access_token).and_return('some non-nil value')
user_api_key.send(:access_token_or_expires_at_nil?).should == true
end
end
end
describe '#access_token_and_expires_at_exist?' do
[ true, false ].each do |boolean_value|
context "when #access_token_or_expires_at_nil? returns #{boolean_value}" do
before(:each) do
user_api_key
.should_receive(:access_token_or_expires_at_nil?)
.and_return(boolean_value)
end
it "should return #{!boolean_value}" do
user_api_key.send(:access_token_and_expires_at_exist?).should == !boolean_value
end
end
end
end
end
|
c42543f879e51c2bb21cd597072d7f082c145630
|
[
"Ruby"
] | 2
|
Ruby
|
val-litvak/api_key_rails_example
|
5cdab0ac63f6692fadce3584d3b4cd1ac1e08314
|
544c20d1c3524c6a7ac719e948adb78ef0185338
|
refs/heads/master
|
<repo_name>matheuslfb/trabalho_grafos<file_sep>/src/Aeroporto.java
import java.io.Serializable;
import java.util.ArrayList;
public class Aeroporto implements Serializable {
private static final long serialVersionUID = 6748342554146454296L;
// Airport code, latitude and longitude, airport name, and country code.
String codAero;
double latitude;
double longitude;
String nome;
String codPais;
public Aeroporto(){
// Fields are populated directly after construction (see App.java).
}
public String getCodAero() {
return codAero;
}
public void setCodAero(String codAero) {
this.codAero = codAero;
}
public double getLatitude() {
return latitude;
}
public void setLatitude(double latitude) {
this.latitude = latitude;
}
public double getLongitude() {
return longitude;
}
public void setLongitude(double longitude) {
this.longitude = longitude;
}
public String getNome() {
return nome;
}
public void setNome(String nome) {
this.nome = nome;
}
public String getCodPais() {
return codPais;
}
public void setCodPais(String codPais) {
this.codPais = codPais;
}
@Override
public String toString() {
return "Código (" + codAero + "), latitude = " + latitude + ", longitude = " + longitude + ", nome = "
+ nome + ", codPais = " + codPais + "]";
}
}
<file_sep>/src/App.java
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Scanner;
public class App {
public static void main(String[] args) {
/*Scanner entrada = new Scanner(System.in);
int nodo;
Grafo g=null;
try {
g = new Grafo("src\\gr3_3105.txt");
} catch (IOException e) {
System.out.println("Impossível ler grafo!");
}
System.out.println(g);
System.out.println("Informe o nodo:\n");
nodo = entrada.nextInt();
System.out.println("Grau de saída nodo: " + g.grauDeSaida(nodo));
System.out.println("Grau de entrada nodo: " + g.grauDeEntrada(nodo));
System.out.println("Vertice mais proximo: " + g.getAdjacenteProximo(nodo));
System.out.println("Arestas: " + g.getArestas());*/
ArrayList<CiaAerea> listAirlines = null;
try {
BufferedReader in = new BufferedReader(new FileReader(new File("src\\data\\airlines.dat")));
System.out.println("File open successful!");
listAirlines = new ArrayList<>();
in.readLine();
String line;
while ((line = in.readLine()) != null) {
String[] temp;
String delimiter = ";";
temp = line.split(delimiter);
CiaAerea airline = new CiaAerea();
airline.codigo = temp[0];
airline.nome = temp[1];
listAirlines.add(airline);
}
} catch (IOException e) {
System.out.println("Erro ao ler airlines.dat");
}
// for(CiaAerea a: listAirlines){
// System.out.println(a);
// }
ArrayList<Pais> listPais = null;
try {
BufferedReader in = new BufferedReader(new FileReader(new File("src\\data\\countries.dat")));
System.out.println("File open successful!");
listPais = new ArrayList<>();
in.readLine();
String line;
while ((line = in.readLine()) != null) {
String[] temp;
String delimiter = ";";
temp = line.split(delimiter);
Pais pais = new Pais();
pais.codigo = temp[0];
pais.nome = temp[1];
listPais.add(pais);
}
} catch (IOException e) {
System.out.println("Erro ao ler countries.dat");
}
// for(Pais p : listPais){
// System.out.println(p);
// }
ArrayList<Aeroporto> listAero = null;
try {
BufferedReader in = new BufferedReader(new FileReader(new File("src\\data\\airports.dat")));
System.out.println("File open successful!");
listAero = new ArrayList<>();
in.readLine();
String line;
while ((line = in.readLine()) != null) {
String[] temp;
String delimiter = ";";
temp = line.split(delimiter);
Aeroporto a = new Aeroporto();
a.codAero = temp[0];
a.latitude = (Double.parseDouble(temp[1]));
a.longitude = (Double.parseDouble(temp[2]));
a.nome = temp[3];
a.codPais = (temp[4]);
listAero.add(a);
}
} catch (IOException e) {
System.out.println("Erro ao ler airports.dat");
}
// for(Aeroporto p : listAero){
// System.out.println(p);
// }
ArrayList<Rota> listRota = null;
try {
BufferedReader in = new BufferedReader(new FileReader(new File("src\\data\\routes.dat")));
System.out.println("File open successful!");
listRota = new ArrayList<>();
in.readLine();
String line;
while ((line = in.readLine()) != null) {
String[] temp;
String delimiter = ";";
temp = line.split(delimiter);
Rota r = new Rota();
r.origem = temp[0];
r.destino = temp[1];
r.distancia = Double.parseDouble(temp[2]);
r.codCiaa = temp[3];
listRota.add(r);
}
} catch (IOException e) {
System.out.println("Erro ao ler routes.dat");
}
for(Rota r : listRota){
System.out.println(r);
}
}
}
|
d05efe5d919484194b3560fa0995bbb953bd59c6
|
[
"Java"
] | 2
|
Java
|
matheuslfb/trabalho_grafos
|
f1a6bd1720c75736c4068b89e6c95830f8320315
|
a4d2fc113d37b047eb9b296916eab5966707894d
|
refs/heads/master
|
<repo_name>niwrE91/Animation1<file_sep>/Animation1/ViewController.swift
//
// ViewController.swift
// Animation1
//
// Created by <NAME> on 26.06.19.
// Copyright © 2019 <NAME>. All rights reserved.
//
import UIKit
class ViewController: UIViewController {
//set up UI
let titleLabel = UILabel()
let bodyLable = UILabel()
fileprivate func setupLabels() {
titleLabel.textColor = .white
bodyLable.textColor = .white
titleLabel.numberOfLines = 0
titleLabel.text = "Welcome To Company XYZ"
titleLabel.font = UIFont(name: "Arial", size: 34)
bodyLable.numberOfLines = 0
bodyLable.text = "Hello there! Thanks so much for downloading our brand new app and giging us a try. Make sure to leace us a good review in the AppStore."
}
fileprivate func setupStackView() {
let stackView = UIStackView(arrangedSubviews: [titleLabel, bodyLable])
stackView.axis = .vertical
stackView.spacing = 8
view.addSubview(stackView)
//enables autolayout
stackView.translatesAutoresizingMaskIntoConstraints = false
stackView.centerXAnchor.constraint(equalTo: view.centerXAnchor).isActive = true
stackView.centerYAnchor.constraint(equalTo: view.centerYAnchor).isActive = true
stackView.widthAnchor.constraint(equalTo: view.widthAnchor, constant: -100).isActive = true
}
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = .black
setupLabels()
setupStackView()
//Fun Animations
//Recognize that you tap the screen
view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(handleTapAnimations)))
}
@objc fileprivate func handleTapAnimations() {
print("Animating")
//Animation for titleLabel
UIView.animate(withDuration: 0.5, delay: 0,usingSpringWithDamping: 0.5, initialSpringVelocity: 0.5, options: .curveEaseOut, animations: {
self.titleLabel.transform = CGAffineTransform(translationX: -30, y: 0)
}) { (_) in
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseOut, animations: {
self.titleLabel.alpha = 0
self.titleLabel.transform = self.titleLabel.transform.translatedBy(x: 0, y: -200)
})
}
//Animation for bodyLabel
UIView.animate(withDuration: 0.5, delay: 0.5,usingSpringWithDamping: 0.5, initialSpringVelocity: 0.5, options: .curveEaseOut, animations: {
self.bodyLable.transform = CGAffineTransform(translationX: -30, y: 0)
}) { (_) in
UIView.animate(withDuration: 0.5, delay: 0, usingSpringWithDamping: 1, initialSpringVelocity: 1, options: .curveEaseOut, animations: {
self.bodyLable.alpha = 0
self.bodyLable.transform = self.bodyLable.transform.translatedBy(x: 0, y: -200)
})
}
}
}
|
7ae5144be3a40b219f8550983786a4ab1e569776
|
[
"Swift"
] | 1
|
Swift
|
niwrE91/Animation1
|
73f7fae3f57fdea5a7a103f0e98ec4115a63aeaa
|
b74758c416e7146c8d5ca4fd6c74f29e37a22433
|
refs/heads/master
|
<file_sep>file(REMOVE_RECURSE
"CMakeFiles/bus_sub_detect1.dir/src/bus_sub_detect.cpp.o"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/bus_sub_detect1.pdb"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/bus_sub_detect1"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/bus_sub_detect1.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep># CMAKE generated file: DO NOT EDIT!
# Generated by "Unix Makefiles" Generator, CMake Version 3.10
# Default target executed when no arguments are given to make.
default_target: all
.PHONY : default_target
# Allow only one "make -f Makefile2" at a time, but pass parallelism.
.NOTPARALLEL:
#=============================================================================
# Special targets provided by cmake.
# Disable implicit rules so canonical targets will work.
.SUFFIXES:
# Remove some rules from gmake that .SUFFIXES does not remove.
SUFFIXES =
.SUFFIXES: .hpux_make_needs_suffix_list
# Suppress display of executed commands.
$(VERBOSE).SILENT:
# A target that is always out of date.
cmake_force:
.PHONY : cmake_force
#=============================================================================
# Set environment variables for the build.
# The shell in which to execute make rules.
SHELL = /bin/sh
# The CMake executable.
CMAKE_COMMAND = /usr/bin/cmake
# The command to remove a file.
RM = /usr/bin/cmake -E remove -f
# Escaping for special characters.
EQUALS = =
# The top-level source directory on which CMake was run.
CMAKE_SOURCE_DIR = /home/thocao/catkin_ws/src
# The top-level build directory on which CMake was run.
CMAKE_BINARY_DIR = /home/thocao/catkin_ws/build
#=============================================================================
# Targets provided globally by CMake.
# Special rule for the target install/strip
install/strip: preinstall
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing the project stripped..."
/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake
.PHONY : install/strip
# Special rule for the target install/strip
install/strip/fast: preinstall/fast
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing the project stripped..."
/usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake
.PHONY : install/strip/fast
# Special rule for the target install/local
install/local: preinstall
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing only the local directory..."
/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake
.PHONY : install/local
# Special rule for the target install/local
install/local/fast: preinstall/fast
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing only the local directory..."
/usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake
.PHONY : install/local/fast
# Special rule for the target install
install: preinstall
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Install the project..."
/usr/bin/cmake -P cmake_install.cmake
.PHONY : install
# Special rule for the target install
install/fast: preinstall/fast
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Install the project..."
/usr/bin/cmake -P cmake_install.cmake
.PHONY : install/fast
# Special rule for the target list_install_components
list_install_components:
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Available install components are: \"Unspecified\""
.PHONY : list_install_components
# Special rule for the target list_install_components
list_install_components/fast: list_install_components
.PHONY : list_install_components/fast
# Special rule for the target edit_cache
edit_cache:
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake cache editor..."
/usr/bin/ccmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)
.PHONY : edit_cache
# Special rule for the target edit_cache
edit_cache/fast: edit_cache
.PHONY : edit_cache/fast
# Special rule for the target test
test:
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running tests..."
/usr/bin/ctest --force-new-ctest-process $(ARGS)
.PHONY : test
# Special rule for the target test
test/fast: test
.PHONY : test/fast
# Special rule for the target rebuild_cache
rebuild_cache:
@$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake to regenerate build system..."
/usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR)
.PHONY : rebuild_cache
# Special rule for the target rebuild_cache
rebuild_cache/fast: rebuild_cache
.PHONY : rebuild_cache/fast
# The main all target
all: cmake_check_build_system
cd /home/thocao/catkin_ws/build && $(CMAKE_COMMAND) -E cmake_progress_start /home/thocao/catkin_ws/build/CMakeFiles /home/thocao/catkin_ws/build/bus_publisher_video1/CMakeFiles/progress.marks
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/all
$(CMAKE_COMMAND) -E cmake_progress_start /home/thocao/catkin_ws/build/CMakeFiles 0
.PHONY : all
# The main clean target
clean:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/clean
.PHONY : clean
# The main clean target
clean/fast: clean
.PHONY : clean/fast
# Prepare targets for installation.
preinstall: all
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/preinstall
.PHONY : preinstall
# Prepare targets for installation.
preinstall/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/preinstall
.PHONY : preinstall/fast
# clear depends
depend:
cd /home/thocao/catkin_ws/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1
.PHONY : depend
# Convenience name for target.
bus_publisher_video1/CMakeFiles/roscpp_generate_messages_nodejs.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/roscpp_generate_messages_nodejs.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/roscpp_generate_messages_nodejs.dir/rule
# Convenience name for target.
roscpp_generate_messages_nodejs: bus_publisher_video1/CMakeFiles/roscpp_generate_messages_nodejs.dir/rule
.PHONY : roscpp_generate_messages_nodejs
# fast build rule for target.
roscpp_generate_messages_nodejs/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/roscpp_generate_messages_nodejs.dir/build.make bus_publisher_video1/CMakeFiles/roscpp_generate_messages_nodejs.dir/build
.PHONY : roscpp_generate_messages_nodejs/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/bus_video1.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/bus_video1.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/bus_video1.dir/rule
# Convenience name for target.
bus_video1: bus_publisher_video1/CMakeFiles/bus_video1.dir/rule
.PHONY : bus_video1
# fast build rule for target.
bus_video1/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/bus_video1.dir/build.make bus_publisher_video1/CMakeFiles/bus_video1.dir/build
.PHONY : bus_video1/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/roscpp_generate_messages_eus.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/roscpp_generate_messages_eus.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/roscpp_generate_messages_eus.dir/rule
# Convenience name for target.
roscpp_generate_messages_eus: bus_publisher_video1/CMakeFiles/roscpp_generate_messages_eus.dir/rule
.PHONY : roscpp_generate_messages_eus
# fast build rule for target.
roscpp_generate_messages_eus/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/roscpp_generate_messages_eus.dir/build.make bus_publisher_video1/CMakeFiles/roscpp_generate_messages_eus.dir/build
.PHONY : roscpp_generate_messages_eus/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/rule
# Convenience name for target.
rosgraph_msgs_generate_messages_cpp: bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/rule
.PHONY : rosgraph_msgs_generate_messages_cpp
# fast build rule for target.
rosgraph_msgs_generate_messages_cpp/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/build.make bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_cpp.dir/build
.PHONY : rosgraph_msgs_generate_messages_cpp/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/roscpp_generate_messages_cpp.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/roscpp_generate_messages_cpp.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/roscpp_generate_messages_cpp.dir/rule
# Convenience name for target.
roscpp_generate_messages_cpp: bus_publisher_video1/CMakeFiles/roscpp_generate_messages_cpp.dir/rule
.PHONY : roscpp_generate_messages_cpp
# fast build rule for target.
roscpp_generate_messages_cpp/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/roscpp_generate_messages_cpp.dir/build.make bus_publisher_video1/CMakeFiles/roscpp_generate_messages_cpp.dir/build
.PHONY : roscpp_generate_messages_cpp/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/rule
# Convenience name for target.
rosgraph_msgs_generate_messages_eus: bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/rule
.PHONY : rosgraph_msgs_generate_messages_eus
# fast build rule for target.
rosgraph_msgs_generate_messages_eus/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/build.make bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_eus.dir/build
.PHONY : rosgraph_msgs_generate_messages_eus/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/rule
# Convenience name for target.
rosgraph_msgs_generate_messages_lisp: bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/rule
.PHONY : rosgraph_msgs_generate_messages_lisp
# fast build rule for target.
rosgraph_msgs_generate_messages_lisp/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/build.make bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_lisp.dir/build
.PHONY : rosgraph_msgs_generate_messages_lisp/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/bus_detect1.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/bus_detect1.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/bus_detect1.dir/rule
# Convenience name for target.
bus_detect1: bus_publisher_video1/CMakeFiles/bus_detect1.dir/rule
.PHONY : bus_detect1
# fast build rule for target.
bus_detect1/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/bus_detect1.dir/build.make bus_publisher_video1/CMakeFiles/bus_detect1.dir/build
.PHONY : bus_detect1/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/roscpp_generate_messages_py.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/roscpp_generate_messages_py.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/roscpp_generate_messages_py.dir/rule
# Convenience name for target.
roscpp_generate_messages_py: bus_publisher_video1/CMakeFiles/roscpp_generate_messages_py.dir/rule
.PHONY : roscpp_generate_messages_py
# fast build rule for target.
roscpp_generate_messages_py/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/roscpp_generate_messages_py.dir/build.make bus_publisher_video1/CMakeFiles/roscpp_generate_messages_py.dir/build
.PHONY : roscpp_generate_messages_py/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/rule
# Convenience name for target.
rosgraph_msgs_generate_messages_nodejs: bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/rule
.PHONY : rosgraph_msgs_generate_messages_nodejs
# fast build rule for target.
rosgraph_msgs_generate_messages_nodejs/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/build.make bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_nodejs.dir/build
.PHONY : rosgraph_msgs_generate_messages_nodejs/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/rule
# Convenience name for target.
rosgraph_msgs_generate_messages_py: bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/rule
.PHONY : rosgraph_msgs_generate_messages_py
# fast build rule for target.
rosgraph_msgs_generate_messages_py/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/build.make bus_publisher_video1/CMakeFiles/rosgraph_msgs_generate_messages_py.dir/build
.PHONY : rosgraph_msgs_generate_messages_py/fast
# Convenience name for target.
bus_publisher_video1/CMakeFiles/roscpp_generate_messages_lisp.dir/rule:
cd /home/thocao/catkin_ws/build && $(MAKE) -f CMakeFiles/Makefile2 bus_publisher_video1/CMakeFiles/roscpp_generate_messages_lisp.dir/rule
.PHONY : bus_publisher_video1/CMakeFiles/roscpp_generate_messages_lisp.dir/rule
# Convenience name for target.
roscpp_generate_messages_lisp: bus_publisher_video1/CMakeFiles/roscpp_generate_messages_lisp.dir/rule
.PHONY : roscpp_generate_messages_lisp
# fast build rule for target.
roscpp_generate_messages_lisp/fast:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/roscpp_generate_messages_lisp.dir/build.make bus_publisher_video1/CMakeFiles/roscpp_generate_messages_lisp.dir/build
.PHONY : roscpp_generate_messages_lisp/fast
src/bus_video.o: src/bus_video.cpp.o
.PHONY : src/bus_video.o
# target to build an object file
src/bus_video.cpp.o:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/bus_video1.dir/build.make bus_publisher_video1/CMakeFiles/bus_video1.dir/src/bus_video.cpp.o
.PHONY : src/bus_video.cpp.o
src/bus_video.i: src/bus_video.cpp.i
.PHONY : src/bus_video.i
# target to preprocess a source file
src/bus_video.cpp.i:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/bus_video1.dir/build.make bus_publisher_video1/CMakeFiles/bus_video1.dir/src/bus_video.cpp.i
.PHONY : src/bus_video.cpp.i
src/bus_video.s: src/bus_video.cpp.s
.PHONY : src/bus_video.s
# target to generate assembly for a file
src/bus_video.cpp.s:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/bus_video1.dir/build.make bus_publisher_video1/CMakeFiles/bus_video1.dir/src/bus_video.cpp.s
.PHONY : src/bus_video.cpp.s
src/detectnetcamera.o: src/detectnetcamera.cpp.o
.PHONY : src/detectnetcamera.o
# target to build an object file
src/detectnetcamera.cpp.o:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/bus_detect1.dir/build.make bus_publisher_video1/CMakeFiles/bus_detect1.dir/src/detectnetcamera.cpp.o
.PHONY : src/detectnetcamera.cpp.o
src/detectnetcamera.i: src/detectnetcamera.cpp.i
.PHONY : src/detectnetcamera.i
# target to preprocess a source file
src/detectnetcamera.cpp.i:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/bus_detect1.dir/build.make bus_publisher_video1/CMakeFiles/bus_detect1.dir/src/detectnetcamera.cpp.i
.PHONY : src/detectnetcamera.cpp.i
src/detectnetcamera.s: src/detectnetcamera.cpp.s
.PHONY : src/detectnetcamera.s
# target to generate assembly for a file
src/detectnetcamera.cpp.s:
cd /home/thocao/catkin_ws/build && $(MAKE) -f bus_publisher_video1/CMakeFiles/bus_detect1.dir/build.make bus_publisher_video1/CMakeFiles/bus_detect1.dir/src/detectnetcamera.cpp.s
.PHONY : src/detectnetcamera.cpp.s
# Help Target
help:
@echo "The following are some of the valid targets for this Makefile:"
@echo "... all (the default if no target is provided)"
@echo "... clean"
@echo "... depend"
@echo "... install/strip"
@echo "... install/local"
@echo "... roscpp_generate_messages_nodejs"
@echo "... bus_video1"
@echo "... roscpp_generate_messages_eus"
@echo "... rosgraph_msgs_generate_messages_cpp"
@echo "... roscpp_generate_messages_cpp"
@echo "... install"
@echo "... rosgraph_msgs_generate_messages_eus"
@echo "... rosgraph_msgs_generate_messages_lisp"
@echo "... bus_detect1"
@echo "... roscpp_generate_messages_py"
@echo "... list_install_components"
@echo "... rosgraph_msgs_generate_messages_nodejs"
@echo "... rosgraph_msgs_generate_messages_py"
@echo "... roscpp_generate_messages_lisp"
@echo "... edit_cache"
@echo "... test"
@echo "... rebuild_cache"
@echo "... src/bus_video.o"
@echo "... src/bus_video.i"
@echo "... src/bus_video.s"
@echo "... src/detectnetcamera.o"
@echo "... src/detectnetcamera.i"
@echo "... src/detectnetcamera.s"
.PHONY : help
#=============================================================================
# Special targets to cleanup operation of make.
# Special rule to run CMake to check the build system integrity.
# No rule that depends on this can have commands that come from listfiles
# because they might be regenerated.
cmake_check_build_system:
cd /home/thocao/catkin_ws/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0
.PHONY : cmake_check_build_system
<file_sep>set(_CATKIN_CURRENT_PACKAGE "bus_publisher_video1")
set(bus_publisher_video1_VERSION "0.0.0")
set(bus_publisher_video1_MAINTAINER "thocao <<EMAIL>>")
set(bus_publisher_video1_PACKAGE_FORMAT "2")
set(bus_publisher_video1_BUILD_DEPENDS "cv_bridge" "image_transport" "roscpp" "sensor_msgs")
set(bus_publisher_video1_BUILD_EXPORT_DEPENDS "cv_bridge" "image_transport" "roscpp" "sensor_msgs")
set(bus_publisher_video1_BUILDTOOL_DEPENDS "catkin")
set(bus_publisher_video1_BUILDTOOL_EXPORT_DEPENDS )
set(bus_publisher_video1_EXEC_DEPENDS "cv_bridge" "image_transport" "roscpp" "sensor_msgs")
set(bus_publisher_video1_RUN_DEPENDS "cv_bridge" "image_transport" "roscpp" "sensor_msgs")
set(bus_publisher_video1_TEST_DEPENDS )
set(bus_publisher_video1_DOC_DEPENDS )
set(bus_publisher_video1_URL_WEBSITE "")
set(bus_publisher_video1_URL_BUGTRACKER "")
set(bus_publisher_video1_URL_REPOSITORY "")
set(bus_publisher_video1_DEPRECATED "")<file_sep>file(REMOVE_RECURSE
"CMakeFiles/yolo_pub_video1.dir/src/yolo_pub_video.cpp.o"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/yolo_pub_video1.pdb"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/yolo_pub_video1"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/yolo_pub_video1.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep>file(REMOVE_RECURSE
"CMakeFiles/yolo_sub_video1.dir/src/yolo_sub_video.cpp.o"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/yolo_sub_video1.pdb"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/yolo_sub_video1"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/yolo_sub_video1.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep>file(REMOVE_RECURSE
"CMakeFiles/bus_video.dir/src/bus_video.cpp.o"
"/home/thocao/catkin_ws/devel/lib/bus_publisher_video1/bus_video.pdb"
"/home/thocao/catkin_ws/devel/lib/bus_publisher_video1/bus_video"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/bus_video.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep>file(REMOVE_RECURSE
"CMakeFiles/bus_detect.dir/src/detectnetcamera.cpp.o"
"/home/thocao/catkin_ws/devel/lib/bus_publisher_video1/bus_detect.pdb"
"/home/thocao/catkin_ws/devel/lib/bus_publisher_video1/bus_detect"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/bus_detect.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep>file(REMOVE_RECURSE
"CMakeFiles/bus_video1.dir/src/bus_video.cpp.o"
"/home/thocao/catkin_ws/devel/lib/bus_publisher_video1/bus_video1.pdb"
"/home/thocao/catkin_ws/devel/lib/bus_publisher_video1/bus_video1"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/bus_video1.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep>file(REMOVE_RECURSE
"CMakeFiles/yolo_sub_video.dir/src/yolo_sub_video.cpp.o"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/yolo_sub_video.pdb"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/yolo_sub_video"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/yolo_sub_video.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep>file(REMOVE_RECURSE
"CMakeFiles/yolo_pub_video.dir/src/yolo_pub_video.cpp.o"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/yolo_pub_video.pdb"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/yolo_pub_video"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/yolo_pub_video.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep>set(_CATKIN_CURRENT_PACKAGE "bus_sub_video")
set(bus_sub_video_VERSION "0.0.0")
set(bus_sub_video_MAINTAINER "thocao <<EMAIL>>")
set(bus_sub_video_PACKAGE_FORMAT "2")
set(bus_sub_video_BUILD_DEPENDS "cv_bridge" "image_transport" "roscpp" "sensor_msgs")
set(bus_sub_video_BUILD_EXPORT_DEPENDS "cv_bridge" "image_transport" "roscpp" "sensor_msgs")
set(bus_sub_video_BUILDTOOL_DEPENDS "catkin")
set(bus_sub_video_BUILDTOOL_EXPORT_DEPENDS )
set(bus_sub_video_EXEC_DEPENDS "cv_bridge" "image_transport" "roscpp" "sensor_msgs")
set(bus_sub_video_RUN_DEPENDS "cv_bridge" "image_transport" "roscpp" "sensor_msgs")
set(bus_sub_video_TEST_DEPENDS )
set(bus_sub_video_DOC_DEPENDS )
set(bus_sub_video_URL_WEBSITE "")
set(bus_sub_video_URL_BUGTRACKER "")
set(bus_sub_video_URL_REPOSITORY "")
set(bus_sub_video_DEPRECATED "")<file_sep>#include <ros/ros.h>
#include <image_transport/image_transport.h>
#include <opencv2/highgui/highgui.hpp>
#include <cv_bridge/cv_bridge.h>
#include <darknet_ros_msgs/BoundingBox.h>
#include <darknet_ros_msgs/BoundingBoxes.h>
#include <sstream>
#include <cstdio>
cv::Mat frame;
bool flag = false;
bool initFlag = false;
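// flag is true while a published frame is still waiting for its YOLO bounding boxes;
// initFlag becomes true once the first bounding-box message has arrived, so frames keep
// being re-published until the detector starts responding.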
void msgCallback(const darknet_ros_msgs::BoundingBoxes::ConstPtr& msg){
if(!frame.empty()){
for(int i=0;i<msg->bounding_boxes.size();i++){
if(msg->bounding_boxes[i].Class != "person") continue;
cv::rectangle(frame,cv::Point(msg->bounding_boxes[i].xmin,msg->bounding_boxes[i].ymin),cv::Point(msg->bounding_boxes[i].xmax,msg->bounding_boxes[i].ymax),cv::Scalar(255,0,255),3,8);
}
//cv::imshow("Yolo V3", frame);
//cv::waitKey(30);
flag = false;
}
initFlag = true;
}
cv::Mat horizontalStack(cv::Mat &left,cv::Mat &right){
if(left.empty() || right.empty()) return cv::Mat();
cv::Mat dst(cv::Size(left.cols*2,left.rows),left.type(),cv::Scalar::all(0));
cv::Mat matRoi = dst(cv::Rect(0,0,left.cols,left.rows));
left.copyTo(matRoi);
matRoi = dst(cv::Rect(left.cols,0,left.cols,left.rows));
right.copyTo(matRoi);
return dst;
}
cv::Mat verticalStack(cv::Mat &up,cv::Mat &down){
if(up.empty() || down.empty()) return cv::Mat();
cv::Mat dst(cv::Size(up.cols,up.rows*2),up.type(),cv::Scalar::all(0));
cv::Mat matRoi = dst(cv::Rect(0,0,up.cols,up.rows));
up.copyTo(matRoi);
matRoi = dst(cv::Rect(0,up.rows,up.cols,up.rows));
down.copyTo(matRoi);
return dst;
}
int main(int argc, char **argv)
{
ros::init(argc, argv, "yolo_id_1");
ros::NodeHandle nh;
image_transport::ImageTransport it(nh);
image_transport::Publisher pub = it.advertise("/camera/rgb/image_raw", 1);
ros::Subscriber yolo_boundingBox = nh.subscribe("/darknet_ros/bounding_boxes",100,msgCallback);
    // The camera index can optionally be passed as argv[1]; default to device 0.
    int video_source = 0;
    if (argc > 1) {
        std::istringstream video_sourceCmd(argv[1]);
        video_sourceCmd >> video_source;
    }
    cv::VideoCapture cap1(video_source); // open the selected camera
    if(!cap1.isOpened()) // check if we succeeded
        return -1;
    cap1.set(CV_CAP_PROP_FRAME_WIDTH,320);
    cap1.set(CV_CAP_PROP_FRAME_HEIGHT,240);
sensor_msgs::ImagePtr msg;
ros::Rate loop_rate(1);
//cv::namedWindow("Yolo V3",cv::WINDOW_NORMAL);
//cv::setWindowProperty("Yolo V3",cv::WND_PROP_FULLSCREEN,cv::WINDOW_FULLSCREEN);
while (nh.ok()) {
cap1 >> frame; // get a new frame from camera
// Check if grabbed frame is actually full with some content
if(!frame.empty()){
if(flag == false)
{
msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", frame).toImageMsg();
pub.publish(msg);
std::printf("send image \n");
cv::waitKey(1);
flag = true;
}
if(initFlag == false){
msg = cv_bridge::CvImage(std_msgs::Header(), "bgr8", frame).toImageMsg();
pub.publish(msg);
std::printf("send image \n");
cv::waitKey(1);
}
}
ros::spinOnce();
loop_rate.sleep();
}
//cv::destroyWindow("Yolo V3");
}
<file_sep>file(REMOVE_RECURSE
"CMakeFiles/bus_sub_detect.dir/src/bus_sub_detect.cpp.o"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/bus_sub_detect.pdb"
"/home/thocao/catkin_ws/devel/lib/bus_sub_video/bus_sub_detect"
)
# Per-language clean rules from dependency scanning.
foreach(lang CXX)
include(CMakeFiles/bus_sub_detect.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
<file_sep>#!/bin/bash
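# Environment setup for running ROS nodes against a remote ROS master; the IP addresses and workspace path are site-specific.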
export ROS_WS=/home/ubuntu/catkin_ws
#source $ROS_WS/devel/setup.bash
export PATH=$ROS_ROOT/bin:$PATH
export ROS_PACKAGE_PATH=$ROS_PACKAGE_PATH:$ROS_WS
export ROS_IP=192.168.0.2
export ROS_MASTER_URI=http://192.168.0.23:11311
export ROS_HOSTNAME=192.168.0.2
source $ROS_WS/devel/setup.bash
exec "$@"
|
633eb57b6efabd99930c8688e34fb7282f7da5c9
|
[
"Makefile",
"CMake",
"C++",
"Shell"
] | 14
|
CMake
|
ThoCao/catkin_ws
|
8ece64f152ff6bb47ee8b8aa6ad479eaf1f67cca
|
4cfa6086e9ca2c9e3f407a4cd7aa652bc942fe07
|
refs/heads/master
|
<file_sep>#ifndef HEAP_SORT_H
#define HEAP_SORT_H
void max_heapify(int nums[], int heap_size, int i);
void build_max_heap(int nums[], int len);
void heap_sort(int nums[], int len);
// TODO: the functions below use the heap to implement a priority queue
void max_heap_insert(int nums[], int *heap_size, int key);
int heap_extract_max(int nums[], int heap_size);
void heap_increase_key(int nums[], int i, int key);
int heap_maximum(int nums[]);
#endif
<file_sep># Chapter 2: Getting Started
<file_sep>#ifndef COUNTING_SORT_H
#define COUNTING_SORT_H
#define COUNTING_SORT_NUM_MAX 10
void counting_sort(int nums[], int len);
#endif
<file_sep>#ifndef MAXIMUM_SUBARRAY_H
#define MAXIMUM_SUBARRAY_H
struct MSA_slt_s {
    int left;  // left endpoint of the maximum subarray (inclusive)
    int right; // right endpoint of the maximum subarray (inclusive)
    int sum;   // sum of the maximum subarray
};
typedef struct MSA_slt_s MSA_slt_t;
enum MSA_policy_e {
    MSA_PLC_DAQ,   // divide and conquer
    MSA_PLC_LINEAR // linear-time algorithm
};
typedef enum MSA_policy_e MSA_policy_t;
MSA_slt_t maximum_subarray(int nums[], int begin, int end, MSA_policy_t p);
#endif
<file_sep># Chapter 4: Divide-and-Conquer
<file_sep>/**
* matrix_chain_multiplication.c
 * The matrix-chain multiplication problem
*/
#include "matrix_chain_multiplication.h"
#include "../utils.h"
#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
void free_mcm_slt(mcm_slt_t slt) {
for (int i = 0; i < slt.len; i++) {
free(slt.m[i]);
free(slt.s[i]);
}
free(slt.m);
free(slt.s);
}
mcm_slt_t matrix_chain_order(int p[], int n) {
// int len = n+1;
// mcm_slt_t slt;
// slt.s = malloc(len*sizeof(int*));
// slt.m = malloc(len*sizeof(int*));
// for (int i = 0; i < len; i++) {
// slt.s[i] = malloc(len*sizeof(int));
// slt.m[i] = malloc(len*sizeof(int));
// }
// slt.len = n+1;
// for (int i = 1; i < len; i++) {
// slt.m[i][i] = 0;
// }
// for (int l = 1; l < len; l++) {
// for (int i = 1; i < len-l+1; i++) {
// int j = i+l-1;
// slt.m[i][j] = INT_MAX;
// for (int k =i; k < j; k++) {
// int cur = slt.m[i][k] + slt.m[k+1][j] + p[i-1]*p[k]*p[j];
// if (cur < slt.m[i][j]) {
// slt.m[i][j] = cur;
// slt.s[i][j] = k;
// }
// }
// }
// }
// return slt;
    // note: the number of matrices is n-1
mcm_slt_t slt;
slt.s = malloc(n*sizeof(int*));
slt.m = malloc(n*sizeof(int*));
for (int i = 0; i < n; i++) {
slt.s[i] = malloc(n*sizeof(int));
slt.m[i] = malloc(n*sizeof(int));
}
slt.len = n;
for (int i = 1; i < n; i++) {
slt.m[i][i] = 0;
}
    for (int j = 2; j < n; j++) {
        for (int i = j-1; i > 0; i--) {
            int min = INT_MAX;
            for (int k = i; k < j; k++) {
                int tmp = slt.m[i][k] + slt.m[k+1][j] + p[i-1]*p[k]*p[j];
                if (tmp < min) {
                    min = tmp;
                    slt.s[i][j] = k; // record the split point so print_optimal_parens can rebuild the parenthesization
                }
            }
            slt.m[i][j] = min;
        }
    }
return slt;
}
void print_optimal_parens_aux(mcm_slt_t slt, int i, int j) {
if (i == j) {
printf("A%d", i);
} else {
printf("(");
print_optimal_parens_aux(slt, i, slt.s[i][j]);
print_optimal_parens_aux(slt, slt.s[i][j]+1, j);
printf(")");
}
}
void print_optimal_parens(int p[], int n, int i, int j) {
mcm_slt_t slt = matrix_chain_order(p, n);
printf("%d\n", slt.m[i][j]);
print_optimal_parens_aux(slt, i, j);
printf("\n");
}
<file_sep>/**
 * merge_sort.c
 * Merge sort
*/
#include <limits.h>
#include <stdlib.h>
#include "../utils.h"
void merge_sort(int nums[], int len) {
if (len == 1) return;
int mid = len/2;
int ll = mid;
int rl = len - mid;
int *l = malloc((ll+1)*sizeof(int));
int *r = malloc((rl+1)*sizeof(int));
merge_sort(nums, ll);
merge_sort(nums+mid, rl);
for (int i=0; i < ll; i++) {
l[i] = nums[i];
}
for (int i=0; i < rl; i++) {
r[i] = nums[mid+i];
}
    // sentinel elements: the merge loop never has to check whether a subarray is exhausted
l[ll] = INT_MAX;
r[rl] = INT_MAX;
int i = 0;
int il = 0;
int ir = 0;
while (i < len) {
if (l[il] < r[ir]) {
nums[i++] = l[il++];
} else {
nums[i++] = r[ir++];
}
}
free(l);
free(r);
}
<file_sep>#ifndef MATRIX_CHAIN_MULTIPLICATION
#define MATRIX_CHAIN_MULTIPLICATION
struct mcm_slt_s {
int **s;
int **m;
int len;
};
typedef struct mcm_slt_s mcm_slt_t;
void free_mcm_slt(mcm_slt_t slt);
void print_optimal_parens(int p[], int n, int i, int j);
mcm_slt_t matrix_chain_order(int p[], int n);
#endif
<file_sep>#include "optimal_binary_search_tree.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <CUnit/Console.h>
#include <stdio.h>
static int InitSuite() {
return 0;
}
static int EndSuite() {
return 0;
}
static void test_optimal_bst() {
double p[] = {0, 0.15, 0.10, 0.05, 0.10, 0.20};
double q[] = {0.05, 0.10, 0.05, 0.05, 0.05, 0.10};
int n = 5;
obst_slt_t slt = optimal_bst(p, q, n);
    CU_ASSERT_DOUBLE_EQUAL(slt.e[1][n], 2.75, 1e-9);
    free_obst_slt(&slt);
}
CU_ErrorCode add_test_obst() {
CU_pSuite pSuite = NULL;
    /***************
     * 1. CU_add_suite registers a new suite
     * 2. the suite name, e.g. "testSuite"
     * 3. InitSuite / EndSuite: per-suite setup and teardown functions; pass NULL if not needed
     ****************/
    pSuite = CU_add_suite("optimal_bst", InitSuite, EndSuite);
    CHECK_CU_GLOBAL();
    /***************
     * 1. register the test cases belonging to the current suite
     * 2. pSuite: the suite pointer
     * 3. "Test1": the test case name
     * 4. Test1: the test function
     ***************/
CU_ADD_TEST(pSuite, test_optimal_bst);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep>#ifndef TEST_LONGEST_COMMON_SUBSEQ
#define TEST_LONGEST_COMMON_SUBSEQ
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_lcs();
#endif
<file_sep>#include "quick_sort.h"
#include "test_quick_sort.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <stdlib.h>
static void test_quick_sort() {
int len = 100;
int nums[len];
for (int i = 0; i < len; i++) {
nums[i] = rand();
}
quick_sort(nums, 0, len);
for (int i = 1; i < len; i++) {
CU_ASSERT(nums[i-1] <= nums[i]);
}
}
CU_ErrorCode add_test_quick_sort() {
CU_pSuite pSuite = CU_add_suite("quick_sort", NULL, NULL);
CHECK_CU_GLOBAL();
CU_ADD_TEST(pSuite, test_quick_sort);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep>#ifndef MERGE_SORT_H
#define MERGE_SORT_H
void merge_sort(int array[], int len);
#endif
<file_sep>#include <stdio.h>
int main() {
printf("Nothing is here. Please run \"test_algrithms\"\n");
return 0;
}
<file_sep>/**
* longest_common_subsequence.c
 * The longest-common-subsequence problem
*/
#include "longest_common_subsequence.h"
#include "../utils.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
void init_lcs_slt(lcs_slt_t *slt, int m, int n) {
slt->m = m;
slt->n = n;
slt->c = malloc((m+1)*sizeof(int*));
slt->b = malloc((m+1)*sizeof(lcs_slt_dirc_t*));
for (int i = 0; i <= slt->m; i++) {
slt->c[i] = malloc((n+1)*sizeof(int));
slt->b[i] = malloc((n+1)*sizeof(lcs_slt_dirc_t));
}
for (int i = 0; i <= m; i++) {
slt->c[i][0] = 0;
}
for (int j = 0; j <= n; j++) {
slt->c[0][j] = 0;
}
}
void free_lcs_slt(lcs_slt_t *slt) {
for(int i = 0; i <= slt->m; i++) {
free(slt->c[i]);
free(slt->b[i]);
}
free(slt->c);
free(slt->b);
}
lcs_slt_t lcs_length(const char *strx, const char *stry) {
int m = strlen(strx);
int n = strlen(stry);
lcs_slt_t slt = {NULL, NULL, 0, 0};
init_lcs_slt(&slt, m, n);
int **c = slt.c;
lcs_slt_dirc_t **b = slt.b;
for (int i = 1; i <= m; i++) {
for (int j = 1; j <= n; j++) {
if (strx[i-1] == stry[j-1]) {
c[i][j] = c[i-1][j-1] + 1;
b[i][j] = UP_LEFT;
} else if (c[i-1][j] >= c[i][j-1]) {
c[i][j] = c[i-1][j];
b[i][j] = UP;
} else {
c[i][j] = c[i][j-1];
b[i][j] = LEFT;
}
}
}
return slt;
}
void get_lcs_aux(const char *strx, int i, int j, lcs_slt_dirc_t **b, char *buf, int *idx);
void get_lcs(const char *strx, const char *stry, int i, int j, char *buf) {
lcs_slt_t slt = lcs_length(strx, stry);
lcs_slt_dirc_t **b = slt.b;
int idx = 0;
get_lcs_aux(strx, i, j, b, buf, &idx);
buf[idx] = '\0';
for (int i = 0; i <= (idx-1)/2; i++) {
char tmp = buf[i];
buf[i] = buf[idx-i-1];
buf[idx-i-1] = tmp;
}
free_lcs_slt(&slt);
}
void get_lcs_aux(const char *strx, int i, int j, lcs_slt_dirc_t **b, char *buf, int *idx) {
if (i == 0 || j == 0) {
return;
}
if (b[i][j] == UP_LEFT) {
buf[*idx] = strx[i-1];
(*idx)++;
get_lcs_aux(strx, i-1, j-1, b, buf, idx);
        // printf("%c", strx[i-1]); // debug trace; the subsequence is returned through buf
} else if (b[i][j] == UP) {
get_lcs_aux(strx, i-1, j, b, buf, idx);
} else {
get_lcs_aux(strx, i, j-1, b, buf, idx);
}
}
<file_sep># Chapter 9: Medians and Order Statistics
<file_sep>#include "chap02/test_insertion_sort.h"
#include "chap02/test_merge_sort.h"
#include "chap04/test_maximum_subarray.h"
#include "chap06/test_heap_sort.h"
#include "chap07/test_quick_sort.h"
#include "chap08/test_counting_sort.h"
#include "chap08/test_radix_sort.h"
#include "chap09/test_order_statistic.h"
#include "chap15/test_cut_rod.h"
#include "chap15/test_optimal_binary_search_tree.h"
#include "chap15/test_longest_common_subsequence.h"
#include "chap15/test_matrix_chain_multiplication.h"
#include "utils.h"
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <CUnit/Console.h>
int main() {
srand(time(NULL));
    // CU_initialize_registry sets up the test registry and returns a CUE_* error code
int cu_res;
cu_res = CU_initialize_registry();
CHECK_CU_RETURN(cu_res);
    // CU_get_registry returns a pointer to the test registry
    //assert(NULL != CU_get_registry());
    // check whether a test is currently running
    //assert(!CU_is_test_running());
    // register each suite
CHECK_CU_RETURN(add_test_insertion_sort());
CHECK_CU_RETURN(add_test_merge_sort());
CHECK_CU_RETURN(add_test_maximum_subarray());
CHECK_CU_RETURN(add_test_heap_sort());
CHECK_CU_RETURN(add_test_quick_sort());
CHECK_CU_RETURN(add_test_counting_sort());
CHECK_CU_RETURN(add_test_radix_sort());
CHECK_CU_RETURN(add_test_order_statistic());
CHECK_CU_RETURN(add_test_cut_rod());
CHECK_CU_RETURN(add_test_obst());
CHECK_CU_RETURN(add_test_lcs());
CHECK_CU_RETURN(add_test_matrix_chain_multi());
    // entry point for the interactive console test runner
    CU_console_run_tests();
    /*** automated mode that writes XML result files ********
    CU_set_output_filename("TestMax");
    CU_list_tests_to_file();
    CU_automated_run_tests();
    ***********************************/
    // clean up the registry when done
CU_cleanup_registry();
return 0;
}
<file_sep>#include "test_maximum_subarray.h"
#include "maximum_subarray.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <stdlib.h>
static void test_maximum_subarray() {
int nums[] = {13,-3,-25,20,-3,-16,-23,18,20,-7,12,-5,-22,15,-4,7};
MSA_slt_t slt = maximum_subarray(nums, 0, sizeof(nums)/sizeof(int), MSA_PLC_DAQ);
CU_ASSERT_EQUAL(slt.left, 7);
CU_ASSERT_EQUAL(slt.right, 10);
CU_ASSERT_EQUAL(slt.sum, 43);
slt = maximum_subarray(nums, 0, sizeof(nums)/sizeof(int), MSA_PLC_LINEAR);
CU_ASSERT_EQUAL(slt.left, 7);
CU_ASSERT_EQUAL(slt.right, 10);
CU_ASSERT_EQUAL(slt.sum, 43);
int nums2[] = {1, 2, -3, 1};
slt = maximum_subarray(nums2, 0, sizeof(nums2)/sizeof(int), MSA_PLC_LINEAR);
CU_ASSERT_EQUAL(slt.left, 0);
CU_ASSERT_EQUAL(slt.right, 1);
CU_ASSERT_EQUAL(slt.sum, 3);
}
CU_ErrorCode add_test_maximum_subarray() {
CU_pSuite pSuite = CU_add_suite("maximum_subarray", NULL, NULL);
CHECK_CU_GLOBAL();
CU_ADD_TEST(pSuite, test_maximum_subarray);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep>#include "matrix_chain_multiplication.h"
#include "test_matrix_chain_multiplication.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <CUnit/Console.h>
#include <stdio.h>
static int InitSuite() {
return 0;
}
static int EndSuite() {
return 0;
}
static void test_matrix_chain_multi() {
int p[] = {30, 35, 15, 5, 10, 20, 25};
mcm_slt_t slt = matrix_chain_order(p, 7);
CU_ASSERT_EQUAL(slt.m[1][6] , 15125);
free_mcm_slt(slt);
}
CU_ErrorCode add_test_matrix_chain_multi() {
CU_pSuite pSuite = NULL;
    /***************
     * 1. CU_add_suite registers a new suite
     * 2. the suite name, e.g. "testSuite"
     * 3. InitSuite / EndSuite: per-suite setup and teardown functions; pass NULL if not needed
     ****************/
    pSuite = CU_add_suite("matrix_chain_multi", InitSuite, EndSuite);
    CHECK_CU_GLOBAL();
    /***************
     * 1. register the test cases belonging to the current suite
     * 2. pSuite: the suite pointer
     * 3. "Test1": the test case name
     * 4. Test1: the test function
     ***************/
CU_ADD_TEST(pSuite, test_matrix_chain_multi);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep>#ifndef LONGEST_COMMON_SUBSEQUENCE_H
#define LONGEST_COMMON_SUBSEQUENCE_H
enum lcs_slt_dirc_e {
UP,
LEFT,
UP_LEFT
};
typedef enum lcs_slt_dirc_e lcs_slt_dirc_t;
struct lcs_slt_s {
int **c;
lcs_slt_dirc_t **b;
int m;
int n;
};
typedef struct lcs_slt_s lcs_slt_t;
void init_lcs_slt(lcs_slt_t *slt, int m, int n);
void free_lcs_slt(lcs_slt_t *slt);
lcs_slt_t lcs_length(const char *strx, const char *stry);
void get_lcs(const char *strx, const char *stry, int i, int j, char *buf);
#endif
<file_sep># Chapter 6: Heapsort
<file_sep>#ifndef TEST_QUICK_SORT_H
#define TEST_QUICK_SORT_H
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_quick_sort();
#endif
<file_sep>#ifndef QUICK_SORT_H
#define QUICK_SORT_H
int partition(int nums[], int begin, int end);
int randomized_partition(int nums[], int begin, int end);
void randomized_quick_sort(int nums[], int begin, int end);
void quick_sort(int nums[], int begin, int end);
#endif
<file_sep>#include "cut_rod.h"
#include "test_cut_rod.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <CUnit/Console.h>
#include <stdio.h>
static int InitSuite() {
return 0;
}
static int EndSuite() {
return 0;
}
static void test_cut_rod() {
int p[] = {0, 1, 5, 8, 9, 10, 17, 17, 20, 24, 30};
CU_ASSERT(bottom_up_cut_rod(p, 10) == 30);
CU_ASSERT(memoized_cut_rod(p, 10) == 30);
}
CU_ErrorCode add_test_cut_rod() {
CU_pSuite pSuite = NULL;
    /***************
     * 1. CU_add_suite registers a new suite
     * 2. the suite name, e.g. "testSuite"
     * 3. InitSuite / EndSuite: per-suite setup and teardown functions; pass NULL if not needed
     ****************/
    pSuite = CU_add_suite("cut_rod", InitSuite, EndSuite);
    CHECK_CU_GLOBAL();
    /***************
     * 1. register the test cases belonging to the current suite
     * 2. pSuite: the suite pointer
     * 3. "Test1": the test case name
     * 4. Test1: the test function
     ***************/
CU_ADD_TEST(pSuite, test_cut_rod);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep>#ifndef TEST_MATRIX_CHAIN_MULTI
#define TEST_MATRIX_CHAIN_MULTI
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_matrix_chain_multi();
#endif
<file_sep># Chapter 8: Sorting in Linear Time
<file_sep>#ifndef TEST_OPTIMAL_BINARY_SEARCH_TREE
#define TEST_OPTIMAL_BINARY_SEARCH_TREE
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_obst();
#endif
<file_sep>/**
* optimal_binary_search_tree.c
 * Optimal binary search trees
*/
#include "optimal_binary_search_tree.h"
#include "../utils.h"
#include <stdlib.h>
#include <limits.h>
#include <float.h>
#include <stdio.h>
void init_obst_slt(obst_slt_t *slt, int n) {
slt->n = n;
slt->e = malloc((n+2)*sizeof(double*));
slt->r = malloc((n+2)*sizeof(int*));
for (int i = 1; i < n+2; i++) {
slt->e[i] = malloc((n+1)*sizeof(double));
slt->r[i] = malloc((n+1)*sizeof(int));
}
}
void free_obst_slt(obst_slt_t *slt) {
int n = slt->n;
for (int i= 1; i < n+2; i++) {
free(slt->e[i]);
free(slt->r[i]);
}
free(slt->e);
free(slt->r);
}
obst_slt_t optimal_bst(double p[], double q[], int n) {
obst_slt_t slt;
init_obst_slt(&slt, n);
double **e = slt.e;
int **root = slt.r;
double w[n+2][n+1];
for (int i = 1; i < n+2; i++) {
e[i][i-1] = q[i-1];
w[i][i-1] = q[i-1];
}
for (int l = 1; l <= n; l++) {
for (int i = 1; i <= n-l+1; i++) {
int j = i+l-1;
e[i][j] = DBL_MAX;
w[i][j] = w[i][j-1] + p[j] + q[j];
for (int r = i; r <= j; r++) {
double cur = e[i][r-1] + e[r+1][j] + w[i][j];
if (cur < e[i][j]) {
e[i][j] = cur;
root[i][j] = r;
}
}
}
}
return slt;
}
void print_obst(double p[], double q[], int i, int j) {
}
<file_sep>#include "longest_common_subsequence.h"
#include "test_longest_common_subsequence.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <CUnit/Console.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
static int InitSuite() {
return 0;
}
static int EndSuite() {
return 0;
}
static void test_lcs() {
const char *str1 = "ABCBDAB";
const char *str2 = "BDCABA";
const char *lcs = "BCBA";
char buf[512];
get_lcs(str1, str2, strlen(str1), strlen(str2), buf);
CU_ASSERT(strlen(buf) == strlen(lcs));
CU_ASSERT(strncmp(buf, lcs, 4) == 0);
}
CU_ErrorCode add_test_lcs() {
CU_pSuite pSuite = NULL;
    /***************
     * 1. CU_add_suite registers a new suite
     * 2. the suite name, e.g. "testSuite"
     * 3. InitSuite / EndSuite: per-suite setup and teardown functions; pass NULL if not needed
     ****************/
    pSuite = CU_add_suite("lcs", InitSuite, EndSuite);
    CHECK_CU_GLOBAL();
    /***************
     * 1. register the test cases belonging to the current suite
     * 2. pSuite: the suite pointer
     * 3. "Test1": the test case name
     * 4. Test1: the test function
     ***************/
CU_ADD_TEST(pSuite, test_lcs);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep>#ifndef RADIX_SORT
#define RADIX_SORT
#include <limits.h>
#include <stdio.h>
#include <math.h>
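// RADIX_SORT_NUM_MAX_WIDTH is the number of decimal digits needed to represent INT_MAX.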
#define RADIX_SORT_NUM_MAX_WIDTH ((int)floor((log(abs(INT_MAX))/(log(10)))) + 1)
void radix_sort(int nums[], int len);
#endif
<file_sep># Project information
project(algorithms)
cmake_minimum_required(VERSION 2.6)
# directory containing the CUnit headers
include_directories(/usr/local/include)
# directory containing the CUnit library
link_directories(/usr/local/lib)
# source directories used to build the static library
aux_source_directory(./chap02 DIR_SRCS)
aux_source_directory(./chap04 DIR_SRCS)
aux_source_directory(./chap06 DIR_SRCS)
aux_source_directory(./chap07 DIR_SRCS)
aux_source_directory(./chap08 DIR_SRCS)
aux_source_directory(./chap09 DIR_SRCS)
aux_source_directory(./chap15 DIR_SRCS)
add_library(algo ${DIR_SRCS})
# define the executable targets
add_executable(algorithms main.c utils.c)
add_executable(test_algorithms test_main.c utils.c)
# libraries the executables link against
target_link_libraries(algorithms algo)
target_link_libraries(test_algorithms algo cunit)
<file_sep>#ifndef TEST_CUT_ROD
#define TEST_CUT_ROD
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_cut_rod();
#endif
<file_sep>#ifndef TEST_RADIX_SORT_H
#define TEST_RADIX_SORT_H
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_radix_sort();
#endif
<file_sep>#include "insertion_sort.h"
#include "test_insertion_sort.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <stdlib.h>
static void test_insertion_sort() {
int len = 100;
int nums[len];
for (int i = 0; i < len; i++) {
nums[i] = rand();
}
insertion_sort(nums, len);
for (int i = 1; i < len; i++) {
CU_ASSERT(nums[i-1] <= nums[i]);
}
}
CU_ErrorCode add_test_insertion_sort() {
CU_pSuite pSuite = CU_add_suite("insertion_sort", NULL, NULL);
CHECK_CU_GLOBAL();
CU_ADD_TEST(pSuite, test_insertion_sort);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep>#ifndef TEST_INSERTION_SORT_H
#define TEST_INSERTION_SORT_H
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_insertion_sort();
#endif
<file_sep>#include "counting_sort.h"
#include "test_counting_sort.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <stdlib.h>
static void test_counting_sort() {
int len = 100;
int nums[len];
for (int i = 0; i < len; i++) {
nums[i] = rand() % COUNTING_SORT_NUM_MAX;
}
counting_sort(nums, len);
for (int i = 1; i < len; i++) {
CU_ASSERT(nums[i-1] <= nums[i]);
}
}
CU_ErrorCode add_test_counting_sort() {
CU_pSuite pSuite = CU_add_suite("counting_sort", NULL, NULL);
CHECK_CU_GLOBAL();
CU_ADD_TEST(pSuite, test_counting_sort);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep># Chapter 7: Quicksort
<file_sep>/**
 * cut_rod.c
 * The rod-cutting problem
*/
#include "cut_rod.h"
#include "../utils.h"
#include <stdlib.h>
#include <stdio.h>
#include <limits.h>
int memoized_cut_rod_aux(int price_table[], int n, int max_table[]);
struct cr_slt_s {
int *max_table;
int *cut_table;
int len;
};
typedef struct cr_slt_s cr_slt_t;
void free_cr_slt(cr_slt_t slt);
cr_slt_t extended_bottom_up_cut_rod(int price_table[], int n);
/**
 * Given a price table and a rod length, return the revenue of the best cutting plan.
 * Complexity: O(2^n).
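 * For example, with the CLRS price table {0,1,5,8,9,10,17,17,20,24,30}, the best revenue
 * for n = 4 is 10 (two pieces of length 2) and for n = 10 it is 30 (no cut), as the unit test checks.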
*/
int cut_rod(int price_table[], int n) {
if (n == 0) {
return 0;
}
int max = INT_MIN;
    for (int i = 1; i <= n; i++) {
        int p = price_table[i] + cut_rod(price_table, n-i);
if (p > max) {
max = p;
}
}
return max;
}
int memoized_cut_rod(int price_table[], int n) {
int *max_table = malloc((n+1)*sizeof(int));
    for (int i = 0; i <= n; i++) {
        max_table[i] = INT_MIN;
    }
    max_table[0] = 0;
int max = memoized_cut_rod_aux(price_table, n, max_table);
free(max_table);
return max;
}
int memoized_cut_rod_aux(int price_table[], int n, int max_table[]) {
if (max_table[n] >= 0) {
return max_table[n];
}
    int max = INT_MIN;
if (n == 0) {
max = 0;
}
for (int i = 1; i <= n; i++) {
int p = price_table[i] + memoized_cut_rod_aux(price_table, n-i, max_table);
if (p > max) {
max = p;
}
}
max_table[n] = max;
return max;
}
int bottom_up_cut_rod(int price_table[], int n) {
int *max_table = malloc((n+1)*sizeof(int));
max_table[0] = 0;
for (int i = 1; i <= n; i++) {
int max = INT_MIN;
for (int j = 1; j <= i; j++) {
int cur = price_table[j] + max_table[i-j];
if (cur > max) {
max = cur;
}
}
max_table[i] = max;
}
int result = max_table[n];
free(max_table);
return result;
}
void free_cr_slt(cr_slt_t slt) {
free(slt.max_table);
free(slt.cut_table);
}
cr_slt_t extended_bottom_up_cut_rod(int price_table[], int n) {
cr_slt_t slt;
slt.len = n+1;
slt.max_table = malloc((n+1)*sizeof(int));
slt.cut_table= malloc((n+1)*sizeof(int));
slt.max_table[0] = 0;
slt.cut_table[0] = 0;
for (int i = 1; i <= n; i++) {
int max = INT_MIN;
for (int j = 1; j <= i; j++) {
            int cur = price_table[j] + slt.max_table[i-j];
if (cur > max) {
max = cur;
slt.cut_table[i] = j;
}
}
slt.max_table[i] = max;
}
return slt;
}
void print_cut_rod_solution(int price_table[], int n) {
cr_slt_t slt = extended_bottom_up_cut_rod(price_table, n);
printf("%d\n", slt.max_table[n]);
while (n > 0) {
printf("%d ", slt.cut_table[n]);
n -= slt.cut_table[n];
}
printf("\n");
free_cr_slt(slt);
}
<file_sep>/**
* radix_sort.c
 * Radix sort
*/
#include "radix_sort.h"
#include "counting_sort.h"
#include "../utils.h"
#include <stdlib.h>
void radix_sort(int nums[], int len) {
int counts[COUNTING_SORT_NUM_MAX];
int sorted[len];
    // counting-sort on each digit, starting from the least significant one (any stable sort would work)
int max_len = get_dgt_cnt(RAND_MAX);
for (int j = 0; j < max_len; j++) {
for (int i = 0; i < COUNTING_SORT_NUM_MAX; i++) {
counts[i] = 0;
}
for (int i = 0; i < len; i++) {
counts[get_ith_dgt(nums[i], j)]++;
}
for (int i = 1; i < COUNTING_SORT_NUM_MAX; i++) {
counts[i] += counts[i-1];
}
for (int i = len-1; i >= 0; i--) {
sorted[counts[get_ith_dgt(nums[i], j)]-1] = nums[i];
counts[get_ith_dgt(nums[i], j)]--;
}
for (int i = 0; i < len; i++) {
nums[i] = sorted[i];
}
}
}
<file_sep>/**
 * quick_sort.c
 * Quicksort
*/
#include "quick_sort.h"
#include "../utils.h"
#include <stdlib.h>
int partition(int nums[], int begin, int end) {
int small_top = begin;
int key = nums[end-1];
    // partitioning can be viewed as stacking the elements smaller than the key at the front of the range
for (int i = begin; i < end; i++) {
if (nums[i] < key) {
swap(nums, small_top, i);
small_top++;
}
}
swap(nums, small_top, end-1);
return small_top;
}
void quick_sort(int nums[], int begin, int end) {
if (begin < end) {
int p = partition(nums, begin, end);
quick_sort(nums, begin, p);
quick_sort(nums, p+1, end);
}
}
int randomized_partition(int nums[], int begin, int end) {
int r = rand() % (end-begin) + begin;
swap(nums, end-1, r);
return partition(nums, begin, end);
}
void randomized_quick_sort(int nums[], int begin, int end) {
if (begin < end) {
int p = randomized_partition(nums, begin, end);
randomized_quick_sort(nums, begin, p);
randomized_quick_sort(nums, p+1, end);
}
}
<file_sep>/**
* utils.c
 * Utility functions
*/
#include "utils.h"
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
void print_array(int nums[], int len) {
printf("[");
for (int i = 0; i < len; i++) {
if (i < len-1) {
printf("%d, ", nums[i]);
} else {
printf("%d", nums[i]);
}
if (((i + 1) % 5 == 0) && (i != len - 1)) {
printf("\n");
}
}
printf("]\n");
}
void print_array2(int **nums, int rows, int cols) {
printf("[\n");
for (int i = 0; i < rows; i++) {
printf("[");
for (int j = 0; j < cols; j++) {
if (j < cols-1) {
printf("%d, ", nums[i][j]);
} else {
printf("%d", nums[i][j]);
}
if (j != 0 && j%10 == 0) printf("\n");
}
printf("]\n");
}
printf("]\n");
}
void swap(int nums[], int a, int b) {
if (a == b) return;
int tmp = nums[a];
nums[a] = nums[b];
nums[b] = tmp;
}
int maxnum(int a, int b) {
if (a > b) {
return a;
} else {
return b;
}
}
int minnum(int a, int b) {
if (a < b) {
return a;
} else {
return b;
}
}
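// Returns the i-th decimal digit of num, where i = 0 is the least significant digit.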
int get_ith_dgt(int num, int i) {
return num % (int)pow(10, i+1) / (int)pow(10, i);
}
int get_dgt_cnt(int num) {
int i = 1;
while (num > pow(10, i)-1) {
i++;
}
return i;
}
void shuffle(int nums[], int len) {
    // the last position is determined automatically
for (int i = len-1; i > 0; i--) {
swap(nums, i, rand()%(i+1));
}
}
<file_sep>#ifndef CUT_ROD_H
#define CUT_ROD_H
int cut_rod(int price_table[], int n);
int memoized_cut_rod(int price_table[], int n);
int bottom_up_cut_rod(int price_table[], int n);
void print_cut_rod_solution(int price_table[], int n);
#endif
<file_sep>#ifndef TEST_MAXIMUM_SUBARRAY_H
#define TEST_MAXIMUM_SUBARRAY_H
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_maximum_subarray();
#endif
<file_sep>#ifndef UTILS_H
#define UTILS_H
#define CHECK_CU_GLOBAL() \
do { \
CU_ErrorCode c = CU_get_error(); \
if (CUE_SUCCESS != c) { \
printf("%s", CU_get_error_msg()); \
return c; \
} \
} \
while (0)
#define CHECK_CU_RETURN(res) \
do { \
if (CUE_SUCCESS != (res)) { \
printf("%s", CU_get_error_msg()); \
return (res); \
} \
} \
while (0)
/**
 * Print an array
*/
void print_array(int nums[], int len);
void print_array2(int **nums, int rows, int cols);
void swap(int nums[], int a, int b);
int maxnum(int a, int b);
int minnum(int a, int b);
int get_ith_dgt(int num, int i);
int get_dgt_cnt(int num);
void shuffle(int nums[], int len);
#endif
<file_sep>#ifndef OPTIMAL_BINARY_SEARCH_TREE
#define OPTIMAL_BINARY_SEARCH_TREE
struct obst_slt_s {
double **e;
int **r;
int n;
};
typedef struct obst_slt_s obst_slt_t;
void init_obst_slt(obst_slt_t *slt, int n);
void free_obst_slt(obst_slt_t *slt);
obst_slt_t optimal_bst(double p[], double q[], int n);
#endif
<file_sep>#ifndef INSERTION_SORT_H
#define INSERTION_SORT_H
void insertion_sort(int array[], int len);
#endif
<file_sep>/**
 * maximum_subarray.c
 * Maximum subarray
*/
#include <limits.h>
#include <stdio.h>
#include "maximum_subarray.h"
#include "../utils.h"
static MSA_slt_t maximum_subarray_daq(int nums[], int begin, int end);
static MSA_slt_t maximum_subarray_linear(int nums[], int begin, int end);
/**
 * Solution 1: divide and conquer.
 * Divide: split the array into two halves. The maximum subarray can only lie in one of three places:
 * 1. entirely in the left half, in which case it is also the maximum subarray of the left half;
 * 2. entirely in the right half, in which case it is also the maximum subarray of the right half;
 * 3. partly in the left half and partly in the right half (crossing the midpoint).
 * This is not the optimal approach; a linear-time algorithm exists.
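 * Example: for {13,-3,-25,20,-3,-16,-23,18,20,-7,12,-5,-22,15,-4,7} the maximum subarray spans indices 7..10 with sum 43 (see the unit test).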
*/
MSA_slt_t maximum_subarray_daq(int nums[], int begin, int end) {
int len = end - begin;
MSA_slt_t sa;
if (len == 1) {
sa.left = sa.right = begin;
sa.sum = nums[begin];
return sa;
}
int mid = begin + len/2;
    // maximum subarrays of the left and right halves
MSA_slt_t lsa = maximum_subarray_daq(nums, begin, mid);
MSA_slt_t rsa = maximum_subarray_daq(nums, mid, end);
    // maximum subarray crossing mid
int maxsum = INT_MIN;
int suml = 0;
int sum = 0;
int maxil = mid-1;
for (int i = mid-1; i >= begin; i--) {
suml += nums[i];
if (suml > maxsum) {
maxsum = suml;
maxil = i;
}
}
sum += maxsum;
maxsum = INT_MIN;
int sumr = 0;
int maxir = mid;
for (int i = mid; i < end; i++) {
sumr += nums[i];
if (sumr > maxsum) {
maxsum = sumr;
maxir = i;
}
}
sum += maxsum;
    // pick the best of the three cases
if (lsa.sum > rsa.sum) {
sa.left = lsa.left;
sa.right = lsa.right;
sa.sum = lsa.sum;
} else {
sa.left = rsa.left;
sa.right = rsa.right;
sa.sum = rsa.sum;
}
if (sum > sa.sum) {
sa.sum = sum;
sa.left = maxil;
sa.right = maxir;
}
// printf("%d %d %d\n", suml, sumr, sum);
return sa;
}
MSA_slt_t maximum_subarray_linear(int nums[], int begin, int end) {
MSA_slt_t slt = {begin, begin, nums[begin]};
int cur = nums[begin];
int left = begin;
for (int i = begin+1; i < end; i++) {
if (cur <= 0) {
cur = nums[i];
left = i;
} else {
cur += nums[i];
}
if (cur > slt.sum) {
slt.left = left;
slt.right = i;
slt.sum = cur;
}
}
return slt;
}
MSA_slt_t maximum_subarray(int nums[], int begin, int end, MSA_policy_t p) {
if (p == MSA_PLC_DAQ) {
return maximum_subarray_daq(nums, begin, end);
} else {
return maximum_subarray_linear(nums, begin, end);
}
}
<file_sep>/**
 * Insertion sort
*/
#include "insertion_sort.h"
#include "../utils.h"
/**
 * Insertion sort
*/
void insertion_sort(int nums[], int len) {
    // To insert the i-th element, scan backwards starting from element i-1.
    // Loop invariant: nums[0]..nums[i-1] is sorted.
    for (int i = 1; i < len; i++) {
        int j = i-1;
        int key = nums[i];
        // As long as the scanned element is greater than the key, shift it one position to the right,
        // then move on to the previous element, until nothing is left to scan or the scanned element is <= key.
        // Loop invariant: nums[0]..nums[j] and nums[j+2]..nums[i] are sorted, and nums[j+2]..nums[i] are all greater than key.
        while (j >= 0 && nums[j] > key) {
            nums[j+1] = nums[j];
            j--;
        }
        // The scan is done; by the loop invariant the key belongs at position j+1.
nums[j+1] = key;
}
}
<file_sep>/**
* heap_sort.c
 * Heapsort
*/
#include "heap_sort.h"
#include <limits.h>
#include "../utils.h"
/**
 * Maintain the max-heap property.
 * Precondition: the left and right subtrees of i are already max-heaps.
 * Postcondition: the tree rooted at i is a max-heap.
*/
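// Example: for nums = {1, 9, 8}, max_heapify(nums, 3, 0) swaps 9 up to the root, giving {9, 1, 8}.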
void max_heapify(int nums[], int heap_size, int i) {
int l = 2*i+1;
int r = 2*i+2;
    // find the largest among positions i, l and r; if it is l or r, swap it with i
int max;
if (l < heap_size && nums[l] > nums[i]) {
max = l;
} else {
max = i;
}
if (r < heap_size && nums[r] > nums[max]) {
max = r;
}
if (max != i) {
swap(nums, i, max);
max_heapify(nums, heap_size, max);
}
}
/**
 * Build a max-heap.
 * Note: the leaves are already valid heaps, so we can start from the last non-leaf node and heapify bottom-up.
*/
void build_max_heap(int nums[], int len) {
int heap_size = len;
for (int i = len/2-1; i >=0; i--) {
max_heapify(nums, heap_size, i);
}
}
/**
 * Heapsort
*/
void heap_sort(int nums[], int len) {
build_max_heap(nums, len);
int heap_size = len;
for (int i = len-1; i >= 0; i--) {
int tmp = nums[0];
nums[0] = nums[i];
nums[i] = tmp;
heap_size--;
max_heapify(nums, heap_size, 0);
}
}
int heap_maximum(int nums[]) {
return nums[0];
}
int heap_extract_max(int nums[], int heap_size) {
int max;
max = nums[0];
nums[0] = nums[heap_size-1];
heap_size--;
max_heapify(nums, heap_size, 0);
return max;
}
void heap_increase_key(int nums[], int i, int key) {
if (key < nums[i]) {
// TODO: error
}
nums[i] = key;
while (i > 0 && nums[(i-1)/2] < nums[i]) {
int tmp = nums[i];
nums[i] = nums[(i-1)/2];
        nums[(i-1)/2] = tmp;
i = (i-1)/2;
}
}
void max_heap_insert(int nums[], int *heap_size, int key) {
(*heap_size)++;
    nums[*heap_size - 1] = INT_MIN;
    heap_increase_key(nums, *heap_size-1, key);
}
<file_sep># algorithms
C implementations of the pseudocode in Introduction to Algorithms (<NAME>, 3rd edition).
## Running the tests
```shell
# generate the build files
cmake .
# build
make
# run the test demo
./test_algorithms
```
<file_sep>#include "order_statistic.h"
#include "test_order_statistic.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <stdlib.h>
static void test_order_statistic() {
int len = 100;
int nums[len];
for (int i = 0; i < len; i++) {
nums[i] = i;
}
shuffle(nums, len);
CU_ASSERT(maximum(nums, len) == len-1);
CU_ASSERT(minimum(nums, len) == 0);
for (int i = 0; i < 10; i++) {
// CU_ASSERT(randomized_select(nums, 0, len, i) == i);
int rst = linear_select(nums, 0, len, i);
// printf("expect: %d, actually: %d\n", i, rst);
CU_ASSERT_EQUAL(rst, i);
}
}
CU_ErrorCode add_test_order_statistic() {
CU_pSuite pSuite = CU_add_suite("order_statistic", NULL, NULL);
CHECK_CU_GLOBAL();
CU_ADD_TEST(pSuite, test_order_statistic);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep>/**
* order_statistic.c
 * Order statistics
*/
#include "order_statistic.h"
#include "../chap02/insertion_sort.h"
#include "../chap07/quick_sort.h"
#include "../utils.h"
#include <stdlib.h>
#include <stdio.h>
int minimum(int nums[], int len) {
int min = nums[0];
for (int i = 1; i < len; i++) {
if (nums[i] < min) {
min = nums[i];
}
}
return min;
}
int maximum(int nums[], int len) {
int max = nums[0];
for (int i = 1; i < len; i++) {
if (nums[i] > max) {
max = nums[i];
}
}
return max;
}
/**
 * Return the i-th smallest element (i = 0 is the smallest).
 * Expected O(n).
*/
int randomized_select(int nums[], int begin, int end, int i) {
int len = end - begin;
if (len == 1) {
return nums[begin];
}
int q = randomized_partition(nums, begin, end);
int left_len = q - begin;
if (i == left_len) {
return nums[q];
} else if (i < left_len) {
return randomized_select(nums, begin, q, i);
} else {
return randomized_select(nums, q+1, end, i-left_len-1);
}
}
/**
 * Split the array into groups of 5, find each group's median, and return the median of those medians.
*/
int median_of_medians(int nums[], int begin, int end) {
int len = end - begin;
int parts_count = len / 5;
int last_part_len = len % 5;
int part[5] = {0};
if (last_part_len > 0) {
parts_count++;
} else {
last_part_len = 5;
}
// int *medians = malloc(parts_count * sizeof(int));
int medians[parts_count];
for (int i = 0; i < parts_count; i++) {
int part_len = 5;
if (i == parts_count-1) {
part_len = last_part_len;
}
for (int j = 0; j < part_len; j++) {
part[j] = nums[begin+i*5+j];
}
        // Insertion sort is enough to find each group's median.
        // The book does the same and proves the whole algorithm stays worst-case linear,
        // which works because each group has constant size.
insertion_sort(part, part_len);
medians[i] = part[(part_len-1)/2];
}
int mm = linear_select(medians, 0, parts_count, (parts_count-1)/2);
// free(medians);
return mm;
}
/**
 * Partition the array around the given pivot value; return the pivot's index after partitioning.
*/
int specified_partition(int nums[], int begin, int end, int pivot) {
int pivot_index = begin;
for (; pivot_index < end; pivot_index++) {
if (nums[pivot_index] == pivot) {
break;
}
}
swap(nums, end-1, pivot_index);
return partition(nums, begin, end);
}
/**
 * Worst-case O(n); assumes all elements are distinct.
*/
int linear_select(int nums[], int begin, int end, int i) {
int len = end - begin;
if (len == 1) {
return nums[begin];
}
// median_of_medians
int mm = median_of_medians(nums, begin, end);
int q = specified_partition(nums, begin, end, mm);
int left_len = q - begin;
if (i == left_len) {
return nums[q];
} else if (i < left_len) {
return linear_select(nums, begin, q, i);
} else {
return linear_select(nums, q+1, end, i-left_len-1);
}
}
<file_sep>#ifndef TEST_MERGE_SORT_H
#define TEST_MERGE_SORT_H
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_merge_sort();
#endif
<file_sep>#ifndef TEST_HEAP_SORT_H
#define TEST_HEAP_SORT_H
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_heap_sort();
#endif
<file_sep>#ifndef ORDER_STATISTIC_H
#define ORDER_STATISTIC_H
int minimum(int nums[], int len);
int maximum(int nums[], int len);
int linear_select(int nums[], int begin, int end, int r);
int randomized_select(int nums[], int begin, int end, int r);
#endif
<file_sep>#include "heap_sort.h"
#include "test_heap_sort.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <stdlib.h>
static void test_heap_sort() {
int len = 100;
int nums[len];
for (int i = 0; i < len; i++) {
nums[i] = rand();
}
heap_sort(nums, len);
for (int i = 1; i < len; i++) {
CU_ASSERT(nums[i-1] <= nums[i]);
}
}
CU_ErrorCode add_test_heap_sort() {
CU_pSuite pSuite = CU_add_suite("heap_sort", NULL, NULL);
CHECK_CU_GLOBAL();
CU_ADD_TEST(pSuite, test_heap_sort);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep>#ifndef TEST_ORDER_STATISTIC_H
#define TEST_ORDER_STATISTIC_H
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_order_statistic();
#endif
<file_sep>#ifndef TEST_COUNTING_SORT_H
#define TEST_COUNTING_SORT_H
#include <CUnit/CUnit.h>
CU_ErrorCode add_test_counting_sort();
#endif
<file_sep>#include "merge_sort.h"
#include "test_merge_sort.h"
#include "../utils.h"
#include <CUnit/CUnit.h>
#include <stdlib.h>
static void test_merge_sort() {
int len = 100;
int nums[len];
for (int i = 0; i < len; i++) {
nums[i] = rand();
}
merge_sort(nums, len);
for (int i = 1; i < len; i++) {
CU_ASSERT(nums[i-1] <= nums[i]);
}
}
CU_ErrorCode add_test_merge_sort() {
CU_pSuite pSuite = CU_add_suite("merge_sort", NULL, NULL);
CHECK_CU_GLOBAL();
CU_ADD_TEST(pSuite, test_merge_sort);
CHECK_CU_GLOBAL();
return CUE_SUCCESS;
}
<file_sep># Chapter 15: Dynamic Programming
<file_sep>/**
 * counting_sort.c
 * Counting sort
 * Note:
 * counting sort is not naturally in-place; to expose an in-place interface the extra space grows from O(k) to O(n+k)
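 * e.g. with COUNTING_SORT_NUM_MAX = 10, {3, 1, 3, 0} sorts to {0, 1, 3, 3}; all values must lie in [0, COUNTING_SORT_NUM_MAX).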
*/
#include "counting_sort.h"
#include "../utils.h"
#include <stdlib.h>
void counting_sort(int nums[], int len) {
int *counts = malloc(COUNTING_SORT_NUM_MAX * sizeof(int));
int *sorted = malloc(len * sizeof(int));
for (int i = 0; i < COUNTING_SORT_NUM_MAX; i++) {
counts[i] = 0;
}
for (int i = 0; i < len; i++) {
counts[nums[i]]++;
}
for (int i = 1; i < COUNTING_SORT_NUM_MAX; i++) {
counts[i] += counts[i-1];
}
    // iterate from back to front so the sort stays stable
for (int i = len-1; i >= 0; i--) {
sorted[counts[nums[i]]-1] = nums[i];
counts[nums[i]]--;
}
for (int i = 0; i < len; i++) {
nums[i] = sorted[i];
}
free(counts);
free(sorted);
}
|
70597c00b6e1a1c96deac3b1fae27ca6430308d5
|
[
"Markdown",
"C",
"CMake"
] | 60
|
C
|
jungor/algorithms
|
6e942021be1df81648b7d2ea68206b21ecad3225
|
6fbbcb10b9436a3c840035cfd265b89ed7e0f5c9
|
refs/heads/master
|
<file_sep>package com.example.other.hookdemo;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
public class HookManager {
static Object activityThreadInstance;
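    // Uses reflection to fetch the hidden ActivityThread singleton via
    // ActivityThread.currentActivityThread() and caches it for later use.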
public static void init() throws ClassNotFoundException, NoSuchMethodException,
InvocationTargetException, IllegalAccessException {
Class<?> clazz=Class.forName("android.app.ActivityThread");
Method currentActivityThread=clazz.getDeclaredMethod("currentActivityThread");
activityThreadInstance=currentActivityThread.invoke(null);
}
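    // Replaces the mInstrumentation field of the cached ActivityThread with an
    // InstrumentationHook instance, so calls that go through the Instrumentation are routed to the hook.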
public static void injectInstrumentation() throws NoSuchFieldException, IllegalAccessException {
Field field_instrumentation=activityThreadInstance.getClass()
.getDeclaredField("mInstrumentation");
field_instrumentation.setAccessible(true);
InstrumentationHook instrumentationHook=new InstrumentationHook();
field_instrumentation.set(activityThreadInstance,instrumentationHook);
}
}
|
0a0a2ce48a137797402355d3cde0327a7b6593d7
|
[
"Java"
] | 1
|
Java
|
wangjiapu/HookDemo
|
c644eb4bb2c86b43c2b1e5fcaf2fdb8d89eec96c
|
1268da3b5b642d6acd97037b806afcbd8994a353
|
refs/heads/master
|
<file_sep>package ca.kklee.comics;
import android.app.NotificationManager;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.view.PagerTitleStrip;
import android.support.v4.view.ViewPager;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.AbsListView;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import com.kklee.utilities.Logger;
import ca.kklee.comics.comic.ComicCollection;
import ca.kklee.comics.navdrawer.DrawerItemClickListener;
import ca.kklee.comics.navdrawer.NavDrawerAdapter;
import ca.kklee.comics.navdrawer.NavDrawerHeader;
import ca.kklee.comics.navdrawer.RefreshListener;
import ca.kklee.comics.options.OptionsActivity;
import ca.kklee.comics.scheduletask.SilentDownload;
import ca.kklee.comics.viewpager.SectionsPagerAdapter;
/**
* TODO List
 * separate FileUtil so it can be moved to lib
* custom options menu
* proper image scaling
* image pinch zooming
* view comics of diff dates
* add authors
*/
public class HomeActivity extends AppCompatActivity {
private Intent onDrawerCloseIntent = null;
private DrawerLayout drawerLayout;
private ListView drawerList;
private LinearLayout drawerLinear;
private ViewPager viewPager;
private ActionBarDrawerToggle drawerToggle;
private SwipeRefreshLayout srl;
private SharedPreferences pref;
private SharedPreferences.Editor editor;
private Handler handler = new Handler();
private Runnable runnable = new Runnable() {
@Override
public void run() {
hideUI(getWindow().getDecorView());
}
};
public static void hideUI(View decorView) {
if ((decorView.getSystemUiVisibility() & View.SYSTEM_UI_FLAG_FULLSCREEN) == 0) {
decorView.setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION // hide nav bar
| View.SYSTEM_UI_FLAG_FULLSCREEN // hide status bar
| View.SYSTEM_UI_FLAG_IMMERSIVE
);
}
}
public static void showUI(View decorView) {
if ((decorView.getSystemUiVisibility() & View.SYSTEM_UI_FLAG_FULLSCREEN) != 0) {
decorView.setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
);
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home);
//setLogger
Logger.setIsLogging(AppConfig.IS_LOGGING());
Logger.setLogToFile(getApplicationContext());
pref = getSharedPreferences(SharedPrefConstants.COMICNEWFLAG, Context.MODE_PRIVATE);
editor = pref.edit();
initComicCollection(); //do this before everything else
initComicPager();
initNavDrawer(); //ComicPager comes before this
// initTestButton();
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.KITKAT) {
hideUI(getWindow().getDecorView());
initImmersionFullScreen();
}
}
private void initComicCollection() {
if (ComicCollection.getInstance().getComics() == null) {
ComicCollection.getInstance().setComics(this);
}
}
private void initComicPager() {
SectionsPagerAdapter sectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager(), this);
viewPager = (ViewPager) findViewById(R.id.pager);
viewPager.setAdapter(sectionsPagerAdapter);
viewPager.setOnPageChangeListener(new ViewPager.OnPageChangeListener() {
@Override
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
hideUI(getWindow().getDecorView());
}
@Override
public void onPageSelected(int position) {
drawerList.setItemChecked(position, true);
}
@Override
public void onPageScrollStateChanged(int state) {
}
});
PagerTitleStrip pagerTitleStrip = (PagerTitleStrip) findViewById(R.id.pager_title_strip);
Typeface typeface = Typeface.createFromAsset(getAssets(), "fonts/ComicNeue-Regular-Oblique.ttf");
for (int counter = 0; counter < pagerTitleStrip.getChildCount(); counter++) {
if (pagerTitleStrip.getChildAt(counter) instanceof TextView) {
((TextView) pagerTitleStrip.getChildAt(counter)).setTypeface(typeface);
((TextView) pagerTitleStrip.getChildAt(counter)).setTextSize(18);
}
}
}
private void initNavDrawer() {
drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
drawerLinear = (LinearLayout) findViewById(R.id.left_drawer);
drawerList = (ListView) findViewById(R.id.drawer_list_view);
drawerToggle = new ActionBarDrawerToggle(this, drawerLayout, 0, 0) {
@Override
public void onDrawerSlide(View drawerView, float slideOffset) {
hideUI(getWindow().getDecorView());
super.onDrawerSlide(drawerView, slideOffset);
}
@Override
public void onDrawerOpened(View drawerView) {
drawerList.invalidateViews();
NavDrawerHeader.update(getSharedPreferences(SharedPrefConstants.COMICNEWFLAG, Context.MODE_PRIVATE), (TextView) findViewById(R.id.comic_header_last_update));
}
@Override
public void onDrawerClosed(View drawerView) {
String title = ComicCollection.getInstance().getComics()[viewPager.getCurrentItem()].getTitle();
if (pref.getBoolean(title, false)) {
editor.putBoolean(title, false);
editor.commit();
}
if (onDrawerCloseIntent != null) {
startActivity(onDrawerCloseIntent);
onDrawerCloseIntent = null;
}
}
};
drawerLayout.setDrawerListener(drawerToggle);
drawerList.setAdapter(new NavDrawerAdapter(this, R.layout.nav_list_item_layout, ComicCollection.getInstance().getFullTitleArray()));
drawerList.setOnItemClickListener(new DrawerItemClickListener(this, viewPager, drawerLayout));
drawerList.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
drawerList.setItemChecked(0, true);
drawerList.setOnScrollListener(new AbsListView.OnScrollListener() {
@Override
public void onScrollStateChanged(AbsListView absListView, int i) {
if (i == SCROLL_STATE_IDLE)
drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_UNLOCKED);
else
drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_OPEN);
}
@Override
public void onScroll(AbsListView absListView, int i, int i2, int i3) {
}
});
final RefreshListener refreshListener = new RefreshListener() {
@Override
public void onRefreshComplete() {
srl.setRefreshing(false);
drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_UNLOCKED);
viewPager.getAdapter().notifyDataSetChanged();
NavDrawerHeader.update(getSharedPreferences(SharedPrefConstants.COMICNEWFLAG, Context.MODE_PRIVATE), (TextView) findViewById(R.id.comic_header_last_update));
drawerList.invalidateViews();
}
};
srl = (SwipeRefreshLayout) findViewById(R.id.drawer_swipe_refresh_view);
srl.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
refresh(refreshListener);
}
});
srl.setProgressBackgroundColorSchemeResource(R.color.primary_4);
srl.setColorSchemeResources(R.color.primary_2, R.color.complement_2);
ImageView navButton = (ImageView) findViewById(R.id.nav_icon);
navButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
drawerLayout.openDrawer(drawerLinear);
}
});
NavDrawerHeader.update(getSharedPreferences(SharedPrefConstants.COMICNEWFLAG, Context.MODE_PRIVATE), (TextView) findViewById(R.id.comic_header_last_update));
//options button
View optionsFooter = ((LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE)).inflate(R.layout.nav_list_options_layout, null, false);
final Intent i = new Intent(this, OptionsActivity.class);
optionsFooter.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onDrawerCloseIntent = i;
drawerLayout.closeDrawer(Gravity.LEFT);
}
});
TextView optionsFooterTextView = (TextView) optionsFooter.findViewById(R.id.options_text);
optionsFooterTextView.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/ComicNeue-Regular-Oblique.ttf"));
drawerList.addFooterView(optionsFooter);
//keith made this button
final HomeActivity homeActivity = this;
View footer = ((LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE)).inflate(R.layout.nav_list_footer_layout, null, false);
footer.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (BuildConfig.DEBUG) {
Logger.showDialog(homeActivity);
}
}
});
drawerList.addFooterView(footer);
}
private void initImmersionFullScreen() {
getWindow().getDecorView().setOnSystemUiVisibilityChangeListener(new View.OnSystemUiVisibilityChangeListener() {
@Override
public void onSystemUiVisibilityChange(int visibility) {
// Note that system bars will only be "visible" if none of the
// LOW_PROFILE, HIDE_NAVIGATION, or FULLSCREEN flags are set.
if ((visibility & View.SYSTEM_UI_FLAG_FULLSCREEN) == 0) {
// The system bars are visible
handler.postDelayed(runnable, 1000 * 5);
} else {
// The system bars are NOT visible
stopAutoHideUI();
}
}
});
}
// @Override
// protected void onPostCreate(Bundle savedInstanceState) {
// super.onPostCreate(savedInstanceState);
// // Sync the toggle state after onRestoreInstanceState has occurred.
// drawerToggle.syncState();
// }
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
drawerToggle.onConfigurationChanged(newConfig);
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.KITKAT) {
if (hasFocus) {
hideUI(getWindow().getDecorView());
}
}
}
@Override
protected void onResume() {
super.onResume();
NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
notificationManager.cancelAll();
if (pref.getBoolean(SharedPrefConstants.OPENDRAWER, true)) {
if (viewPager != null) {
viewPager.getAdapter().notifyDataSetChanged();
}
drawerLayout.openDrawer(drawerLinear);
editor.putBoolean(SharedPrefConstants.OPENDRAWER, false);
editor.commit();
//find first new comic
String title;
for (int i = 0; i < ComicCollection.getInstance().getComics().length; i++) {
title = ComicCollection.getInstance().getComics()[i].getTitle();
if (pref.getBoolean(title, false)) {
drawerList.setItemChecked(i, true);
drawerList.smoothScrollToPosition(i);
viewPager.setCurrentItem(i, false);
break;
}
}
}
}
protected void refresh(RefreshListener refreshListener) {
drawerLayout.setDrawerLockMode(DrawerLayout.LOCK_MODE_LOCKED_OPEN);
new SilentDownload(this.getApplicationContext(), refreshListener).startSilentDownload();
}
private void stopAutoHideUI() {
handler.removeCallbacks(runnable);
}
// @Override
// public boolean onCreateOptionsMenu(Menu menu) {
// // Inflate the menu; this adds items to the action bar if it is present.
// getMenuInflater().inflate(R.menu.home, menu);
// return true;
// }
//
// @Override
// public boolean onPrepareOptionsMenu(Menu menu) {
// stopAutoHideUI();
// menu.findItem(R.id.action_schedule_switch).setTitle(getAlarmStateString());
// return super.onPrepareOptionsMenu(menu);
// }
//
// private String getAlarmStateString() {
// if (ScheduleTaskReceiver.isAlarmSet(this)) {
// return "Set Auto-DL OFF";
// } else {
// return "Set Auto-DL ON";
// }
// }
//
// @Override
// public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
//
// int id = item.getItemId();
//
// //noinspection SimplifiableIfStatement
// if (id == R.id.action_settings) {
// Intent i = new Intent(this, SettingsActivity.class);
// startActivity(i);
// return true;
// }
//
// return super.onOptionsItemSelected(item);
// }
}
<file_sep>## Daily Comics
Application that allows viewing of my personal list of comic strips in one convenient place, even when offline. The app can automatically download new comic strips when they are updated.
This is an Android Studio project.
This project is for personal use only.
#### Features
* View comic strips
* Automatic checks for updates and downloads new comics every 6 hrs (with wifi connection)
* Manual update (with wifi connection)
* Update notifications
* Error notifications
* Last update time
#### TODO
* Better customization of features (example: set update interval)
* In-app comic enable/disable options
* Image pinch zoom
* Image scaling based on dimensions for better viewing
* Automatic image src finding
#### How to add comic
1. Add comic information to 'assets/comic_collection.json' (a hypothetical example entry is shown after this list)
2. Define image src location in 'ca/kklee/comics/comic/ComicDOMDictionary.java'
* location should be the same with every update
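
A minimal sketch of what an entry might look like, assuming the JSON mirrors the fields of `Comic.java` (`title`, `shortForm`, `url`, `enabled`); the key names and comic data below are illustrative, not taken from the real collection file:

```json
{
  "title": "Example Comic",
  "shortForm": "Example",
  "url": "http://example.com/comic",
  "enabled": true
}
```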
##### Demo APK
<a rel='nofollow' href='https://github.com/kklee305/Comics/tree/master/apk' border='0' style='cursor:default'><img src='https://chart.googleapis.com/chart?cht=qr&chl=https%3A%2F%2Fgithub.com%2Fkklee305%2FComics%2Fblob%2Fmaster%2Fapk%2FComics.apk%3Fraw%3Dtrue&chs=180x180&choe=UTF-8&chld=L|2' alt=''></a>
<img src="https://github.com/kklee305/Comics/blob/master/screenshots/New%20Comics.png" width="300">
<img src="https://github.com/kklee305/Comics/blob/master/screenshots/Notifications.png" width="300">
<img src="https://github.com/kklee305/Comics/blob/master/screenshots/Scrolling.png" width="300">
<file_sep>package ca.kklee.comics.navdrawer;
import android.support.v4.view.ViewPager;
import android.support.v4.widget.DrawerLayout;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import ca.kklee.comics.HomeActivity;
import ca.kklee.comics.comic.ComicCollection;
/**
* Created by Keith on 09/07/2014.
*/
public class DrawerItemClickListener implements OnItemClickListener {
private ViewPager viewPager;
private DrawerLayout drawerLayout;
private HomeActivity activity;
public DrawerItemClickListener(HomeActivity activity, ViewPager viewPager, DrawerLayout drawerLayout) {
this.viewPager = viewPager;
this.drawerLayout = drawerLayout;
this.activity = activity;
}
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
if (i == ComicCollection.getInstance().getComics().length) {
} else if (i < ComicCollection.getInstance().getComics().length) {
viewPager.setCurrentItem(i, true);
drawerLayout.closeDrawers();
}
}
}
<file_sep>package ca.kklee.comics.scheduletask;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.widget.Toast;
import com.kklee.utilities.Logger;
import java.util.Calendar;
/**
* Created by Keith on 28/06/2014.
*/
public class ScheduleTaskReceiver extends BroadcastReceiver {
private static final long INTERVAL_MILLIS = AlarmManager.INTERVAL_HOUR * 6;
public static void startScheduledTask(Context context) {
Intent intent = new Intent(context, ScheduleTaskReceiver.class);
if (isAlarmSet(context)) {
Logger.i("Alarm already set");
return;
}
PendingIntent alarmIntent = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
Calendar calendar = Calendar.getInstance();
calendar.set(Calendar.HOUR_OF_DAY, nearestQuarter(calendar));
calendar.set(Calendar.MINUTE, 00);
calendar.set(Calendar.SECOND, 00);
AlarmManager alarms = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
alarms.setRepeating(AlarmManager.RTC_WAKEUP, calendar.getTimeInMillis(), INTERVAL_MILLIS, alarmIntent);
OnBootCompletedReceiver.registerMe(context);
Toast.makeText(context, "Scheduled Download Started", Toast.LENGTH_LONG).show();
}
public static void startDebugging(Context context) {
Intent intent = new Intent(context, ScheduleTaskReceiver.class);
PendingIntent alarmIntent = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
AlarmManager alarms = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
alarms.set(AlarmManager.RTC_WAKEUP, System.currentTimeMillis() + 1000 * 20, alarmIntent);
SharedPreferences preferences = context.getSharedPreferences("debuggingAlarm", 0);
SharedPreferences.Editor editor2 = preferences.edit();
editor2.putBoolean("alarmOff", true);
editor2.apply();
}
public static boolean isAlarmSet(Context context) {
Intent intent = new Intent(context, ScheduleTaskReceiver.class);
return PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_NO_CREATE) != null;
}
public static void cancelAlarm(Context context) {
Intent intent = new Intent(context, ScheduleTaskReceiver.class);
PendingIntent alarmIntent = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_CANCEL_CURRENT);
AlarmManager alarms = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
alarms.cancel(alarmIntent);
alarmIntent.cancel();
OnBootCompletedReceiver.unregisterMe(context);
Toast.makeText(context, "Scheduled Download Cancelled", Toast.LENGTH_LONG).show();
}
private static int nearestQuarter(Calendar calendar) {
switch (calendar.get(Calendar.HOUR_OF_DAY)) {
case 0:
case 1:
case 2:
// return 3;
case 3:
case 4:
case 5:
return 6;
case 6:
case 7:
case 8:
// return 9;
default:
return 0;
}
}
@Override
public void onReceive(Context context, Intent intent) {
new SilentDownload(context, null).startSilentDownload();
}
}
<file_sep>include ':Comics', ':Utilities', ':AdvanceViews'
<file_sep>package ca.kklee.comics;
/**
* Created by Keith on 28/07/2014.
*/
public class SharedPrefConstants {
public static final String COMICNEWFLAG = "0";
public static final String COMICUPDATETIME = "1";
public static final String COMICERRORFLAG = "2";
public static final String OPENDRAWER = "drawer";
public static final String LASTUPDATE = "update";
}
<file_sep>package ca.kklee.comics.comic;
import android.graphics.Bitmap;
import com.kklee.utilities.Logger;
import java.io.File;
import java.io.IOException;
import ca.kklee.comics.BitmapLoader;
import ca.kklee.comics.FileUtil;
/**
* Created by Keith on 04/06/2014.
*/
public class Comic {
private String title;
private String shortForm;
private String url;
private Boolean enabled;
private Bitmap bitmap;
public String getTitle() {
if (shortForm == null || shortForm.equals(""))
return title;
return shortForm;
}
public String getFullTitle() {
return title;
}
public String getUrl() {
return url;
}
public Bitmap getBitmap() {
if (bitmap == null) {
bitmap = getBitmapFromFile();
}
return bitmap;
}
public void setBitmap(Bitmap bitmap) {
this.bitmap = bitmap;
}
public int getFileHashCode() {
File file = BitmapLoader.findFile(title);
if (file == null) {
return 0;
}
String code = file.getName();
code = code.replace(title + "_", "").replace(".png", "");
return Integer.parseInt(code);
}
public Bitmap getBitmapFromFile() {
File file = BitmapLoader.findFile(title);
if (file != null) {
return BitmapLoader.loadBitmap(file);
} else {
Logger.w("File not found: " + title);
return null;
}
}
public void saveBitmap(Bitmap bitmap, int hashCode) {
setBitmap(bitmap);
File file = BitmapLoader.findFile(title);
if (file != null) {
try {
if (!file.getCanonicalFile().delete())
Logger.wtf("File for %s not deleted!", title);
FileUtil.scanFile(file);
} catch (IOException e) {
Logger.e("IOException", e);
}
}
BitmapLoader.saveBitmap(title + "_" + hashCode, bitmap);
}
public void clearBitmap() {
this.bitmap = null;
}
public String toString() {
return title + " | " + url + " | " + " | " + enabled;
}
public Boolean getEnabled() {
return enabled;
}
public void setEnabled(Boolean enabled) {
this.enabled = enabled;
}
}<file_sep>package ca.kklee.comics;
import android.app.Activity;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.support.v7.app.AlertDialog;
import android.widget.Toast;
import ca.kklee.comics.scheduletask.ScheduleTaskReceiver;
/**
* Created by Keith on 30/06/2014.
*/
public class DebugDialogFactory {
public static Dialog createDialog(final HomeActivity activity) {
AlertDialog.Builder builder = new AlertDialog.Builder(activity, R.style.AppCompatAlertDialogStyle); //2 is for theme
builder.setTitle("Debugging")
.setItems(enumToStringList(activity), new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
switch (i) {
case 0:
if (ScheduleTaskReceiver.isAlarmSet(activity)) {
ScheduleTaskReceiver.cancelAlarm(activity);
} else {
ScheduleTaskReceiver.startScheduledTask(activity);
}
break;
case 1:
Toast.makeText(activity, "Keith made this", Toast.LENGTH_SHORT).show();
break;
case 2:
BitmapLoader.clearBitmap();
Intent intent = new Intent(activity, HomeActivity.class);
activity.finish();
activity.startActivity(intent);
break;
case 4:
debugging(activity);
break;
}
}
})
;
Dialog dialog = builder.create();
return dialog;
}
private static void debugging(Activity activity) {
ScheduleTaskReceiver.cancelAlarm(activity);
ScheduleTaskReceiver.startDebugging(activity);
}
private static String[] enumToStringList(Activity activity) {
String[] menuList = new String[MenuItems.values().length];
int i = 0;
for (MenuItems m : MenuItems.values()) {
menuList[i] = m.name();
if (menuList[i].equals(MenuItems.ALARM.name())) {
menuList[i] = getAlarmStateString(activity);
}
i++;
}
return menuList;
}
private static String getAlarmStateString(Activity activity) {
if (ScheduleTaskReceiver.isAlarmSet(activity)) {
return "Set Auto-Refresh OFF";
} else {
return "Set Auto-Refresh ON";
}
}
private enum MenuItems {ALARM, ABOUT}
}
<file_sep>package ca.kklee.comics.comic;
import android.content.Context;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.util.Patterns;
import android.view.View;
import android.webkit.URLUtil;
import android.widget.ImageView;
import com.kklee.utilities.Logger;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import ca.kklee.comics.AppConfig;
import ca.kklee.comics.R;
import ca.kklee.comics.SharedPrefConstants;
import ca.kklee.comics.scheduletask.NewComicListener;
/**
* Created by Keith on 05/06/2014.
*/
public class ComicLoader extends AsyncTask<String, Void, Bitmap> {
private int id;
private NewComicListener newComicListener;
private View rootView;
private String imageUrlString;
private ResultCode result = ResultCode.ERROR;
public ComicLoader(View rootView, int id, NewComicListener newComicListener) {
this.rootView = rootView;
this.id = id;
this.newComicListener = newComicListener;
}
@Override
protected Bitmap doInBackground(String... strings) {
Comic comic = ComicCollection.getInstance().getComics()[id];
URL imageUrl = downloadDom(strings[0]);
if (imageUrl == null) {
return null;
}
int newFileCode = imageUrl.toString().hashCode();
int oldFileCode = comic.getFileHashCode();
if (newFileCode == oldFileCode) {
result = ResultCode.NOUPDATE;
return null;
}
imageUrlString = imageUrl.toString();
return downloadImage(imageUrl);
}
@Override
protected void onPostExecute(Bitmap bitmap) {
if (bitmap != null) {
ComicCollection.getInstance().getComics()[id].saveBitmap(bitmap, imageUrlString.hashCode());
result = ResultCode.UPDATED;
if (rootView != null) {
ImageView imageView = (ImageView) rootView.findViewById(R.id.image_view);
imageView.setImageBitmap(bitmap);
imageView.setVisibility(View.VISIBLE);
rootView.findViewById(R.id.loading).setVisibility(View.GONE);
}
}
newComicResponse(result);
if (rootView != null && result.equals(ResultCode.ERROR)) {
ImageView errorView = (ImageView) rootView.findViewById(R.id.error_view);
errorView.setBackground(rootView.getResources().getDrawable(R.drawable.error));
errorView.setVisibility(View.VISIBLE);
rootView.findViewById(R.id.loading).setVisibility(View.GONE);
}
}
private URL downloadDom(String comicUrl) {
Logger.i("Attempt DOM Retrieval: " + comicUrl);
if (!Patterns.WEB_URL.matcher(comicUrl).matches()) {
Logger.e("Base URL not valid: " + comicUrl);
return null;
}
Document dom = getDom(comicUrl);
if (dom == null) {
return null;
}
URL imageUrl = null;
String imageUrlFromDOM = ComicDOMDictionary.getImageUrlFromDOM(dom, id);
try {
imageUrl = new URL(imageUrlFromDOM);
} catch (Exception e) {
Logger.e("Failed to create url: %s from dom for " + ComicCollection.getInstance().getComics()[id].getFullTitle(), e, imageUrlFromDOM);
}
return imageUrl;
}
private Document getDom(String url) {
try {
Connection.Response response = Jsoup.connect(url).timeout(10000).execute();
int statusCode = response.statusCode();
if (statusCode == 200) {
return response.parse();
} else {
Logger.e("Error getting dom: Response code " + statusCode);
}
} catch (IOException e) {
Logger.e("Exception getting dom: ", e);
}
return null;
}
private Bitmap downloadImage(URL url) {
Logger.i("Attempt DL image: " + url);
if (url == null || !URLUtil.isValidUrl(url.toString())) {
Logger.e("Download image url not valid: " + url);
return null;
}
try {
HttpURLConnection urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setDoInput(true);
urlConnection.connect();
int responseCode = urlConnection.getResponseCode();
if (responseCode != HttpURLConnection.HTTP_OK) {
Logger.e("Http response code not ok: " + responseCode + " ||| " + url);
return null;
}
InputStream inputStream = new BufferedInputStream(urlConnection.getInputStream());
try {
Bitmap bitmap = BitmapFactory.decodeStream(inputStream);
Logger.i("Success DLImage: " + url);
return bitmap;
} catch (OutOfMemoryError e) {
Logger.e("OutOfMemoryError while decoding bitmap, attempting downscale", e);
// return downscaleBitmap(url, new BufferedInputStream(urlConnection.getInputStream()));
} finally {
inputStream.close();
}
} catch (IOException e) {
Logger.e("IOException: ", e);
e.printStackTrace();
}
return null;
}
private void newComicResponse(ResultCode response) {
String title = ComicCollection.getInstance().getComics()[id].getTitle();
Context context = AppConfig.getContext();
SharedPreferences prefForNew = context.getSharedPreferences(SharedPrefConstants.COMICNEWFLAG, 0);
SharedPreferences.Editor editorForNew = prefForNew.edit();
SharedPreferences prefForError = context.getSharedPreferences(SharedPrefConstants.COMICERRORFLAG, 0);
SharedPreferences.Editor editorForError = prefForError.edit();
SharedPreferences prefForTime = context.getSharedPreferences(SharedPrefConstants.COMICUPDATETIME, 0);
SharedPreferences.Editor editorForTime = prefForTime.edit();
switch (response) {
case UPDATED:
editorForNew.putBoolean(title, true);
editorForNew.commit();
editorForTime.putLong(title, System.currentTimeMillis());
editorForTime.apply();
case NOUPDATE:
editorForError.putBoolean(title, false);
break;
case ERROR:
editorForError.putBoolean(title, true);
break;
}
editorForError.apply();
if (newComicListener != null) {
newComicListener.onDomCheckCompleted(title);
}
}
// private Bitmap downscaleBitmap(URL url, InputStream inputStream) {
// BitmapFactory.Options options = new BitmapFactory.Options();
// options.inJustDecodeBounds = true;
// BitmapFactory.decodeStream(inputStream, null, options);
// int bitmapW = options.outWidth;
// int bitmapH = options.outHeight;
// Logger.d("bitmap width: %d, height: %d", bitmapW, bitmapH);
//
//
// int scaleFactor = (int) Math.max(1.0, Math.min((double) bitmapW / (double) 2, (double) bitmapH / (double) 2)); //1, 2, 3, 4, 5, 6, ...
// scaleFactor = (int) Math.pow(2.0, Math.floor(Math.log((double) scaleFactor) / Math.log(2.0))); //1, 2, 4, 8, ...
//
// options.inJustDecodeBounds = false;
// options.inSampleSize = scaleFactor;
// options.inPurgeable = true;
//
// Bitmap bitmap = null;
// do {
// try {
// Logger.d("scaleFactor: " + scaleFactor);
// scaleFactor *= 2;
// bitmap = BitmapFactory.decodeStream(inputStream, null, options);
// } catch (OutOfMemoryError e) {
// options.inSampleSize = scaleFactor;
//
// }
// } while (bitmap == null && scaleFactor <= 256);
// if (bitmap == null) {
// Logger.d("OutOfMemoryError: downscale failed");
// } else {
// Logger.i("Success DLImage: " + url);
// }
//
// return bitmap;
// }
private enum ResultCode {
NOUPDATE, UPDATED, ERROR
}
}
<file_sep>package ca.kklee.comics.viewpager;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import ca.kklee.comics.SharedPrefConstants;
import ca.kklee.comics.comic.ComicCollection;
/**
* Created by Keith on 02/06/2014.
*/
public class SectionsPagerAdapter extends FragmentStatePagerAdapter {
private SharedPreferences prefForNew;
public SectionsPagerAdapter(FragmentManager fm, Activity activity) {
super(fm);
prefForNew = activity.getSharedPreferences(SharedPrefConstants.COMICNEWFLAG, Context.MODE_PRIVATE);
}
@Override
public Fragment getItem(int position) {
Bundle bundle = new Bundle();
bundle.putInt("ID", position);
ComicFragment comicFragment = new ComicFragment();
comicFragment.setArguments(bundle);
return comicFragment;
}
@Override
public int getCount() {
return ComicCollection.getInstance().getComics().length;
}
public CharSequence getPageTitle(int position) {
return ComicCollection.getInstance().getComics()[position].getTitle();
}
@Override
public int getItemPosition(Object object) {
ComicFragment fragment = (ComicFragment) object;
int id = fragment.getArguments().getInt("ID");
String title = ComicCollection.getInstance().getComics()[id].getTitle();
if (prefForNew.getBoolean(title, false)) {
ComicCollection.getInstance().getComics()[id].clearBitmap();
return POSITION_NONE;
} else {
return POSITION_UNCHANGED;
}
}
}
<file_sep>package ca.kklee.comics.scheduletask;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import com.kklee.utilities.ConnectionUtil;
import com.kklee.utilities.Logger;
/**
* Created by Keith on 07/08/2014.
*/
public class OnWifiReconnectedReceiver extends BroadcastReceiver {
public static void registerMe(Context context) {
context.getPackageManager().setComponentEnabledSetting(
new ComponentName(context, OnWifiReconnectedReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_ENABLED,
PackageManager.DONT_KILL_APP);
Logger.i("OnWifiConnReceiver enabled");
}
public static void unregisterMe(Context context) {
context.getPackageManager().setComponentEnabledSetting(
new ComponentName(context, OnBootCompletedReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_DISABLED,
PackageManager.DONT_KILL_APP);
Logger.i("OnWifiConnReceiver disabled");
}
@Override
public void onReceive(Context context, Intent intent) {
Logger.d("Wifi br onReceived");
if (ConnectionUtil.isOnline(context)) {
Logger.i("Resuming Silent Download");
new SilentDownload(context, null).startSilentDownload();
unregisterMe(context);
}
}
}
<file_sep>package ca.kklee.comics.options;
import android.content.Context;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.SwitchCompat;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.CompoundButton;
import android.widget.ListView;
import android.widget.TextView;
import java.util.ArrayList;
import ca.kklee.comics.R;
import ca.kklee.comics.scheduletask.ScheduleTaskReceiver;
/**
* Created by Keith on 24/06/2015.
*/
public class OptionsActivity extends AppCompatActivity {
private ListView list;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_options);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
list = (ListView) findViewById(R.id.options_list);
ArrayList<String> dummyList = new ArrayList<String>();
ArrayAdapter<String> adapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, dummyList); //:P
list.setAdapter(adapter);
initAutoRefreshSwitchView();
}
private void initAutoRefreshSwitchView() {
View refreshSwitchLayout = ((LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE)).inflate(R.layout.options_switch_layout, null, false);
SwitchCompat switchCompat = (SwitchCompat) refreshSwitchLayout.findViewById(R.id.options_refresh_switch);
TextView option_text = (TextView) refreshSwitchLayout.findViewById(R.id.options_text);
option_text.setTypeface(Typeface.createFromAsset(getAssets(), "fonts/ComicNeue-Regular-Oblique.ttf"));
switchCompat.setChecked(ScheduleTaskReceiver.isAlarmSet(this));
final Context context = this.getApplicationContext();
switchCompat.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
if (!ScheduleTaskReceiver.isAlarmSet(context)) {
ScheduleTaskReceiver.startScheduledTask(context);
buttonView.setChecked(ScheduleTaskReceiver.isAlarmSet(context));
}
} else {
if (ScheduleTaskReceiver.isAlarmSet(context)) {
ScheduleTaskReceiver.cancelAlarm(context);
buttonView.setChecked(ScheduleTaskReceiver.isAlarmSet(context));
}
}
}
});
list.addFooterView(refreshSwitchLayout);
}
}
|
d59aa1554ea181f976761cd28c11ef1fc32a84cf
|
[
"Markdown",
"Java",
"Gradle"
] | 12
|
Java
|
kklee305/Comics
|
ec1eec212d9f73a5f334b1c33ef315677d75179a
|
884d5161a5075e52c4038b0ea55db04a79b538ed
|
refs/heads/master
|
<file_sep>import React from 'react';
import {View, Text, StyleSheet, Image, TouchableOpacity} from 'react-native';
import Form from '../Component/Form.js';
import SignupForm from '../Component/SingupForm';
import Login from "./Login";
const LogoImg = require('../img/Mainlogo.png');
const Signup = ({navigation}) => {
// singup() {
// Actions.singup();
// }
return (
<View style={styles.container}>
<Image
source={LogoImg}
style={{
marginTop: 70,
width: 100,
height: 110,
marginBottom: 10,
}}
/>
{/* logo img */}
<Text style={{marginBottom: 50, fontSize: 18, }}>
Welcome to IKIK!
</Text>
{/*logo text*/}
{/* <Form type="Singup"/> */}
<SignupForm/>
<View style={styles.signupCont}>
<Text style={styles.signupText}>Already have an account? </Text>
<TouchableOpacity onPress = {()=> navigation.navigate('Login')}>
<Text style={styles.signupBtn}>Sign In</Text>
</TouchableOpacity>
</View>
</View>
);
}
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
},
signupCont: {
flexGrow: 1,
alignItems: 'center',
justifyContent: 'flex-end',
// paddingVertical: 10,
flexDirection: 'row',
},
signupText: {
// color: 'rgba(255,255,255,0.6)',
fontSize: 16,
},
signupBtn: {
color: '#87ceaf',
fontSize: 16,
fontWeight: '700',
},
});
export default Signup;
<file_sep>import React from 'react';
import Login from './Page/Login';
import Signup from './Page/Signup';
import Class from './Page/class';
import {createStackNavigator} from '@react-navigation/stack';
const RootStack = createStackNavigator();
const RootStackScreen = ({navigation}) => {
return (
<RootStack.Navigator headerMode="none">
<RootStack.Screen name="Login" component={Login} />
<RootStack.Screen name="Signup" component={Signup} />
<RootStack.Screen name="AddClass" component={Class} />
</RootStack.Navigator>
);
};
export default RootStackScreen;
<file_sep>import React, {Component} from 'react';
import {View, Text, StyleSheet, Image, TouchableOpacity} from 'react-native';
import Form from '../Component/Form.js';
import Signup from "../Page/Signup";
import Class from "../Page/class";
const LogoImg = require('../img/Mainlogo.png');
const Login = ({navigation}) => {
return (
<View style={styles.container}>
<Image
source={LogoImg}
style={{
marginTop: 150,
width: 100,
height: 110,
marginBottom: 10,
}}
/>
{/* logo img */}
<Text style={{marginBottom: 50, fontSize: 18}}>Welcome to IKIK!</Text>
{/*logo text*/}
<Form type="Login" />
<View style={styles.signupCont}>
<Text style={styles.signupText}>Don't have an account yet? </Text>
<TouchableOpacity onPress = {()=> navigation.navigate('Signup')}>
<Text style={styles.signupBtn}>Sign Up</Text>
</TouchableOpacity>
<TouchableOpacity onPress= {()=> navigation.navigate('AddClass')}>
<Text style={styles.signupBtn}>AddClass</Text>
</TouchableOpacity>
</View>
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
},
signupCont: {
flexGrow: 1,
alignItems: 'center',
justifyContent: 'flex-end',
paddingVertical: 10,
flexDirection: 'row',
},
signupText: {
// color: 'rgba(255,255,255,0.6)',
fontSize: 16,
},
signupBtn: {
color: '#87ceaf',
fontSize: 16,
fontWeight: '700',
},
});
export default Login;
<file_sep>/**
* @format
*/
import 'react-native-gesture-handler';
import {AppRegistry} from 'react-native';
import App from './App';
import {name as appName} from './app.json';
// import {NavigationContainer} from '@react-navigation/native';
// import {createStackNavigator} from '@react-navigation/stack';
// import Login from './Page/Login';
// import Signup from './Page/Signup';
// const AuthStack = createStackNavigator();
// export default () => (
// <NavigationContainer>
// <AuthStack.Navigator>
// <AuthStack.Screen name="Login" component={Login} />
// <AuthStack.Screen name="Signup" component={Signup} />
// </AuthStack.Navigator>
// </NavigationContainer>
// );
AppRegistry.registerComponent(appName, () => App);
<file_sep>import React, {useState} from 'react';
import {StyleSheet, Text, View, FlatList} from 'react-native';
// import './class.css';
import AddClass from '../Component/addclass.js';
import AddClass2 from '../Component/addclass2';
const Class = () => {
const [addClass, setaddClass] = useState([{text: 'buy coffee', key: '1'}]);
const pressHandler = (key) => {
setaddClass((prevClass) => {
return prevClass.filter((addClass) => addClass.key != key);
});
};
const submitHandler = (text) => {
setaddClass((prevClass) => {
return [{text: text, key: Math.random().toString()}, ...prevClass];
});
};
return (
<View>
<View>
<AddClass2 submitHandler={submitHandler} />
<FlatList
data={addClass}
style={styles.item}
renderItem={({item}) => (
<AddClass item={item} pressHandler={pressHandler} />
)}
/>
</View>
</View>
);
};
const styles = StyleSheet.create({
item: {
marginBottom: 10,
paddingHorizontal: 8,
paddingVertical: 6,
borderBottomWidth: 1,
borderBottomColor: '#b9ffe0',
},
});
export default Class;
<file_sep>import React, {Component} from 'react';
import {View, StyleSheet, Text} from 'react-native';
import RootStackScreen from './RootStackScreen';
import {NavigationContainer} from '@react-navigation/native';
import {createStackNavigator} from '@react-navigation/stack';
const RootStack = createStackNavigator();
const App = ({navigation}) => {
return(
<NavigationContainer>
<RootStackScreen/>
</NavigationContainer>
)
}
// class App extends Component {
// constructor(props){
// super(props)
// }
// render() {
// return (
// <NavigationContainer>
// <AuthStack.Navigator>
// <AuthStack.Screen name="Login" component={Login} initial />
// <AuthStack.Screen name="Signup" component={Signup} />
// </AuthStack.Navigator>
// </NavigationContainer>
// <View style={styles.mainView}>
// <Routes/>
// <Signup/>
// <Login/>
// </View>
// );
// }
// }
// const styles = StyleSheet.create({
// mainView: {
// flex: 1,
// paddingTop: 50,
// alignItems: 'center',
// justifyContent: 'center',
// fontSize: 18,
// },
// });
export default App;
|
175433d8ba5986a2ebc3072e0d1141686c2d0ed1
|
[
"JavaScript"
] | 6
|
JavaScript
|
kookmincapstone2/IKIK_Android
|
723d89e8d26e8b9908d542e7ab5e8aeff740474a
|
8f0cc425c2271a5177b61875c05dc1a3998316e2
|
refs/heads/master
|
<repo_name>emilam/CanvasBreakout<file_sep>/game.js
/*
** Simple canvas version of breakout
** by <NAME>
*/
PADDLE_SPEED = 10;
LEFT_KEY = 37;
RIGHT_KEY = 39;
SPACE_BAR = 32;
breakout = {};
$(function() {
breakout.pageLoaded();
});
/**
* Called to draw the screen. This is called over and over again
**/
breakout.gameLoop = function() {
breakout.clearBoard();
breakout.updatePaddle();
breakout.drawPaddle();
breakout.updateBalls();
breakout.drawBalls();
breakout.drawWall();
breakout.drawStatus();
if (breakout.lives <= 0) {
clearInterval(breakout.gameLoopID);
breakout.drawGameOver();
}
if (breakout.isNoBlocksLeft()) {
breakout.level += 1;
breakout.lives += 1;
breakout.score += 10000;
breakout.newLevel();
}
};
breakout.isNoBlocksLeft = function() {
for (var x = 0; x < breakout.blocks.length; x++) {
var block = breakout.blocks[x];
if (block.hits > 0)
return false;
}
return true;
};
/**
* Called when the page is finished loading. Sets up the gameboard
**/
breakout.pageLoaded = function() {
breakout.initGameBoard();
breakout.setupKeyboard();
breakout.setupMouse();
breakout.newGame();
};
/**
* Clears the canvas of anything drawn on it
**/
breakout.clearBoard = function() {
var context = breakout.gameBoard.context;
context.clearRect(0, 0, breakout.gameBoard.width, breakout.gameBoard.height);
};
/**
* Setup variables we want to use to keep track of the gameboard
**/
breakout.initGameBoard = function() {
var canvas = $("#gameboard")[0];
//Fix the stretch on the canvas from my css
canvas.width = $("#container").width();
canvas.height = $("#container").height();
breakout.gameBoard = {};
breakout.gameBoard.canvas = canvas;
breakout.gameBoard.context = canvas.getContext("2d");
breakout.gameBoard.width = canvas.width;
breakout.gameBoard.height = canvas.height;
};
/**
* Reset game pieces for a new game to start
**/
breakout.newGame = function() {
breakout.lives = 3;
breakout.level = 1;
breakout.score = 0;
breakout.setupPaddle();
breakout.newLevel();
//Start the gameloop! 15 is a long time. 10 might be better
breakout.gameLoopID = setInterval(breakout.gameLoop, 20);
};
/**
* Sets the screen up for a new level. Does not advance the level.
**/
breakout.newLevel = function() {
breakout.balls = [];
breakout.blocks = [];
breakout.addDefaultBall();
breakout.addBlockWall(0.07 * breakout.level);
};
breakout.drawGameOver = function() {
var context = breakout.gameBoard.context;
context.fillStyle = "#eee";
context.font = "40px sans-serif";
var y = breakout.gameBoard.height / 2;
var x = breakout.gameBoard.width / 2 - 120;
context.fillText("GAME OVER", x, y);
context.fillStyle = "#2f2";
var newGameX = x + 30;
var newGameY = y + 60;
context.fillText("New Game", newGameX, newGameY);
// We subtract 45 because of the baseline of the font
breakout.newGameButton = {x: newGameX, y: newGameY - 45, height: 45, width: 200, visible: true};
};
/**
* Draw score and lives status at the bottom
**/
breakout.drawStatus = function() {
var padding = 10;
var y = breakout.gameBoard.height - padding;
var context = breakout.gameBoard.context;
context.fillStyle = "#aaa";
context.font = "18px sans-serif";
// Draw lives
var liveX = padding;
context.fillText("Lives: " + breakout.lives, liveX, y);
// Draw score
var scoreX = breakout.gameBoard.width - 200;
context.fillText("Score: " + breakout.score, scoreX, y);
};
/**
* Setup the ball paddle
**/
breakout.setupPaddle = function() {
breakout.paddle = {
x: breakout.gameBoard.width / 2, //somewhere in the middle
y: breakout.gameBoard.height - 40, //somewhere near the bottom
width: 80, // big paddle
height: 15, // not so tall though
color: "#3333aa", //blue paddle
direction: 0, // + is moving right - is moving left
};
breakout.paddle.middleX = function() {
return breakout.paddle.x + breakout.paddle.width / 2;
}
};
/**
* Draw the paddle on the screen
**/
breakout.drawPaddle = function() {
var context = breakout.gameBoard.context;
context.fillStyle = breakout.paddle.color;
context.beginPath();
context.rect(
breakout.paddle.x,
breakout.paddle.y,
breakout.paddle.width,
breakout.paddle.height
);
context.closePath();
context.fill();
}
breakout.updatePaddle = function() {
if (breakout.paddle.direction == 0) {
return;
}
if (breakout.paddle.direction < 0 && (breakout.paddle.x <= 0)) {
return;
} else if (breakout.paddle.direction > 0 && (breakout.paddle.x + breakout.paddle.width >= breakout.gameBoard.width)) {
return;
}
breakout.paddle.x += breakout.paddle.direction;
};
/**
* Random color for blocks
**/
breakout.blockColor = function(block) {
switch (block.hits) {
case 1:
return "#e5e";
case 2:
return "#929";
case 3:
return "#666";
default:
return "#999";
}
};
breakout.drawWall = function() {
$.each(breakout.blocks, function(index, block) {
if (block.hits <= 0) {
return;
}
var context = breakout.gameBoard.context;
context.fillStyle = breakout.blockColor(block);
context.beginPath();
context.rect(block.x, block.y, block.width, block.height);
context.closePath();
context.fill();
});
};
/**
* Add a block to the wall at position i
**/
breakout.addBlockToWall = function(position) {
// For the beginning levels we will have bigger blocks
var numBlocksWide = Math.min(10, breakout.level + 5);
var blockHeight = 50;
var sidePadding = 100; // How far away from the walls do we want to be
var topPadding = 80;
var blockPadding = 1;
var blockWidth = (breakout.gameBoard.width - sidePadding*2 - (blockPadding * numBlocksWide)) / numBlocksWide;
var column = position % numBlocksWide;
var row = Math.floor((position - column) / numBlocksWide);
var x = (column * blockWidth) + sidePadding + (blockPadding * (column - 1));
var y = (row * blockHeight) + topPadding + (blockPadding * row);
var hits = Math.min(3, breakout.level);
breakout.addBlock(x, y, blockWidth, blockHeight, hits);
};
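/**
 * Hedged worked example (not part of the original game): at level 1 the wall
 * is numBlocksWide = min(10, 1 + 5) = 6 blocks wide, so position 7 lands in
 * column 7 % 6 = 1 on row floor((7 - 1) / 6) = 1. The self-check below only
 * repeats that arithmetic; it does not call addBlockToWall or touch game state.
 **/
(function() {
var numBlocksWide = 6;
var position = 7;
var column = position % numBlocksWide;
var row = Math.floor((position - column) / numBlocksWide);
console.assert(column === 1 && row === 1, "position 7 should map to row 1, column 1");
})();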
/**
* Randomly generates blocks for the level
* We do not promise more blocks each level, just a greater chance of them
**/
breakout.addBlockWall = function(blockChance) {
//A simple catch to make sure our level has some blocks
while (breakout.blocks.length == 0) {
for (var i = 0; i < 40; i++) {
if (Math.random() <= blockChance) {
breakout.addBlockToWall(i);
}
}
blockChance += 0.05;
}
}
breakout.addBlock = function(x, y, width, height, hits) {
if (breakout.blocks == undefined)
breakout.blocks = [];
var block = {
x: x,
y: y,
width: width,
height: height,
hits: hits
};
breakout.blocks.push(block);
};
/**
* Add a default game ball
**/
breakout.addDefaultBall = function() {
breakout.addBall(10, 10, 3.0, 5.5, 10);
};
/**
* Add a ball to the gameboard
**/
breakout.addBall = function(startX, startY, xVelocity, yVelocity, radius) {
if (breakout.balls == undefined)
breakout.balls = [];
// TODO: Add random colors to the balls!
var ball = {
x: startX,
y: startY,
xVelocity: xVelocity,
yVelocity: yVelocity,
radius: radius,
staged: true, // staged means the ball is connected to the paddle
alive: true
};
breakout.balls.push(ball);
};
/**
* Update the location of each of the balls
**/
breakout.updateBalls = function() {
var paddle = breakout.paddle;
//Update all the balls locations
$.each(breakout.balls, function(index, ball) {
if (!ball.alive) {
return;
}
if (!ball.staged) {
breakout.updateMovingBall(ball, paddle);
} else {
// The ball is still connected to the paddle
ball.x = paddle.x + paddle.width / 2;
ball.y = paddle.y - ball.radius;
}
if (!ball.alive) {
breakout.lives--;
if (breakout.lives > 0) {
breakout.addDefaultBall();
}
}
});
};
breakout.unstageBalls = function() {
$.each(breakout.balls, function(index, ball) {
ball.staged = false;
});
};
/**
* Update a ball that is moving around the screen
**/
breakout.updateMovingBall = function(ball, paddle) {
var futureX = ball.x + ball.xVelocity;
var futureY = ball.y + ball.yVelocity;
var reverseX = false;
var reverseY = false;
// Check if the ball went off the bottom of the board
if (futureY >= breakout.gameBoard.height) {
ball.alive = false;
}
if (futureX >= breakout.gameBoard.width || futureX <= 0) {
reverseX = true;
}
if (futureY <= 0) {
reverseY = true
} else if (breakout.isCollision(ball.x, ball.y, ball.radius, paddle)) {
reverseY = true;
// Increase the velocity of the ball based upon where it hit the paddle
ball.xVelocity += (ball.x - paddle.middleX()) / 10;
if (Math.abs(ball.xVelocity) > PADDLE_SPEED) {
// Assumed completion: the original body was empty, so this clamp is a hedged
// guess at the intent of keeping the ball no faster than the paddle.
ball.xVelocity = ball.xVelocity > 0 ? PADDLE_SPEED : -PADDLE_SPEED;
}
}
for(var index = 0; index < breakout.blocks.length; index++) {
var block = breakout.blocks[index];
// No reason processing if we have already reversed
if(reverseX && reverseY) {
break;
}
if (block.hits <= 0) {
continue;
}
if (breakout.isCollision(futureX, futureY, ball.radius, block)) {
if (breakout.isVerticalCollision(ball.x, ball.y, ball.radius, block)) {
reverseY = true;
} else {
reverseX = true;
}
block.hits -= 1;
breakout.score += 1000;
break;
}
}
if (reverseX) ball.xVelocity *= -1;
if (reverseY) ball.yVelocity *= -1;
ball.y += ball.yVelocity;
ball.x += ball.xVelocity;
};
/**
* Draw the balls on the screen
**/
breakout.drawBalls = function() {
$.each(breakout.balls, function(index, ball) {
if (!ball.alive) {
return;
}
var context = breakout.gameBoard.context;
context.fillStyle = "#22ee22";
context.beginPath();
context.arc(ball.x, ball.y, ball.radius, 0, Math.PI * 2, true);
context.closePath();
context.fill();
});
};
/**
* Naive check to see if we have a collision
**/
breakout.isCollision = function(x, y, radius, rect) {
return ((x - radius <= rect.x + rect.width) && (x + radius >= rect.x) &&
(y - radius <= rect.y + rect.height) && (y + radius >= rect.y));
};
/**
* Check for a collision on the top or bottom of a rect
* You must already know that you are going to have a collision
**/
breakout.isVerticalCollision = function(x, y, radius, rect) {
//If we have a collision, but we are "already" in collision for the x range then we know it's a vertical collision
return ((x - radius <= rect.x + rect.width) && (x + radius >= rect.x));
};
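/**
 * Hedged sanity check (not part of the original game): a ball of radius 3
 * centred at (5, -2) overlaps a hypothetical 10x10 block at the origin, and
 * because its x range already sits inside the block the hit is treated as a
 * vertical collision. Both helpers are pure functions, so calling them here
 * has no side effects.
 **/
(function() {
var block = {x: 0, y: 0, width: 10, height: 10};
console.assert(breakout.isCollision(5, -2, 3, block), "ball should overlap the block");
console.assert(breakout.isVerticalCollision(5, -2, 3, block), "x-range overlap means a vertical hit");
})();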
/**
* Setup the code to listen for mouse events
**/
breakout.setupMouse = function() {
$(breakout.gameBoard.canvas).click(breakout.mouseClicked);
};
breakout.mouseClicked = function(event) {
if (breakout.newGameButton && breakout.newGameButton.visible) {
if (breakout.isCollision(event.offsetX, event.offsetY, 2, breakout.newGameButton)) {
breakout.newGameButton.visible = false;
breakout.newGame();
}
}
};
/**
* Setup the code to listen to the keyboard
**/
breakout.setupKeyboard = function() {
$(document).keydown(breakout.keyPressed);
$(document).keyup(breakout.keyReleased);
};
breakout.keyPressed = function(event) {
if (event.keyCode == LEFT_KEY) {
breakout.paddle.direction = -PADDLE_SPEED;
} else if(event.keyCode == RIGHT_KEY) {
breakout.paddle.direction = PADDLE_SPEED;
} else if(event.keyCode == SPACE_BAR) {
breakout.unstageBalls();
}
};
breakout.keyReleased = function(event) {
if (event.keyCode == LEFT_KEY || event.keyCode == RIGHT_KEY) {
breakout.paddle.direction = 0;
}
};
|
4229c2068fed8b294b173ca23b493f8da2f9b269
|
[
"JavaScript"
] | 1
|
JavaScript
|
emilam/CanvasBreakout
|
544a26d4192ac95b76aeac914569398759242482
|
7d8c0d08a5c9072267041431c6f01c34922f9c9c
|
refs/heads/master
|
<file_sep># -*- coding: spec -*-
# This is a comparison of DSL syntax to what is generated
import six
import nose
import nose.tools
from sure import expect, this
import json
from noseOfYeti.tokeniser.support import noy_sup_setUp
from unittest import TestCase
import nose
import pkg_resources
import python_jsonschema_objects as pjs
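# Hedged illustration (not generated by this repository): noseOfYeti expands
# the spec DSL used below into ordinary unittest code, roughly turning
#
#   describe TestCase, 'markdown extraction':
#       it 'loads schema files':
#           ...
#
# into something like
#
#   class TestMarkdownExtraction(TestCase):
#       def test_loads_schema_files(self):
#           ...
#
# The exact generated class and method names are an assumption; see the
# noseOfYeti documentation for the precise transformation.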
describe TestCase, 'markdown extraction':
before_each:
md = pkg_resources.resource_filename('python_jsonschema_objects',
'../README.md')
self.examples = pjs.markdown_support.extract_code_blocks(md)
self.examples = {json.loads(v)['title']: json.loads(v) for v in self.examples['schema']}
self.example = self.examples['Example Schema']
it 'loads schema files':
self.examples.should.have.key('Other')
describe 'ObjectBuilder':
it 'should load memory: references':
builder = pjs.ObjectBuilder(self.examples['Other'], resolved=self.examples)
builder.should.be.ok
builder.validate.when.called_with({'MyAddress': 1234}).should.throw(pjs.ValidationError)
builder.validate.when.called_with({'MyAddress': '1234'}).should_not.throw(pjs.ValidationError)
it 'should be able to read an object':
for nm, ex in six.iteritems(self.examples):
builder = pjs.ObjectBuilder(ex, resolved=self.examples)
builder.should.be.ok
context "oneOf":
before_each:
builder = pjs.ObjectBuilder(self.examples['OneOf'], resolved=self.examples)
builder.should.be.ok
self.OneOf = builder.classes['Oneof']
it 'should validate against any of the provided schemas':
self.OneOf.from_json.when.called_with('{"MyData": "an address"}').should_not.throw()
self.OneOf.from_json.when.called_with('{"MyData": 1234}').should_not.throw()
it 'should fail to validate when given something that does not match':
self.OneOf.from_json.when.called_with(
'{"MyData": 1234.234}'
).should.throw(pjs.ValidationError)
context "additionalProperties":
before_each:
builder = pjs.ObjectBuilder(self.examples['Example Schema'], resolved=self.examples)
builder.should.be.ok
self.Person = builder.classes['ExampleSchema']
builder = pjs.ObjectBuilder(self.examples['Other'], resolved=self.examples)
builder.should.be.ok
self.Other = builder.classes['Other']
it 'should allow additionalProperties by default':
def set_attribute(object):
object.randomAttribute = 4
person = self.Person()
set_attribute.when.called_with(person).should_not.throw(Exception)
int(person.randomAttribute).should.equal(4)
it 'should be okay with additionalProperties == True':
builder = pjs.ObjectBuilder(self.examples['AddlPropsAllowed'], resolved=self.examples)
builder.should.be.ok
test = builder.classes['Addlpropsallowed']()
test.randomAttribute = 40
it 'should still raise errors when accessing undefined attributes':
person = self.Person()
#person.should_not.have.property('randomAttribute')
it 'should not allow undefined attributes if false':
def set_attribute(object):
object.randomAttribute = 4
other = self.Other()
set_attribute.when.called_with(other).should.throw(pjs.ValidationError)
context 'PersonExample':
before_each:
self.builder = pjs.ObjectBuilder(self.example, resolved=self.examples )
namespace = self.builder.build_classes()
self.Person = namespace.ExampleSchema
it 'should allow empty objects':
person = self.Person()
person.should.be.ok
it 'should allow attributes to be given':
person = self.Person(firstName="James",
lastName="Bond", age=35)
str(person.firstName).should.equal("James")
str(person.lastName).should.equal("Bond")
int(person.age).should.equal(35)
person.should.be.ok
it 'should validate when decoding from json':
self.Person.from_json.when.called_with(
'{"firstName":"James"}'
).should.throw(pjs.ValidationError)
it 'should validate enumerations':
person = self.Person()
def set_gender(gender):
person.gender = gender
set_gender.when.called_with("robot").should.throw(pjs.ValidationError)
set_gender.when.called_with("male").should_not.throw(pjs.ValidationError)
set_gender.when.called_with("female").should_not.throw(pjs.ValidationError)
it 'should validate mixed-type enumerations':
person = self.Person()
def set_status(status):
person.deceased = status
set_status.when.called_with("robot").should.throw(pjs.ValidationError)
set_status.when.called_with("yes").should_not.throw(pjs.ValidationError)
set_status.when.called_with("no").should_not.throw(pjs.ValidationError)
set_status.when.called_with(1).should_not.throw(pjs.ValidationError)
set_status.when.called_with(2).should.throw(pjs.ValidationError)
set_status.when.called_with(2.3).should.throw(pjs.ValidationError)
it 'should allow non-required attributes to be missing':
person = self.Person(firstName="James",
lastName="Bond")
person.should.be.ok
str(person.firstName).should.equal("James")
str(person.lastName).should.equal("Bond")
it 'should not allow required attributes to be missing':
person = self.Person(firstName="James")
person.validate.when.called_with().should.throw(
pjs.ValidationError
)
it 'should validate minimum age':
self.Person.when.called_with(
firstName="James", lastName="Bond",
age=-10).should.throw(pjs.ValidationError)
person = self.Person(firstName="James",
lastName="Bond")
def setage(x):
person.age = x
setage.when.called_with(-1).should.throw(pjs.ValidationError)
it 'should validate before serializing':
person = self.Person(firstName="James",
lastName="Bond")
person._properties['age'] = -1
person.serialize.when.called_with().should.throw(pjs.ValidationError)
it 'should remove null values when serializing':
person = self.Person(firstName="James",
lastName="Bond")
json_str = person.serialize()
json.loads(json_str).should_not.have.key('age')
it 'should serialize lists':
person = self.Person(
firstName="James",
lastName="Bond",
dogs=["Lassie", "Bobo"]
)
json_str = person.serialize()
json.loads(json_str).should.equal(
{
'firstName': "James",
'lastName': "Bond",
'dogs': ["Lassie", "Bobo"]
}
)
it 'should transform into dictionaries recursively':
pdict = dict(
firstName="James",
lastName="Bond",
dogs=["Lassie", "Bobo"]
)
person = self.Person( **pdict)
person.as_dict().should.equal(pdict)
<file_sep>Markdown==2.4
inflection==0.2.0
jsonpatch==1.3
jsonpointer==1.2
jsonschema==2.3.0
pandocfilters==1.2
six==1.5.2
<file_sep># -*- coding: spec -*-
import nose
import nose.tools
from sure import expect, this
import pkg_resources
import json
from noseOfYeti.tokeniser.support import noy_sup_setUp
from unittest import TestCase
import nose
import python_jsonschema_objects as pjs
from python_jsonschema_objects.validators import ValidationError, ArrayValidator
md = pkg_resources.resource_filename('python_jsonschema_objects',
'../README.md')
examples = pjs.markdown_support.extract_code_blocks(md)
example = json.loads(examples['schema'][0])
builder = pjs.ObjectBuilder(example)
Person = builder.build_classes().ExampleSchema
describe TestCase, 'ArrayValidator':
before_each:
pass
context '#create':
it 'should support empty validations':
ArrayValidator.create.when.called_with('hello').should_not.throw()
context 'item validations':
it 'should support dictionaries as item validators':
ArrayValidator.create.when.called_with(
'hello',
item_constraint={'type': 'string'}
).should_not.throw()
it 'should support tuples as item validators':
ArrayValidator.create.when.called_with(
'hello',
item_constraint=[{'type': 'string'}, {'type': 'string'}]
).should_not.throw()
it 'should throw an error if item validators are not classes or dicts':
ArrayValidator.create.when.called_with(
'hello',
item_constraint=['winner']
).should.throw(TypeError)
context 'validate':
it 'should validate basic array types':
validator = ArrayValidator.create(
'test',
item_constraint={'type': 'number'}
)
instance = validator([1,2,3,4])
instance.validate.when.called_with().should_not.throw(ValidationError)
instance = validator([1, 2, "Hello"])
instance.validate.when.called_with().should.throw(ValidationError)
it 'should validate basic tuple types':
validator = ArrayValidator.create(
'test',
item_constraint=[{'type': 'number'}, {'type': 'number'}]
)
instance = validator([1,2,3,4])
instance.validate.when.called_with().should_not.throw(ValidationError)
instance = validator([1, "Hello"])
instance.validate.when.called_with().should.throw(ValidationError)
it 'should validate arrays with object types':
validator = ArrayValidator.create(
'test',
item_constraint=Person
)
instance = validator([{'firstName': 'winner', 'lastName': 'Smith'} ])
instance.validate.when.called_with().should_not.throw(ValidationError)
instance = validator([{'firstName': 'winner', 'lastName': 'Dinosaur'}, {'firstName': 'BadMan'} ])
instance.validate.when.called_with().should.throw(ValidationError)
it 'should validate tuples with mixed types':
validator = ArrayValidator.create(
'test',
item_constraint=[Person, {'type': 'number'}]
)
instance = validator([{'firstName': 'winner', 'lastName': 'Dinosaur'}, 'fried' ])
instance.validate.when.called_with().should.throw(ValidationError)
instance = validator([{'firstName': 'winner', 'lastName': 'Dinosaur'}, 12324 ])
instance.validate.when.called_with().should_not.throw(ValidationError)
it 'should validate nested arrays':
validator = ArrayValidator.create(
'test',
item_constraint={'type': 'array', 'items': {'type': 'integer'}}
)
instance = validator([[1,2,4,5], [1,2,4]])
instance.validate.when.called_with().should_not.throw(ValidationError)
instance = validator([[1,2,'h',5], [1,2,4]])
instance.validate.when.called_with().should.throw(ValidationError)
instance = validator([[1,2,'h',5], [1,2,'4']])
instance.validate.when.called_with().should.throw(ValidationError)
it 'should validate length':
validator = ArrayValidator.create(
'test',
minItems=1,
maxItems=3
)
instance = validator(range(1))
instance.validate.when.called_with().should_not.throw(ValidationError)
instance = validator(range(2))
instance.validate.when.called_with().should_not.throw(ValidationError)
instance = validator(range(3))
instance.validate.when.called_with().should_not.throw(ValidationError)
instance = validator(range(4))
instance.validate.when.called_with().should.throw(ValidationError)
instance = validator([])
instance.validate.when.called_with().should.throw(ValidationError)
it 'should validate uniqueness':
validator = ArrayValidator.create(
'test',
uniqueItems=True
)
instance = validator([])
instance.validate.when.called_with().should_not.throw(ValidationError)
instance = validator([1,2,3,4])
instance.validate.when.called_with().should_not.throw(ValidationError)
instance = validator([1,2,2,4])
instance.validate.when.called_with().should.throw(ValidationError, 'uniqueness')
<file_sep>
[tox]
envlist = py27, py34
[testenv]
install_command = pip install --no-compile {opts} {packages}
commands = coverage run {envbindir}/nosetests --with-noy --rednose -s {posargs}
coverage html --omit=*test* --include=*python_jsonschema_objects*
deps =
-rrequirements.txt
nose
coverage
rednose
https://github.com/cwacek/sure/tarball/master
https://github.com/delfick/nose-of-yeti/tarball/master
|
5d6295543ca5af9000152f59e773cfaffbcb1df6
|
[
"Python",
"Text",
"INI"
] | 4
|
Python
|
Fitblip/python-jsonschema-objects
|
bec0c0506b4866f6c24d63e41941e05067bf3bcf
|
2d6549c4dcdb39d228086cc081c699eb34a7bd9d
|
refs/heads/master
|
<file_sep>../DanQ/predict-X.py<file_sep>#!/usr/bin/env python
from Bio import SeqIO
import click
from copy import deepcopy
import numpy as np
import pandas as pd
import torch
from torch.utils.data import DataLoader, TensorDataset
from models.danq import DanQ
from utils.io import parse_fasta_file, write
from utils.data import one_hot_encode, reverse_complement
CONTEXT_SETTINGS = {
"help_option_names": ["-h", "--help"],
}
@click.command(context_settings=CONTEXT_SETTINGS, no_args_is_help=True)
@click.option(
"-f", "--fasta-file",
help="FASTA file with sequences.",
metavar="FILENAME",
required=True
)
@click.option(
"-o", "--out-file",
help="Output file.",
metavar="FILENAME",
required=True
)
@click.option(
"-r", "--rev-complement",
help="Predict on reverse complement sequences.",
is_flag=True,
default=False
)
@click.option(
"-s", "--state-dict",
help="Model state dict to use.",
metavar="FILENAME",
required=True
)
@click.option(
"-t", "--threads",
default=1,
help="Number of CPU threads to use.",
show_default=True
)
def predict(fasta_file, out_file, state_dict, rev_complement=False, threads=1):
_predict(fasta_file, out_file, state_dict, rev_complement, threads)
def _predict(fasta_file, out_file, state_dict, rev_complement=False, threads=1):
# Sequences
sequences = []
for seq_record in parse_fasta_file(fasta_file):
sequences.append((seq_record.id, str(seq_record.seq).upper()))
df = pd.DataFrame(sequences, columns=["Id", "Sequence"])
# One-hot encode
encoded_sequences = []
for seq in df["Sequence"]:
encoded_sequences.append(one_hot_encode(seq))
encoded_sequences = np.array(encoded_sequences)
# TensorDataset
ix = np.array([[i] for i in range(len(sequences))])
dataset = TensorDataset(torch.Tensor(encoded_sequences), torch.Tensor(ix))
if rev_complement:
encoded_sequences_rc = np.array(reverse_complement(encoded_sequences))
dataset_rc = TensorDataset(
torch.Tensor(encoded_sequences_rc), torch.Tensor(ix)
)
# DataLoader
parameters = dict(batch_size=64, num_workers=threads)
dataloader = DataLoader(dataset, **parameters)
if rev_complement:
dataloader_rc = DataLoader(dataset_rc, **parameters)
# Predict
sequence_length = len(sequences[0][1])
predictions = __predict(sequence_length, 1, dataloader, state_dict)
if rev_complement:
predictions_rc = __predict(
sequence_length, 1, dataloader_rc, state_dict
)
else:
predictions_rc = np.empty((len(predictions)))
predictions_rc[:] = np.NaN
# Save predictions
zipped_predictions = np.array(
list(zip(df["Id"].to_list(), predictions, predictions_rc[:]))
)
df = pd.DataFrame(zipped_predictions, columns=["Id", "Fwd", "Rev"])
df.to_csv(out_file, compression="gzip", index=False)
def __predict(sequence_length, n_features, dataloader, state_dict):
predictions = None
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
model = DanQ(sequence_length, n_features).to(device)
model.load_state_dict(torch.load(state_dict))
model.eval() # set the model in evaluation mode
for seqs, labels in dataloader:
x = seqs.to(device) # shape = (batch_size, 4, 200)
labels = labels.to(device)
with torch.no_grad():
# Forward pass
outputs = model(x)
# Save predictions
if predictions is None:
predictions = outputs.data.cpu().numpy()
else:
predictions = np.append(
predictions, outputs.data.cpu().numpy(), axis=0
)
return(predictions.flatten())
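def _example_usage():
    # Hedged usage sketch (not part of the original script): calling _predict
    # directly from Python instead of through the click CLI. The file paths
    # below are hypothetical placeholders, not files shipped with this repo.
    _predict(
        "sequences.fa", "predictions.csv.gz", "model.pth.tar",
        rev_complement=True, threads=4
    )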
if __name__ == "__main__":
predict()<file_sep>#!/bin/bash
python get-encode.py --genome mm10 --feature accessibility
python get-encode.py --genome mm10 --feature tf
# Extract uniform DNase-seq regions of length 200 bp
if [ ! -f ./DNase-seq.200bp.bed ]; then
cut -f 1,2,4,6 ./DNase-seq.bed | awk '{print($1"\t"$2+$4-100"\t"$2+$4+100"\t"$3);}' | \
LC_ALL=C sort --parallel=8 -T ./ -k1,1 -k2,2n > ./DNase-seq.200bp.bed
fi
# Extract uniform CTCF ChIP-seq regions of length 200 bp centered at the peak max
if [ ! -f ./TF.CTCF.200bp.bed ]; then
cut -f 1,2,4,6 ./TF.CTCF.bed | awk '{print($1"\t"$2+$4-100"\t"$2+$4+100"\t"$3);}' | \
LC_ALL=C sort --parallel=8 -T ./ -k1,1 -k2,2n > ./TF.CTCF.200bp.bed
fi
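# Hedged illustration (not part of the original pipeline): after `cut -f 1,2,4,6`
# a record looks like "chrom  start  name  peak-offset", and the awk above emits
# a 200 bp window centred on the peak max. With a hypothetical record:
#   chr1  1000  CTCF  250  ->  chr1  1150  1350  CTCF
echo -e "chr1\t1000\tCTCF\t250" | awk '{print($1"\t"$2+$4-100"\t"$2+$4+100"\t"$3);}'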
<file_sep># CTCF-CBrown
<file_sep>import numpy as np
import os
from sklearn.metrics import average_precision_score
from sklearn.metrics import matthews_corrcoef
from sklearn.metrics import roc_auc_score
import torch
from utils.io import write
metrics = dict(
roc_auc=roc_auc_score,
average_precision=average_precision_score,
m_corr_coef=matthews_corrcoef
)
class Predictor(object):
"""
Adapted from:
https://selene.flatironinstitute.org/selene.html#trainmodel
This class ties together the various objects and methods needed to train
and validate a model.
{Predictor} saves a checkpoint model after every epoch as well as a
best-performing model to `output_dir`.
{Predictor} also outputs the training and validation losses to monitor if the
model is still improving of if there are signs of overfitting, etc.
Parameters
----------
model : `torch.nn.Module`
The model to use.
generator : `torch.utils.data.DataLoader`
The sequence generator.
output_dir : str, optional
Default is current working directory. The output directory to save
plots and predictions.
verbose: bool, optional
Default is `False`.
Attributes
----------
model : `torch.nn.Module`
The model to use.
generator : `torch.utils.data.DataLoader`
The generator of sequences.
metrics : dict
A dictionary that maps metric names (`str`) to metric functions. By
default, this contains `"roc_auc"`, which maps to
`sklearn.metrics.roc_auc_score`, `"average_precision"`, which maps to
`sklearn.metrics.average_precision_score`, and `"m_corr_coef"`, which
maps to `sklearn.metrics.matthews_corrcoef`.
output_dir : str
The directory to save predictions and plots.
"""
def __init__(
self, architecture, state_dict, generator, output_dir="./",
verbose=False
):
"""
Constructs a new `Predictor` object.
"""
self.generator = generator
self.output_dir = output_dir
os.makedirs(self.output_dir, exist_ok=True)
self._verbose = verbose
# CUDA
self._device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Load model (assumed completion: the original source stopped at a bare
# `model_file` statement, so the weight-loading step below is a hedged guess)
self.__initialize_model(architecture)
self.model.load_state_dict(torch.load(state_dict))
self.model.eval()
def __initialize_model(self, architecture, lr=0.003):
"""
Adapted from:
https://selene.flatironinstitute.org/utils.html#initialize-model
Initialize model (and associated criterion and optimizer)
Parameters
----------
architecture : str
Available model architectures: `danq`.
lr : float
Learning rate.
"""
if architecture == "danq":
from models.danq import (
DanQ as model_class, get_criterion, get_optimizer
)
# Infer the sequence length from the first batch of the generator
data, _ = next(iter(self.generator))
self.model = model_class(data.shape[-1], 1).to(self._device)
self.criterion = get_criterion()
self.optimizer = get_optimizer(self.model.parameters(), lr)
<file_sep>import json
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
import seaborn as sns
import shutil
from sklearn.metrics import (
average_precision_score, matthews_corrcoef, precision_recall_curve,
roc_curve, roc_auc_score
)
from time import time
import torch
from utils.io import write
from utils.pytorchtools import EarlyStopping
class Trainer(object):
"""
Adapted from:
https://selene.flatironinstitute.org/selene.html#trainmodel
This class ties together the various objects and methods needed to train
and validate a model.
{Trainer} saves a checkpoint model after every epoch as well as a
best-performing model to `output_dir`.
{Trainer} also outputs the training and validation losses to monitor if the
model is still improving or if there are signs of overfitting, etc.
Parameters
----------
architecture : str
The model architecture to train.
feature : str
The name of the feature to train.
generators : dict
A dictionary that maps the `train`, `validation` and `test` steps
to their corresponding `torch.utils.data.DataLoader` instances.
lr: float, optional
Default is 0.003. Sets the learning rate.
max_epochs : int, optional
Default is 100. The maximum number of epochs to iterate over.
output_dir : str, optional
Default is current working directory. The output directory to save
model checkpoints and logs in.
verbose: bool, optional
Default is `False`.
Attributes
----------
architecture : str
The name of the model architecture.
criterion : `torch.nn._Loss`
The loss function to optimize.
feature : str
The name of the feature.
generators : dict
The `torch.utils.data.DataLoader` for the `train`, `validation` and
`test` sets.
lr : float
The learning rate.
max_epochs : int
The maximum number of epochs to iterate over.
model : `torch.nn.Module`
The model to train.
optimizer : `torch.optim.Optimizer`
The optimizer to minimize loss with.
output_dir : str
The directory to save the model and files.
state_dict : str
Path to the model's `state_dict`.
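Examples
--------
A minimal, hedged usage sketch; the DataLoader construction below is an
illustrative assumption and not taken from this repository::

    >>> import torch
    >>> from torch.utils.data import DataLoader, TensorDataset
    >>> x = torch.rand(8, 4, 200)                 # 8 one-hot encoded sequences
    >>> y = torch.randint(0, 2, (8, 1)).float()   # binary labels
    >>> loader = DataLoader(TensorDataset(x, y), batch_size=4)
    >>> generators = {"train": loader, "validation": loader, "test": loader}
    >>> trainer = Trainer("danq", "CTCF", generators, max_epochs=1)
    >>> trainer.train_and_validate()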
"""
def __init__(
self, architecture, feature, generators, lr=0.003, max_epochs=100,
output_dir="./", verbose=False
):
"""
Constructs a new `Trainer` object.
"""
self.architecture = architecture
self.feature = feature
self.generators = generators
self.lr = lr
self.max_epochs = max_epochs
self.output_dir = output_dir
os.makedirs(self.output_dir, exist_ok=True)
self._verbose = verbose
# CUDA
self._use_cuda = torch.cuda.is_available()
self._device = torch.device("cuda:0" if self._use_cuda else "cpu")
# Losses
self.train_losses = []
self.validation_losses = []
# Predictions
self.predictions = None
self.labels = None
# Metrics
self.metrics = {}
# Initialize model
self.__initialize_model(architecture, lr)
if self._verbose:
write(
None,
"Training parameters: batch size {0}, "
"maximum number of epochs: {1}, "
"use cuda: {2}".format(
generators["train"].__dict__["batch_size"],
self.max_epochs,
self._use_cuda
)
)
def __initialize_model(self, architecture, lr=0.003):
"""
Adapted from:
https://selene.flatironinstitute.org/utils.html#initialize-model
Initialize model (and associated criterion and optimizer)
Parameters
----------
architecture : str
Available model architectures: `danq`.
lr : float
Learning rate.
"""
if architecture == "danq":
from models.danq import (
DanQ as model_class, get_criterion, get_optimizer
)
data, labels = next(iter(self.generators["train"]))
self.model = model_class(data.shape[-1], 1).to(self._device)
self.criterion = get_criterion()
self.optimizer = get_optimizer(self.model.parameters(), lr)
self.state_dict = os.path.join(self.output_dir, "model.pth.tar")
def train_and_validate(self):
"""Trains the model and measures validation performance."""
early_stopping = EarlyStopping(20, True, path=self.state_dict)
epoch_len = len(str(self.max_epochs))
for epoch in range(1, self.max_epochs + 1):
# Train
t_time = time()
t_losses = self.train()
t_loss = np.average(t_losses)
t_time = time() - t_time
self.train_losses.append(t_losses)
# Validate
v_time = time()
v_losses = self.validate()
v_loss = np.average(v_losses)
v_time = time() - v_time
self.validation_losses.append(v_losses)
if self._verbose:
write(
None,
(f'[{epoch:>{epoch_len}}/{self.max_epochs:>{epoch_len}}] '
+f'train_loss: {t_loss:.5f} ({t_time:.3f} sec) '
+f'valid_loss: {v_loss:.5f} ({v_time:.3f} sec)')
)
# EarlyStopping needs to check if the validation loss has decreased,
# and if it has, it will save the current model.
early_stopping(v_loss, self.model)
if early_stopping.early_stop:
if self._verbose:
write(None, "Stop!!!")
break
def train(self):
"""
Returns
-------
list
All losses.
"""
self.model.train() # set the model in train mode
losses = []
for inputs, targets in self.generators["train"]:
inputs = inputs.to(self._device)
targets = targets.to(self._device)
# Zero existing gradients so they don't add up
self.optimizer.zero_grad()
# Forward pass
outputs = self.model(inputs)
loss = self.criterion(outputs, targets)
# Back-propagate and optimize
loss.backward()
self.optimizer.step()
losses.append(loss.item())
return(losses)
def validate(self):
"""
Returns
-------
list
All losses.
"""
self.model.eval() # set the model in evaluation mode
losses = []
for inputs, targets in self.generators["validation"]:
inputs = inputs.to(self._device)
targets = targets.to(self._device)
with torch.no_grad():
# Forward pass
outputs = self.model(inputs)
loss = self.criterion(outputs, targets)
losses.append(loss.item())
return(losses)
def visualize_loss(self):
# Losses to DataFrame
data = []
for i in range(len(self.train_losses)):
for j in range(len(self.train_losses[i])):
data.append(["train", i+1, j+1, self.train_losses[i][j]])
for i in range(len(self.validation_losses)):
for j in range(len(self.validation_losses[i])):
data.append(
["validation", i+1, j+1, self.validation_losses[i][j]]
)
df = pd.DataFrame(data, columns=["Mode", "Epoch", "Batch", "Loss"])
# Seaborn aesthetics
sns.set_context("paper", font_scale=1.5, rc={"lines.linewidth": 1.5})
sns.set_palette(sns.color_palette(["#1965B0", "#DC050C"]))
# Plot losses
g = sns.lineplot(x="Epoch", y="Loss", hue="Mode", data=df)
# Plot best epoch (i.e. lowest validation loss)
best_epoch = df[(df.Mode == "validation")][["Epoch", "Loss"]]\
.groupby("Epoch").mean().idxmin()
g.axvline(
int(best_epoch), linestyle=":", color="dimgray", label="best epoch"
)
# Plot legend
g.legend_.remove()
handles, labels = g.axes.get_legend_handles_labels()
plt.legend(handles, labels, frameon=False)
# Fine-tune plot
g.set(xlim=(0, int(df["Epoch"].max()) + 1))
g.set(ylim=(0, 0.5))
# Remove spines
sns.despine()
# Save & close
fig = g.get_figure()
fig.savefig(os.path.join(self.output_dir, "loss.png"))
plt.close(fig)
def test(self):
# Load the best model
self.model.load_state_dict(torch.load(self.state_dict))
self.model.eval() # set the model in evaluation mode
for inputs, targets in self.generators["test"]:
inputs = inputs.to(self._device)
targets = targets.to(self._device)
with torch.no_grad():
# Forward pass
outputs = torch.sigmoid(self.model(inputs))
if self.predictions is None and self.labels is None:
self.predictions = outputs.data.cpu().numpy()
self.labels = targets.data.cpu().numpy()
else:
self.predictions = np.append(
self.predictions, outputs.data.cpu().numpy(), axis=0
)
self.labels = np.append(
self.labels, targets.data.cpu().numpy(), axis=0
)
def compute_performance_metrics(self):
# Metrics
metrics = ["AUCPR", "AUCROC", "MCC"]
# Flatten predictions/labels
predictions = self.predictions.flatten()
labels = self.labels.flatten()
# Losses to DataFrame
for metric in metrics:
if metric == "AUCPR":
score = average_precision_score(labels, predictions)
self.metrics.setdefault(metric, score)
prec, recall, _ = precision_recall_curve(labels, predictions)
# i.e. precision = 0, recall = 1
prec = np.insert(prec, 0, 0., axis=0)
recall = np.insert(recall, 0, 1., axis=0)
data = list(zip(recall, prec))
self.__visualize_metric(data, ["Recall", "Precision"], metric)
elif metric == "AUCROC":
score = roc_auc_score(labels, predictions)
self.metrics.setdefault(metric, score)
fpr, tpr, _ = roc_curve(labels, predictions)
data = list(zip(fpr, tpr))
self.__visualize_metric(data, ["Fpr", "Tpr"], metric)
elif metric == "MCC":
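                # MCC needs hard class labels, so the sigmoid outputs are
                # rounded with np.rint, i.e. thresholded at 0.5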
score = matthews_corrcoef(labels, np.rint(predictions))
self.metrics.setdefault(metric, score)
if self._verbose:
write(
None,
(f'Final performance metrics: '
+f'AUCROC: {self.metrics["AUCROC"]:.5f}, '
+f'AUCPR: {self.metrics["AUCPR"]:.5f}, '
+f'MCC: {self.metrics["MCC"]:.5f}')
)
def __visualize_metric(self, data, labels, metric):
# Metric to DataFrame
df = pd.DataFrame(data, columns=labels)
# Seaborn aesthetics
sns.set_context("paper", font_scale=1.5, rc={"lines.linewidth": 1.5})
sns.set_palette(sns.color_palette(["#1965B0"]))
# Plot metric
kwargs = dict(estimator=None, ci=None)
g = sns.lineplot(x=labels[0], y=labels[1], data=df, **kwargs)
# Add metric score
kwargs = dict(horizontalalignment="center", verticalalignment="center")
plt.text(.5, 0, "%s = %.5f" % (metric, self.metrics[metric]), **kwargs)
# Remove spines
sns.despine()
# Save & close
fig = g.get_figure()
fig.savefig(os.path.join(self.output_dir, "%s.png" % metric))
plt.close(fig)
def save(self):
# Remove non-serializable keys
trainer_dict = {}
for k, v in self.__dict__.items():
            try:
                json.dumps(v)
                is_JSON_serializable = True
            except (TypeError, OverflowError, ValueError):
                is_JSON_serializable = False
if is_JSON_serializable:
trainer_dict.setdefault(k, v)
else:
trainer_dict.setdefault(k, None)
# Write JSON
json_file = os.path.join(self.output_dir, "model.json")
fh = open(json_file, "w")
json.dump(
trainer_dict, fh, sort_keys=True, indent=4, separators=(",", ": ")
)
fh.close()
write(None, "`Trainer` object saved!\n%s\n" % json_file)
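

# ---------------------------------------------------------------------------
# Reference sketch (assumption): train_and_validate() above relies on an
# `EarlyStopping` helper that is imported or defined earlier in this module
# (not shown here). The hypothetical class below, deliberately named
# `_EarlyStoppingSketch` so it does not shadow the real helper, illustrates
# the minimal behaviour the calls above expect:
#   EarlyStopping(patience, verbose, path=...)  -> construct
#   early_stopping(val_loss, model)             -> update / checkpoint
#   early_stopping.early_stop                   -> stop flag
# ---------------------------------------------------------------------------
class _EarlyStoppingSketch(object):

    def __init__(self, patience=20, verbose=False, path="checkpoint.pt"):
        self.patience = patience  # epochs to wait after the last improvement
        self.verbose = verbose
        self.path = path          # where the best model state_dict is saved
        self.counter = 0
        self.best_loss = None
        self.early_stop = False

    def __call__(self, val_loss, model):
        if self.best_loss is None or val_loss < self.best_loss:
            # Validation loss improved: checkpoint the model, reset the counter
            self.best_loss = val_loss
            torch.save(model.state_dict(), self.path)
            self.counter = 0
        else:
            self.counter += 1
            if self.counter >= self.patience:
                self.early_stop = True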
<file_sep>import numpy as np
import os
import shutil
from sklearn.metrics import average_precision_score
from sklearn.metrics import matthews_corrcoef
from sklearn.metrics import roc_auc_score
from time import strftime
from time import time
import torch
# from torch.autograd import Variable
from torch.optim.lr_scheduler import ReduceLROnPlateau
from utils.io import write
from .utils import PerformanceMetrics
# torch.backends.cudnn.benchmark = True
# Default metrics
default_metrics = dict(
roc_auc=roc_auc_score,
average_precision=average_precision_score,
m_corr_coef=matthews_corrcoef
)
class Trainer(object):
"""
Adapted from:
https://selene.flatironinstitute.org/selene.html#trainmodel
This class ties together the various objects and methods needed to train
and validate a model.
{TrainModel} saves a checkpoint model (overwriting it after
`save_checkpoint_every_n_steps`) as well as a best-performing model
(overwriting it after `report_stats_every_n_steps` if the latest validation
performance is better than the previous best-performing model) to
`output_dir`.
{TrainModel} also outputs 2 files that can be used to monitor training
as Selene runs: `selene_sdk.train_model.train.txt` (training loss) and
`selene_sdk.train_model.validation.txt` (validation loss & average
ROC AUC). The columns in these files can be used to quickly visualize
training history (e.g. you can use `matplotlib`, `plt.plot(auc_list)`)
and see, for example, whether the model is still improving, if there
are signs of overfitting, etc.
Parameters
----------
model : torch.nn.Module
The model to train.
criterion : torch.nn._Loss
The loss function to optimize.
optimizer : torch.optim.Optimizer
The optimizer to minimize loss with.
feature_index : dict
        A dictionary that maps feature indices (`int`) to names (`str`).
generators : dict
A dictionary that maps the `train`, `validation` and `test` steps
to `torch.utils.data.DataLoader` instances.
cpu_n_threads : int, optional
Default is 1. Sets the number of OpenMP threads used for CPU
operations.
max_steps : int, optional
Default is 10000. The maximum number of mini-batches to iterate
over.
output_dir : str, optional
Default is current working directory. The output directory to save
model checkpoints and logs in.
report_stats_every_n_steps : int
The frequency with which to report summary statistics. You can set
this value to be equivalent to a training epoch
(`n_steps * batch_size`) being the total number of samples seen by
the model so far. Selene evaluates the model on the validation
dataset every `report_stats_every_n_steps` and, if the model obtains
the best performance so far (based on the user-specified loss function),
Selene saves the model state to a file called `best_model.pth.tar` in
`output_dir`.
save_checkpoint_every_n_steps : int or None, optional
Default is 1000. If None, set to the same value as
`report_stats_every_n_steps`
verbose: bool, optional
Default is `False`.
Attributes
----------
feature_index : dict
The names of each feature.
generator : torch.utils.data.DataLoader
The generator for the `train`, `validation` and `test` sets.
loss_criterion : torch.nn._Loss
The loss function to optimize.
model : torch.nn.Module
The model to train.
optimizer_class : torch.optim.Optimizer
The optimizer to minimize loss with.
max_steps : int
The maximum number of mini-batches to iterate over.
metrics : dict
A dictionary that maps metric names (`str`) to metric functions. By
default, this contains `"roc_auc"`, which maps to
`sklearn.metrics.roc_auc_score`, `"average_precision"`, which maps to
`sklearn.metrics.average_precision_score`, and `"m_corr_coef"`, which
maps to `sklearn.metrics.matthews_corrcoef`.
output_dir : str
The directory to save model checkpoints and logs.
nth_step_report_stats : int
The frequency with which to report summary statistics.
nth_step_save_checkpoint : int
The frequency with which to save a model checkpoint.
"""
def __init__(
self,
model,
criterion,
optimizer,
feature_index,
generators,
cpu_n_threads=1,
max_steps=10000,
metrics=default_metrics,
output_dir="./",
report_stats_every_n_steps=1000,
save_checkpoint_every_n_steps=1000,
verbose=False
):
"""
Constructs a new `TrainModel` object.
"""
torch.set_num_threads(cpu_n_threads)
self.model = model
self.criterion = criterion
self.optimizer = optimizer
self.generators = generators
# Optional
self.max_steps = max_steps
self.nth_step_report_stats = report_stats_every_n_steps
self.nth_step_save_checkpoint = None
if not save_checkpoint_every_n_steps:
self.nth_step_save_checkpoint = report_stats_every_n_steps
else:
self.nth_step_save_checkpoint = save_checkpoint_every_n_steps
os.makedirs(output_dir, exist_ok=True)
self.output_dir = output_dir
self.verbose = verbose
# CUDA
self.use_cuda = torch.cuda.is_available()
if self.use_cuda:
self.device = torch.device("cuda:0")
self.model.cuda()
self.criterion.cuda()
else:
self.device = torch.device("cpu")
# Metrics
self._validation_metrics = PerformanceMetrics(
feature_index, 1, metrics
)
self._test_metrics = PerformanceMetrics(
feature_index, 1, metrics
)
# Extra
self._min_loss = float("inf")
self._targets = dict(validation=[], test=[])
for key in self._targets:
for inputs, targets in self.generators[key]:
self._targets[key] += targets
self._targets[key] = np.expand_dims(
np.array(self._targets[key]), axis=1
)
if self.verbose:
write(
None,
"Training parameters set: batch size {0}, "
"number of steps per epoch: {1}, "
"maximum number of steps: {2}, "
"use cuda: {3}".format(
generators["train"].__dict__["batch_size"],
self.nth_step_report_stats,
self.max_steps,
self.use_cuda
)
)
# def _create_validation_set(self, n_samples=None):
# """
# Generates the set of validation examples.
# Parameters
# ----------
# n_samples : int or None, optional
# Default is `None`. The size of the validation set. If `None`,
# will use all validation examples in the sampler.
# """
# logger.info("Creating validation dataset.")
# t_i = time()
# # self._validation_data, self._all_validation_targets = \
# # self.sampler.get_validation_set(
# # self.batch_size, n_samples=n_samples)
# t_f = time()
# logger.info(("{0} s to load {1} validation examples ({2} validation "
# "batches) to evaluate after each training step.").format(
# t_f - t_i,
# len(self._validation_data) * self.batch_size,
# len(self._validation_data)))
# def create_test_set(self):
# """
# Loads the set of test samples.
# We do not create the test set in the `TrainModel` object until
# this method is called, so that we avoid having to load it into
# memory until the model has been trained and is ready to be
# evaluated.
# """
# logger.info("Creating test dataset.")
# t_i = time()
# # self._test_data, self._all_test_targets = \
# # self.sampler.get_test_set(
# # self.batch_size, n_samples=self._n_test_samples)
# t_f = time()
# logger.info(("{0} s to load {1} test examples ({2} test batches) "
# "to evaluate after all training steps.").format(
# t_f - t_i,
# len(self._test_data) * self.batch_size,
# len(self._test_data)))
# np.savez_compressed(
# os.path.join(self.output_dir, "test_targets.npz"),
# data=self._all_test_targets)
def train_and_validate(self):
"""
Trains the model and measures validation performance.
"""
# training_times = []
# min_loss = self._min_loss
# scheduler = ReduceLROnPlateau(
# self.optimizer, "min", patience=16, verbose=True, factor=0.8
# )
for epoch in range(10):
self.model.train() #tell model explicitly that we train
running_loss = 0.0
for seqs, labels in self.generators["train"]:
                # Debug inspection (commented out so the loop actually trains):
                # print(seqs[0][:5])
                # print(seqs.transpose(1, 2)[0])
                # exit(0)
                # print(labels)
x = seqs.to(self.device, dtype=torch.float) #the input here is (batch_size, 4, 200)
labels = labels.to(self.device, dtype=torch.float)
#zero the existing gradients so they don't add up
self.optimizer.zero_grad()
# Forward pass
outputs = self.model(x.transpose(1, 2))
                # Debug inspection (commented out so training continues):
                # print(outputs)
                # print(seqs.shape, seqs.transpose(1, 2).shape)
                # print(labels.shape)
                # print(outputs.shape)
                # exit(0)
loss = self.criterion(outputs, labels)
# Backward and optimize
loss.backward()
self.optimizer.step()
running_loss += loss.item()
#save training loss
# return(running_loss / len(self.generators["train"]))
print(running_loss / len(self.generators["train"]))
# # Train
# t = time()
# train_loss = self.train()
# training_times.append(time() - t)
# # Checkpoint
# checkpoint_dict = {
# "epoch": epoch,
# "arch": self.model.__class__.__name__,
# "state_dict": self.model.state_dict(),
# "min_loss": min_loss,
# "optimizer": self.optimizer.state_dict()
# }
# write(
# None,
# "Epoch {0}: train loss is `{1}`.".format(epoch, train_loss)
# )
# checkpoint_filename = "checkpoint-{0}".format(
# strftime("%m%d%H%M%S")
# )
# self.__save_checkpoint(
# checkpoint_dict, False, prefix=checkpoint_filename
# )
# write(
# None,
# "Checkpoint `{0}.pth.tar` saved.".format(checkpoint_filename)
# )
# # Validate
# valid_scores = self.validate()
# validation_loss = valid_scores["loss"]
# exit(0)
# self._train_logger.info(train_loss)
# to_log = [str(validation_loss)]
# for k in sorted(self._validation_metrics.metrics.keys()):
# if k in valid_scores and valid_scores[k]:
# to_log.append(str(valid_scores[k]))
# else:
# to_log.append("NA")
# self._validation_logger.info("\t".join(to_log))
# scheduler.step(math.ceil(validation_loss * 1000.0) / 1000.0)
# if validation_loss < min_loss:
# min_loss = validation_loss
# self.__save_checkpoint({
# "step": step,
# "arch": self.model.__class__.__name__,
# "state_dict": self.model.state_dict(),
# "min_loss": min_loss,
# "optimizer": self.optimizer.state_dict()}, True)
# logger.debug("Updating `best_model.pth.tar`")
# logger.info("training loss: {0}".format(train_loss))
# logger.info("validation loss: {0}".format(validation_loss))
# Logging training and validation on same line requires 2 parsers or more complex parser.
# Separate logging of train/validate is just a grep for validation/train and then same parser.
# self.sampler.save_dataset_to_file("train", close_filehandle=True)
def train(self):
"""
Returns
-------
float
The average loss.
"""
# self.model.train()
# acc_loss = []
# for inputs, targets in self.generators["train"]:
# inputs = inputs.to(device=self.device, dtype=torch.float)
# targets = targets.to(device=self.device, dtype=torch.float)
# self.optimizer.zero_grad()
# predictions = self.model(inputs.transpose(1, 2))
# loss = self.criterion(predictions, targets)
# loss.backward()
# self.optimizer.step()
# batch_losses.append()
# print(batch_losses)
# exit(0)
# return(sum(batch_losses) / len(self.generators["train"]))
self.model.train() #tell model explicitly that we train
running_loss = 0.0
for seqs, labels in self.generators["train"]:
            # Debug inspection (commented out so the loop actually trains):
            # print(seqs)
            # print(labels)
            # exit(0)
x = seqs.to(self.device, dtype=torch.float) #the input here is (batch_size, 4, 200)
labels = labels.to(self.device, dtype=torch.float)
#zero the existing gradients so they don't add up
self.optimizer.zero_grad()
# Forward pass
outputs = self.model(x.transpose(1, 2))
loss = self.criterion(outputs, labels)
# Backward and optimize
loss.backward()
self.optimizer.step()
running_loss += loss.item()
#save training loss
return(running_loss / len(self.generators["train"]))
def __evaluate_generator(self, generator):
"""
Makes predictions for some labeled input data.
Parameters
----------
generator : `torch.utils.data.DataLoader`
A `torch.utils.data.DataLoader` instance.
Returns
-------
tuple(float, list(numpy.ndarray))
Returns the average loss, and the list of all predictions.
"""
self.model.eval()
batch_losses = []
batch_predictions = []
for inputs, targets in generator:
inputs = inputs.to(device=self.device, dtype=torch.float)
targets = targets.to(device=self.device, dtype=torch.float)
with torch.no_grad():
predictions = self.model(inputs.transpose(1, 2))
loss = self.criterion(predictions, targets)
batch_predictions.append(predictions.data.cpu().numpy())
batch_losses.append(loss.item())
return(np.average(batch_losses), np.vstack(batch_predictions))
def validate(self):
"""
Measures model validation performance.
Returns
-------
dict
A dictionary, where keys are the names of the loss metrics,
and the values are the average value for that metric over
the validation set.
"""
validation_loss, batch_predictions = self.__evaluate_generator(
self.generators["validation"]
)
average_scores = self._validation_metrics.update(
batch_predictions, np.array(self._targets["validation"])
)
for name, score in average_scores.items():
write(None, "validation {0}: {1}".format(name, score))
average_scores["loss"] = validation_loss
return(average_scores)
# def evaluate(self):
# """
# Measures the model test performance.
# Returns
# -------
# dict
# A dictionary, where keys are the names of the loss metrics,
# and the values are the average value for that metric over
# the test set.
# """
# if self._test_data is None:
# self.create_test_set()
# average_loss, all_predictions = self._evaluate_on_data(
# self._test_data)
# average_scores = self._test_metrics.update(all_predictions,
# self._all_test_targets)
# np.savez_compressed(
# os.path.join(self.output_dir, "test_predictions.npz"),
# data=all_predictions)
# for name, score in average_scores.items():
# logger.info("test {0}: {1}".format(name, score))
# test_performance = os.path.join(
# self.output_dir, "test_performance.txt")
# feature_scores_dict = self._test_metrics.write_feature_scores_to_file(
# test_performance)
# average_scores["loss"] = average_loss
# self._test_metrics.visualize(
# all_predictions, self._all_test_targets, self.output_dir)
# return (average_scores, feature_scores_dict)
def __save_checkpoint(self, state, is_best, prefix="checkpoint"):
"""
Saves snapshot of the model state to file. Will save a checkpoint with
name `<prefix>.pth.tar` and, if this is the model's best performance
so far, will save the state to a `best_model.pth.tar` file as well.
        Models are saved in the state dictionary format. This is a more stable
        format compared to saving the whole model (which is another option
        supported by PyTorch). Note that we do save a number of additional
        parameters in the dictionary and that the actual `model.state_dict()`
        is stored in the `state_dict` key of the dictionary loaded by
        `torch.load`.
See: https://pytorch.org/docs/stable/notes/serialization.html for more
information about how models are saved in PyTorch.
Parameters
----------
state : dict
Information about the state of the model. Note that this is
not `model.state_dict()`, but rather, a dictionary containing
keys that can be used for continued training in Selene
_in addition_ to a key `state_dict` that contains
`model.state_dict()`.
is_best : bool
Is this the model's best performance so far?
prefix : str, optional
Default is "checkpoint". Specify the checkpoint prefix. Will append
a file extension to the end (e.g. `checkpoint.pth.tar`).
Returns
-------
None
"""
file_name = os.path.join(self.output_dir, "%s.pth.tar" % prefix)
torch.save(state, file_name)
if is_best:
best_file = os.path.join(self.output_dir, "best_model.pth.tar")
shutil.copyfile(file_name, best_file)
def initialize_model(architecture, sequence_length, lr=0.001):
"""
Adapted from:
https://selene.flatironinstitute.org/utils.html#initialize-model
Initialize model (and associated criterion, optimizer)
Parameters
----------
architecture : str
Available model architectures: `danq`, `deeperdeepsea`, `deepsea` and
`heartenn`.
sequence_length : int
Model-specific configuration
lr : float
Learning rate.
Returns
-------
tuple(torch.nn.Module, torch.nn._Loss, torch.optim)
* `torch.nn.Module` - the model architecture
* `torch.nn._Loss` - the loss function associated with the model
* `torch.optim` - the optimizer associated with the model
"""
if architecture == "danq":
from .models.danq import (
DanQ as model_class, get_criterion, get_optimizer
)
# if architecture == "deeperdeepsea":
# from .models.deeperdeepsea import (
# DeeperDeepSEA as model_class, get_loss_criterion, get_optimizer
# )
# if architecture == "deepsea":
# from .models.deepsea import (
# DeepSEA as model_class, get_loss_criterion, get_optimizer
# )
# if architecture == "heartenn":
# from .models.heartenn import (
# HeartENN as model_class, get_loss_criterion, get_optimizer
# )
model = model_class(sequence_length, 1)
# __is_lua_trained_model(model)
criterion = get_criterion()
optimizer = get_optimizer(model.parameters(), lr)
return(model, criterion, optimizer)
# def __is_lua_trained_model(model):
# if hasattr(model, "from_lua"):
# return(model.from_lua)
# from .utils.multi_model_wrapper import MultiModelWrapper
# check_model = model
# if hasattr(model, "model"):
# check_model = model.model
# elif type(model) == MultiModelWrapper and hasattr(model, "sub_models"):
# check_model = model.sub_models[0]
# setattr(model, "from_lua", False)
# setattr(check_model, "from_lua", False)
# for m in check_model.modules():
# if "Conv2d" in m.__class__.__name__:
# setattr(model, "from_lua", True)
# setattr(check_model, "from_lua", True)
# return(model.from_lua)<file_sep>import click
import numpy as np
from utils.io import write
from utils.data import get_data_loaders, get_tensor_datasets, split_data
from utils.trainer import Trainer
CONTEXT_SETTINGS = {
"help_option_names": ["-h", "--help"],
}
@click.command(context_settings=CONTEXT_SETTINGS, no_args_is_help=True)
@click.option(
"-a", "--architecture",
help="Model architecture.",
default="danq",
metavar="STRING",
show_default=True,
type=click.Choice(["danq", "deepsea"], case_sensitive=False)
)
@click.option(
"-l", "--learn-rate",
help="Learning rate.",
default=0.003,
show_default=True
)
@click.option(
"-m", "--max-epochs",
help="Max. number of epochs.",
default=100,
show_default=True
)
@click.option(
"-n", "--name",
help="Transcription factor name.",
required=True,
type=str
)
@click.option(
"--neg-sequences",
help="FASTA file with negative sequences.",
metavar="FILENAME",
required=True,
)
@click.option(
"-o", "--out-dir",
default="./",
help="Output directory.",
metavar="DIRECTORY",
show_default=True
)
@click.option(
"--pos-sequences",
help="FASTA file with positive sequences.",
metavar="FILENAME",
required=True,
)
@click.option(
"-r", "--rev-complement",
help="Train on reverse complement sequences.",
is_flag=True,
default=False
)
@click.option(
"-s", "--seed",
default=123,
help="Seed for random generation.",
show_default=True
)
@click.option(
"-t", "--threads",
default=1,
help="Number of CPU threads to use.",
show_default=True
)
@click.option(
"-v", "--verbose",
help="Verbose mode.",
is_flag=True,
default=False
)
def train(
name, neg_sequences, pos_sequences, architecture="danq", learn_rate=0.003,
max_epochs=100, out_dir="./", rev_complement=False, seed=123, threads=1,
verbose=False
):
"""Train a model."""
if verbose:
write(None, "*** Loading data...")
# Data splits
data_splits = split_data(
pos_sequences, neg_sequences, rev_complement, seed
)
# Tensor datasets
tensor_datasets = get_tensor_datasets(data_splits)
# Data loaders
parameters = dict(batch_size=64, shuffle=True, num_workers=threads)
data_loaders = get_data_loaders(tensor_datasets, parameters)
if verbose:
write(None, "*** Training model...")
# Train model
trainer = Trainer(
architecture, name, data_loaders, learn_rate, max_epochs,
out_dir, verbose
)
trainer.train_and_validate()
trainer.visualize_loss()
trainer.test()
trainer.compute_performance_metrics()
trainer.save()
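# Example invocation (sketch; the FASTA file names below are hypothetical):
#   python train.py -n CTCF --pos-sequences pos_seqs.fa --neg-sequences neg_seqs.fa \
#       -o ./CTCF -r -v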
if __name__ == "__main__":
train()<file_sep>import numpy as np
def one_hot_encode(seqs):
"""
Adapted from:
https://github.com/kundajelab/dragonn/blob/master/dragonn/utils/__init__.py
One hot encodes a list of sequences.
"""
# Initialize
encoded_seqs = []
one_hot_encoder = {
"A": [1., 0., 0., 0.],
"C": [0., 1., 0., 0.],
"G": [0., 0., 1., 0.],
"T": [0., 0., 0., 1.],
"N": [0., 0., 0., 0.]
}
for seq in seqs:
encoded_seq = np.array(
[one_hot_encoder.get(s, [0., 0., 0., 0.]) for s in seq]
)
encoded_seqs.append(encoded_seq)
return(np.array(encoded_seqs))
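# Example: one_hot_encode(["ACGT"]) returns an array of shape (1, 4, 4), i.e.
# (n_sequences, sequence_length, 4), one row per base in A/C/G/T column order;
# Ns (or any character outside ACGTN) become all-zero rows.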
def reverse_complement(encoded_seqs):
"""
Adapted from:
https://github.com/kundajelab/dragonn/blob/master/dragonn/utils/__init__.py
Reverse complements a list of one hot encoded sequences.
"""
return(encoded_seqs[..., ::-1, ::-1])<file_sep>../DanQ/predict.py<file_sep># Adapted from:
# https://github.com/FunctionLab/selene/blob/master/models/danQ.py
import math
import torch
import torch.nn as nn
class DanQ(nn.Module):
"""DanQ architecture (Quang & Xie, 2016)."""
def __init__(self, sequence_length, n_features):
"""
Parameters
----------
sequence_length : int
Input sequence length
n_features : int
Total number of features to predict
"""
super(DanQ, self).__init__()
self.nnet = nn.Sequential(
nn.Conv1d(4, 320, kernel_size=26),
nn.ReLU(inplace=True),
nn.MaxPool1d(kernel_size=13, stride=13),
nn.Dropout(0.2)
)
self.bdlstm = nn.Sequential(
nn.LSTM(
320, 320, num_layers=1, batch_first=True, bidirectional=True
)
)
self._n_channels = math.floor((sequence_length - 25) / 13)
self.classifier = nn.Sequential(
nn.Dropout(0.5),
nn.Linear(self._n_channels * 640, 925),
nn.ReLU(inplace=True),
nn.Linear(925, n_features),
nn.Sigmoid()
)
def forward(self, x):
"""Forward propagation of a batch."""
out = self.nnet(x)
reshape_out = out.transpose(0, 1).transpose(0, 2)
out, _ = self.bdlstm(reshape_out)
out = out.transpose(0, 1)
reshape_out = out.contiguous().view(
out.size(0), 640 * self._n_channels)
predict = self.classifier(reshape_out)
return(predict)
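# Shape trace (sketch) for a 200 bp input, using the layer sizes above:
#   x:                     (batch, 4, 200)   one-hot encoded sequence
#   Conv1d(4, 320, k=26):  (batch, 320, 175)
#   MaxPool1d(13, 13):     (batch, 320, 13)  -> _n_channels = floor((200 - 25) / 13) = 13
#   transposes:            (13, batch, 320)
#   BiLSTM(320, 320):      (13, batch, 640)
#   transpose back:        (batch, 13, 640)
#   flatten + classifier:  (batch, n_features), Sigmoid output in (0, 1)
if __name__ == "__main__":
    # Minimal smoke test (illustrative only; not part of the training pipeline)
    model = DanQ(sequence_length=200, n_features=1)
    x = torch.randn(2, 4, 200)  # stand-in for a batch of 2 one-hot sequences
    print(model(x).shape)       # expected: torch.Size([2, 1])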
def get_criterion():
"""
Specify the appropriate loss function (criterion) for this model.
Returns
-------
torch.nn._Loss
"""
return(nn.BCELoss())
def get_optimizer(params, lr=0.001):
return(torch.optim.Adam(params, lr=lr))<file_sep>#!/usr/bin/env python
import os
from pybedtools import BedTool
import sys
# Initialize
intersections = []
tss = BedTool("./data/UBA1.bed")
# Untar and uncompress Hi-C.tgz (i.e. TADs from Hicarus, unpublished)
hic_dir = "./data/Hicarus/"
for d in os.listdir(hic_dir):
if os.path.isdir(os.path.join(hic_dir, d)):
tads_file = os.path.join(hic_dir, d, "tads", "tads-hg38.50kb.bed")
if os.path.exists(tads_file):
tads = BedTool(tads_file)
for i in tads.intersect(tss, wa=True, wb=True):
intersections.append((i.fields[-1], d, i.fields[0], i.fields[1],
i.fields[2], i.name))
# Unzip hg38.TADs.zip (i.e. TADs from the 3D Genome Browser)
hic_dir = "./data/3dGenomeBrowser/"
for f in os.listdir(hic_dir):
tads_file = os.path.join(hic_dir, f)
if os.path.exists(tads_file):
tads = BedTool(tads_file)
for i in tads.intersect(tss, wa=True, wb=True):
intersections.append((i.fields[-1], f, i.fields[0], i.fields[1],
i.fields[2], "."))
for i in intersections:
print("\t".join(map(str, i)))
<file_sep>#!/bin/bash
if [ ! -f matrix2d.ReMap+UniBind.sparse.npz ]; then
wget http://expdata.cmmt.ubc.ca/downloads/TF-Binding-Matrix/matrix/UCSC/200bp/matrix2d.ReMap+UniBind.sparse.npz
fi
if [ ! -f regions_idx.pickle.gz ]; then
wget http://expdata.cmmt.ubc.ca/downloads/TF-Binding-Matrix/matrix/UCSC/200bp/regions_idx.pickle.gz
fi
if [ ! -f sequences.200bp.fa.gz ]; then
wget http://expdata.cmmt.ubc.ca/downloads/TF-Binding-Matrix/matrix/UCSC/200bp/sequences.200bp.fa.gz
fi
if [ ! -f tfs_idx.pickle.gz ]; then
wget http://expdata.cmmt.ubc.ca/downloads/TF-Binding-Matrix/matrix/UCSC/200bp/tfs_idx.pickle.gz
fi
<file_sep>#!/bin/bash
# Initialize
DANQ_DIR=../DanQ
CHRX_DIR=../Genomes/X
# Make predictions with CTCF models trained w/ DragoNN
# https://github.com/kundajelab/dragonn
for M in CTCF CTCF-fwd
do
# Initialize
STAT_DICT=${DANQ_DIR}/${M}/model.pth.tar
# mm10
OUT_DIR=./mm10/${M}
mkdir -p $OUT_DIR
for T in cerebellum forebrain heart hindbrain intestine kidney liver lung midbrain stomach thymus
do
for S in pos_seqs neg_seqs
do
echo "*** predict ${T}, ${S}, ${M}"
SEQ_FILE=../Sequences/mm10/${S}.${T}.fa
OUT_FILE=${OUT_DIR}/${S}.${T}.txt.gz
if [ ! -f $OUT_FILE ]; then
python predict.py -f $SEQ_FILE -o $OUT_FILE -s $STAT_DICT -r
fi
done
done
for G in bosTau6 CHIR_1.0 equCab3 gorGor4 gorGor5 hg38 mm9 oviAri3 panPan3 panTro6 ponAbe3 susScr11
do
echo "*** predict ${G}, ${M}"
OUT_DIR=./${G}
mkdir -p $OUT_DIR
SEQ_FILE=${CHRX_DIR}/${G}.chrX.fa
OUT_FILE=./${OUT_DIR}/${M}.chrX.txt.gz
if [ ! -f $OUT_FILE ]; then
python predict-X.py -f $SEQ_FILE -o $OUT_FILE -s $STAT_DICT -r
fi
done
done
<file_sep>#!/bin/bash
# Get CTCF predictions in the set of positive/negative CTCF-bound sequences
gunzip ./pos_seqs.fa.gz
python ../JASPAR-UCSC-tracks/scan_sequence.py --fasta-file ./pos_seqs.fa --profiles-dir ../JASPAR-UCSC-tracks/profiles/ --output-dir ./pos_seqs/
gzip ./pos_seqs.fa
gunzip ./neg_seqs.fa.gz
python ../JASPAR-UCSC-tracks/scan_sequence.py --fasta-file ./neg_seqs.fa --profiles-dir ../JASPAR-UCSC-tracks/profiles/ --output-dir ./neg_seqs/
gzip ./neg_seqs.fa<file_sep>#!/usr/bin/env python
import click
import os
from predict import _predict
from utils.io import parse_fasta_file, write
CONTEXT_SETTINGS = {
"help_option_names": ["-h", "--help"],
}
@click.command(context_settings=CONTEXT_SETTINGS, no_args_is_help=True)
@click.option(
"-f", "--fasta-file",
help="FASTA file with sequences.",
metavar="FILENAME",
required=True
)
@click.option(
"-o", "--out-file",
help="Output file.",
metavar="FILENAME",
required=True
)
@click.option(
"-r", "--rev-complement",
help="Predict on reverse complement sequences.",
is_flag=True,
default=False
)
@click.option(
"-s", "--state-dict",
help="Model state dict to use.",
metavar="FILENAME",
required=True
)
@click.option(
"-t", "--threads",
default=1,
help="Number of CPU threads to use.",
show_default=True
)
def predictx(fasta_file, out_file, state_dict, rev_complement=False, threads=1):
# Sequences
fasta_file_200bp = "%s.200bp" % fasta_file
if not os.path.exists(fasta_file_200bp):
for seq_record in parse_fasta_file(fasta_file):
chrom = seq_record.id
seq = str(seq_record.seq)
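            # Tile the chromosome into 200 bp windows with a 100 bp step
            # (50% overlap); the loop stops before emitting a trailing partial window.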
for i in range(0, len(seq) - 100, 100):
write(
fasta_file_200bp,
">%s:%s-%s\n%s" % (chrom, i+1, i+200, seq[i:i+200])
)
if i+200+100 > len(seq):
break
_predict(fasta_file_200bp, out_file, state_dict, rev_complement, threads)
if __name__ == "__main__":
predictx()<file_sep>"""
This module specifies the {MultiModelWrapper} class, currently intended for use
through Selene's API (as opposed to the CLI).
Loads multiple models and concatenates their outputs.
"""
import torch
import torch.nn as nn
class MultiModelWrapper(nn.Module):
"""
The multi-model wrapper class can be used to concatenate the outputs of
multiple models along a pre-specified axis. The wrapper can be used to load
and run multiple trained models during prediction functions. This class
should not be used for training. We also have not yet provided support for
using this class through the CLI.
This class can be used to initialize `selene_sdk.predict.AnalyzeSequences`
with a corresponding list of `trained_model_path`s. Please ensure the
ordering of the two lists (`sub_models` here and `trained_model_path` in
AnalyzeSequences) match.
Parameters
----------
sub_models : list(torch.nn.Module)
The sub-models that are used in this multi-model wrapper class.
concat_dim : int, optional
Default is 1. The dimension along which to concatenate the models'
predictions.
"""
def __init__(self, sub_models, concat_dim=1):
super(MultiModelWrapper, self).__init__()
self.sub_models = sub_models
self._concat_dim = concat_dim
def cuda(self):
for sm in self.sub_models:
sm.cuda()
def eval(self):
for sm in self.sub_models:
sm.eval()
def forward(self, x):
return torch.cat(
[sm(x) for sm in self.sub_models], self._concat_dim
)<file_sep>#!/usr/bin/env bash
# i.e. enable conda (de)activate
eval "$(conda shell.bash hook)"
# Create conda environment
conda create -n CTCF -c bioconda -c conda-forge -c pytorch \
biopython=1.78 click=7.1.2 curl=7.71.1 entrez-direct=13.9 \
jupyterlab=2.2.8 matplotlib=3.2.2 numpy=1.19.1 pandas=1.1.3 \
plotly=4.11.0 pybedtools=0.8.1 pyfaidx=0.5.9.1 python=3.8.5 \
seaborn=0.11.0 scikit-learn=0.23.2 scipy=1.5.2 torchvision=0.7.0 \
tqdm=4.50.2
<file_sep>import os
import torch
from torch.nn import Module
from sklearn.metrics import average_precision_score
from sklearn.metrics import matthews_corrcoef
from sklearn.metrics import roc_auc_score
# NOTE (assumption): Selene helpers referenced below (logger, initialize_logger,
# load_model_from_state_dict, PerformanceMetrics, _metrics_logger, _flip,
# _is_lua_trained_model, ...) are not imported here; they are assumed to come
# from selene_sdk or local utility modules.
class TrainModel(object):
"""
Adapted from:
https://selene.flatironinstitute.org/selene.html#trainmodel
This class ties together the various objects and methods needed to
train and validate a model.
{TrainModel} saves a checkpoint model (overwriting it after
`save_checkpoint_every_n_steps`) as well as a best-performing model
(overwriting it after `report_stats_every_n_steps` if the latest
validation performance is better than the previous best-performing
model) to `output_dir`.
{TrainModel} also outputs 2 files that can be used to monitor training
as Selene runs: `selene_sdk.train_model.train.txt` (training loss) and
`selene_sdk.train_model.validation.txt` (validation loss & average
ROC AUC). The columns in these files can be used to quickly visualize
training history (e.g. you can use `matplotlib`, `plt.plot(auc_list)`)
and see, for example, whether the model is still improving, if there are
signs of overfitting, etc.
Parameters
----------
model : torch.nn.Module
The model to train.
generators : dict
A dictionary mapping the `train`, `validation` and `test` steps to
their corresponding `torch.utils.data.DataLoader` instances.
loss_criterion : torch.nn._Loss
The loss function to optimize.
optimizer_class : torch.optim.Optimizer
The optimizer to minimize loss with.
optimizer_kwargs : dict
The dictionary of keyword arguments to pass to the optimizer's
constructor.
max_steps : int
The maximum number of mini-batches to iterate over.
report_stats_every_n_steps : int
The frequency with which to report summary statistics. You can
set this value to be equivalent to a training epoch
(`n_steps * batch_size`) being the total number of samples
seen by the model so far. Selene evaluates the model on the validation
dataset every `report_stats_every_n_steps` and, if the model obtains
the best performance so far (based on the user-specified loss function),
Selene saves the model state to a file called `best_model.pth.tar` in
`output_dir`.
output_dir : str
The output directory to save model checkpoints and logs in.
save_checkpoint_every_n_steps : int or None, optional
Default is 1000. If None, set to the same value as
`report_stats_every_n_steps`
save_new_checkpoints_after_n_steps : int or None, optional
Default is None. The number of steps after which Selene will
continually save new checkpoint model weights files
(`checkpoint-<TIMESTAMP>.pth.tar`) every
`save_checkpoint_every_n_steps`. Before this point,
the file `checkpoint.pth.tar` is overwritten every
`save_checkpoint_every_n_steps` to limit the memory requirements.
n_validation_samples : int or None, optional
Default is `None`. Specify the number of validation samples in the
validation set. If `n_validation_samples` is `None` and the data sampler
used is the `selene_sdk.samplers.IntervalsSampler` or
`selene_sdk.samplers.RandomSampler`, we will retrieve 32000
validation samples. If `None` and using
`selene_sdk.samplers.MultiFileSampler`, we will use all
available validation samples from the appropriate data file.
n_test_samples : int or None, optional
Default is `None`. Specify the number of test samples in the test set.
If `n_test_samples` is `None` and
- the sampler you specified has no test partition, you should not
specify `evaluate` as one of the operations in the `ops` list.
That is, Selene will not automatically evaluate your trained
model on a test dataset, because the sampler you are using does
not have any test data.
- the sampler you use is of type `selene_sdk.samplers.OnlineSampler`
(and the test partition exists), we will retrieve 640000 test
samples.
- the sampler you use is of type
`selene_sdk.samplers.MultiFileSampler` (and the test partition
exists), we will use all the test samples available in the
appropriate data file.
cpu_n_threads : int, optional
Default is 1. Sets the number of OpenMP threads used for parallelizing
CPU operations.
use_cuda : bool, optional
Default is `False`. Specify whether a CUDA-enabled GPU is available
for torch to use during training.
logging_verbosity : {0, 1, 2}, optional
Default is 2. Set the logging verbosity level.
* 0 - Only warnings will be logged.
* 1 - Information and warnings will be logged.
* 2 - Debug messages, information, and warnings will all be\
logged.
checkpoint_resume : str or None, optional
Default is `None`. If `checkpoint_resume` is not None, it should be the
path to a model file generated by `torch.save` that can now be read
using `torch.load`.
Attributes
----------
model : torch.nn.Module
The model to train.
generator : torch.utils.data.DataLoader
The example generator.
loss_criterion : torch.nn._Loss
The loss function to optimize.
optimizer_class : torch.optim.Optimizer
The optimizer to minimize loss with.
max_steps : int
The maximum number of mini-batches to iterate over.
nth_step_report_stats : int
The frequency with which to report summary statistics.
nth_step_save_checkpoint : int
The frequency with which to save a model checkpoint.
use_cuda : bool
If `True`, use a CUDA-enabled GPU. If `False`, use the CPU.
output_dir : str
The directory to save model checkpoints and logs.
training_loss : list(float)
The current training loss.
metrics : dict
A dictionary that maps metric names (`str`) to metric functions.
By default, this contains `"roc_auc"`, which maps to
`sklearn.metrics.roc_auc_score`, `"average_precision"`, which maps to
`sklearn.metrics.average_precision_score`, and `"m_corr_coef"`, which
maps to `sklearn.metrics.matthews_corrcoef`
"""
def __init__(
self,
model,
generators,
loss_criterion,
optimizer_class,
optimizer_kwargs,
max_steps,
report_stats_every_n_steps,
output_dir,
save_checkpoint_every_n_steps=1000,
save_new_checkpoints_after_n_steps=None,
report_gt_feature_n_positives=10,
n_validation_samples=None,
n_test_samples=None,
cpu_n_threads=1,
use_cuda=False,
logging_verbosity=2,
checkpoint_resume=None,
metrics={
"roc_auc": roc_auc_score,
"average_precision": average_precision_score,
"m_corr_coef": matthews_corrcoef
}
):
"""
Constructs a new `TrainModel` object.
"""
self.model = model
self.generators = generators
self.criterion = loss_criterion
self.optimizer = optimizer_class(
self.model.parameters(), **optimizer_kwargs
)
self.max_steps = max_steps
self.nth_step_report_stats = report_stats_every_n_steps
self.nth_step_save_checkpoint = None
if not save_checkpoint_every_n_steps:
self.nth_step_save_checkpoint = report_stats_every_n_steps
else:
self.nth_step_save_checkpoint = save_checkpoint_every_n_steps
self.save_new_checkpoints = save_new_checkpoints_after_n_steps
print(
"Training parameters set: batch size {0}, "
"number of steps per 'epoch': {1}, "
"maximum number of steps: {2}".format(
generators["train"].__dict__["batch_size"],
self.nth_step_report_stats,
self.max_steps
)
)
exit(0)
torch.set_num_threads(cpu_n_threads)
self.use_cuda = use_cuda
if self.use_cuda:
self.model.cuda()
self.criterion.cuda()
logger.debug("Set modules to use CUDA")
os.makedirs(output_dir, exist_ok=True)
self.output_dir = output_dir
initialize_logger(
os.path.join(self.output_dir, "{0}.log".format(__name__)),
verbosity=logging_verbosity)
self._create_validation_set(n_samples=n_validation_samples)
self._validation_metrics = PerformanceMetrics(
# self.sampler.get_feature_from_index,
report_gt_feature_n_positives=report_gt_feature_n_positives,
metrics=metrics)
if "test" in self.sampler.modes:
self._test_data = None
self._n_test_samples = n_test_samples
self._test_metrics = PerformanceMetrics(
# self.sampler.get_feature_from_index,
report_gt_feature_n_positives=report_gt_feature_n_positives,
metrics=metrics)
self._start_step = 0
self._min_loss = float("inf") # TODO: Should this be set when it is used later? Would need to if we want to train model 2x in one run.
if checkpoint_resume is not None:
checkpoint = torch.load(
checkpoint_resume,
map_location=lambda storage, location: storage)
if "state_dict" not in checkpoint:
raise ValueError("Selene does not support continued "
"training of models that were not originally "
"trained using Selene.")
self.model = load_model_from_state_dict(
checkpoint["state_dict"], self.model)
self._start_step = checkpoint["step"]
if self._start_step >= self.max_steps:
self.max_steps += self._start_step
self._min_loss = checkpoint["min_loss"]
self.optimizer.load_state_dict(
checkpoint["optimizer"])
if self.use_cuda:
for state in self.optimizer.state.values():
for k, v in state.items():
if isinstance(v, torch.Tensor):
state[k] = v.cuda()
logger.info(
("Resuming from checkpoint: step {0}, min loss {1}").format(
self._start_step, self._min_loss))
self._train_logger = _metrics_logger(
"{0}.train".format(__name__), self.output_dir)
self._validation_logger = _metrics_logger(
"{0}.validation".format(__name__), self.output_dir)
self._train_logger.info("loss")
self._validation_logger.info("\t".join(["loss"] +
sorted([x for x in self._validation_metrics.metrics.keys()])))
# def _create_validation_set(self, n_samples=None):
# """
# Generates the set of validation examples.
# Parameters
# ----------
# n_samples : int or None, optional
# Default is `None`. The size of the validation set. If `None`,
# will use all validation examples in the sampler.
# """
# logger.info("Creating validation dataset.")
# t_i = time()
# # self._validation_data, self._all_validation_targets = \
# # self.sampler.get_validation_set(
# # self.batch_size, n_samples=n_samples)
# t_f = time()
# logger.info(("{0} s to load {1} validation examples ({2} validation "
# "batches) to evaluate after each training step.").format(
# t_f - t_i,
# len(self._validation_data) * self.batch_size,
# len(self._validation_data)))
# def create_test_set(self):
# """
# Loads the set of test samples.
# We do not create the test set in the `TrainModel` object until
# this method is called, so that we avoid having to load it into
# memory until the model has been trained and is ready to be
# evaluated.
# """
# logger.info("Creating test dataset.")
# t_i = time()
# # self._test_data, self._all_test_targets = \
# # self.sampler.get_test_set(
# # self.batch_size, n_samples=self._n_test_samples)
# t_f = time()
# logger.info(("{0} s to load {1} test examples ({2} test batches) "
# "to evaluate after all training steps.").format(
# t_f - t_i,
# len(self._test_data) * self.batch_size,
# len(self._test_data)))
# np.savez_compressed(
# os.path.join(self.output_dir, "test_targets.npz"),
# data=self._all_test_targets)
# def _get_batch(self):
# """
# Fetches a mini-batch of examples
# Returns
# -------
# tuple(numpy.ndarray, numpy.ndarray)
# A tuple containing the examples and targets.
# """
# t_i_sampling = time()
# # batch_sequences, batch_targets = self.sampler.sample(
# # batch_size=self.batch_size)
# t_f_sampling = time()
# logger.debug(
# ("[BATCH] Time to sample {0} examples: {1} s.").format(
# self.batch_size,
# t_f_sampling - t_i_sampling))
# return (batch_sequences, batch_targets)
# def train_and_validate(self):
# """
# Trains the model and measures validation performance.
# """
# min_loss = self._min_loss
# scheduler = ReduceLROnPlateau(
# self.optimizer,
# 'min',
# patience=16,
# verbose=True,
# factor=0.8)
# time_per_step = []
# for step in range(self._start_step, self.max_steps):
# t_i = time()
# train_loss = self.train()
# t_f = time()
# time_per_step.append(t_f - t_i)
# if step % self.nth_step_save_checkpoint == 0:
# checkpoint_dict = {
# "step": step,
# "arch": self.model.__class__.__name__,
# "state_dict": self.model.state_dict(),
# "min_loss": min_loss,
# "optimizer": self.optimizer.state_dict()
# }
# if self.save_new_checkpoints is not None and \
# self.save_new_checkpoints >= step:
# checkpoint_filename = "checkpoint-{0}".format(
# strftime("%m%d%H%M%S"))
# self._save_checkpoint(
# checkpoint_dict, False, filename=checkpoint_filename)
# logger.debug("Saving checkpoint `{0}.pth.tar`".format(
# checkpoint_filename))
# else:
# self._save_checkpoint(
# checkpoint_dict, False)
# # TODO: Should we have some way to report training stats without running validation?
# if step and step % self.nth_step_report_stats == 0:
# logger.info(("[STEP {0}] average number "
# "of steps per second: {1:.1f}").format(
# step, 1. / np.average(time_per_step)))
# time_per_step = []
# valid_scores = self.validate()
# validation_loss = valid_scores["loss"]
# self._train_logger.info(train_loss)
# to_log = [str(validation_loss)]
# for k in sorted(self._validation_metrics.metrics.keys()):
# if k in valid_scores and valid_scores[k]:
# to_log.append(str(valid_scores[k]))
# else:
# to_log.append("NA")
# self._validation_logger.info("\t".join(to_log))
# scheduler.step(math.ceil(validation_loss * 1000.0) / 1000.0)
# if validation_loss < min_loss:
# min_loss = validation_loss
# self._save_checkpoint({
# "step": step,
# "arch": self.model.__class__.__name__,
# "state_dict": self.model.state_dict(),
# "min_loss": min_loss,
# "optimizer": self.optimizer.state_dict()}, True)
# logger.debug("Updating `best_model.pth.tar`")
# logger.info("training loss: {0}".format(train_loss))
# logger.info("validation loss: {0}".format(validation_loss))
# # Logging training and validation on same line requires 2 parsers or more complex parser.
# # Separate logging of train/validate is just a grep for validation/train and then same parser.
# # self.sampler.save_dataset_to_file("train", close_filehandle=True)
# def train(self):
# """
# Trains the model on a batch of data.
# Returns
# -------
# float
# The training loss.
# """
# self.model.train()
# # self.sampler.set_mode("train")
# inputs, targets = self._get_batch()
# inputs = torch.Tensor(inputs)
# targets = torch.Tensor(targets)
# if self.use_cuda:
# inputs = inputs.cuda()
# targets = targets.cuda()
# inputs = Variable(inputs)
# targets = Variable(targets)
# predictions = self.model(inputs.transpose(1, 2))
# loss = self.criterion(predictions, targets)
# self.optimizer.zero_grad()
# loss.backward()
# self.optimizer.step()
# return loss.item()
# def _evaluate_on_data(self, data_in_batches):
# """
# Makes predictions for some labeled input data.
# Parameters
# ----------
# data_in_batches : list(tuple(numpy.ndarray, numpy.ndarray))
# A list of tuples of the data, where the first element is
# the example, and the second element is the label.
# Returns
# -------
# tuple(float, list(numpy.ndarray))
# Returns the average loss, and the list of all predictions.
# """
# self.model.eval()
# batch_losses = []
# all_predictions = []
# for (inputs, targets) in data_in_batches:
# inputs = torch.Tensor(inputs)
# targets = torch.Tensor(targets)
# if self.use_cuda:
# inputs = inputs.cuda()
# targets = targets.cuda()
# with torch.no_grad():
# inputs = Variable(inputs)
# targets = Variable(targets)
# predictions = self.model(inputs.transpose(1, 2))
# loss = self.criterion(predictions, targets)
# all_predictions.append(
# predictions.data.cpu().numpy())
# batch_losses.append(loss.item())
# all_predictions = np.vstack(all_predictions)
# return np.average(batch_losses), all_predictions
# def validate(self):
# """
# Measures model validation performance.
# Returns
# -------
# dict
# A dictionary, where keys are the names of the loss metrics,
# and the values are the average value for that metric over
# the validation set.
# """
# average_loss, all_predictions = self._evaluate_on_data(
# self._validation_data)
# average_scores = self._validation_metrics.update(all_predictions,
# self._all_validation_targets)
# for name, score in average_scores.items():
# logger.info("validation {0}: {1}".format(name, score))
# average_scores["loss"] = average_loss
# return average_scores
# def evaluate(self):
# """
# Measures the model test performance.
# Returns
# -------
# dict
# A dictionary, where keys are the names of the loss metrics,
# and the values are the average value for that metric over
# the test set.
# """
# if self._test_data is None:
# self.create_test_set()
# average_loss, all_predictions = self._evaluate_on_data(
# self._test_data)
# average_scores = self._test_metrics.update(all_predictions,
# self._all_test_targets)
# np.savez_compressed(
# os.path.join(self.output_dir, "test_predictions.npz"),
# data=all_predictions)
# for name, score in average_scores.items():
# logger.info("test {0}: {1}".format(name, score))
# test_performance = os.path.join(
# self.output_dir, "test_performance.txt")
# feature_scores_dict = self._test_metrics.write_feature_scores_to_file(
# test_performance)
# average_scores["loss"] = average_loss
# self._test_metrics.visualize(
# all_predictions, self._all_test_targets, self.output_dir)
# return (average_scores, feature_scores_dict)
# def _save_checkpoint(self,
# state,
# is_best,
# filename="checkpoint"):
# """
# Saves snapshot of the model state to file. Will save a checkpoint
# with name `<filename>.pth.tar` and, if this is the model's best
# performance so far, will save the state to a `best_model.pth.tar`
# file as well.
# Models are saved in the state dictionary format. This is a more
# stable format compared to saving the whole model (which is another
# option supported by PyTorch). Note that we do save a number of
# additional, Selene-specific parameters in the dictionary
# and that the actual `model.state_dict()` is stored in the `state_dict`
# key of the dictionary loaded by `torch.load`.
# See: https://pytorch.org/docs/stable/notes/serialization.html for more
# information about how models are saved in PyTorch.
# Parameters
# ----------
# state : dict
# Information about the state of the model. Note that this is
# not `model.state_dict()`, but rather, a dictionary containing
# keys that can be used for continued training in Selene
# _in addition_ to a key `state_dict` that contains
# `model.state_dict()`.
# is_best : bool
# Is this the model's best performance so far?
# filename : str, optional
# Default is "checkpoint". Specify the checkpoint filename. Will
# append a file extension to the end of the `filename`
# (e.g. `checkpoint.pth.tar`).
# Returns
# -------
# None
# """
# logger.debug("[TRAIN] {0}: Saving model state to file.".format(
# state["step"]))
# cp_filepath = os.path.join(
# self.output_dir, filename)
# torch.save(state, "{0}.pth.tar".format(cp_filepath))
# if is_best:
# best_filepath = os.path.join(self.output_dir, "best_model")
# shutil.copyfile("{0}.pth.tar".format(cp_filepath),
# "{0}.pth.tar".format(best_filepath))
def initialize_model(
architecture, sequence_length, learn_rate=0.003, rev_complement=False
):
"""
Adapted from:
https://selene.flatironinstitute.org/utils.html#initialize-model
Initialize model (and associated criterion, optimizer)
Parameters
----------
architecture : str
Available model architectures: `danq`, `deeperdeepsea`, `deepsea` and
`heartenn`.
sequence_length : int
Model-specific configuration
learn_rate : float
Learning rate.
rev_complement : bool
Returns
-------
tuple(torch.nn.Module, torch.nn._Loss, torch.optim, dict)
* `torch.nn.Module` - the model architecture
* `torch.nn._Loss` - the loss function associated with the model
* `torch.optim` - the optimizer associated with the model
* `dict` - the optimizer arguments
"""
if model_architecture == "danq":
from ..models.danq import (
DanQ as model_class,
criterion,
get_optimizer
)
if model_architecture == "deeperdeepsea":
from ..models.deeperdeepsea import (
DeeperDeepSEA as model_class,
criterion,
get_optimizer
)
if model_architecture == "deepsea":
from ..models.deepsea import (
DeepSEA as model_class,
criterion,
get_optimizer
)
if model_architecture == "heartenn":
from ..models.heartenn import (
HeartENN as model_class,
criterion,
get_optimizer
)
print("here")
# model = model_class(**model_configs["class_args"])
# if "non_strand_specific" in model_configs:
# from selene_sdk.utils import NonStrandSpecific
# model = NonStrandSpecific(model, mode=model_configs["non_strand_specific"])
# _is_lua_trained_model(model)
# criterion = module.criterion()
# if train and isinstance(lr, float):
# optim_class, optim_kwargs = module.get_optimizer(lr)
# return model, criterion, optim_class, optim_kwargs
# elif train:
# raise ValueError("Learning rate must be specified as a float "
# "but was {0}".format(lr))
# return model, criterion
class NonStrandSpecific(Module):
"""
A torch.nn.Module that wraps a user-specified model architecture if the
architecture does not need to account for sequence strand-specificity.
Parameters
----------
model : torch.nn.Module
The user-specified model architecture.
mode : {'mean', 'max'}, optional
Default is 'mean'. NonStrandSpecific will pass the input and the
reverse-complement of the input into `model`. The mode specifies
whether we should output the mean or max of the predictions as
the non-strand specific prediction.
Attributes
----------
model : torch.nn.Module
The user-specified model architecture.
mode : {'mean', 'max'}
How to handle outputting a non-strand specific prediction.
"""
def __init__(self, model, mode="mean"):
super(NonStrandSpecific, self).__init__()
self.model = model
if mode != "mean" and mode != "max":
raise ValueError("Mode should be one of 'mean' or 'max' but was"
"{0}.".format(mode))
self.mode = mode
self.from_lua = _is_lua_trained_model(model)
def forward(self, input):
reverse_input = None
if self.from_lua:
reverse_input = _flip(
_flip(torch.squeeze(input, 2), 1), 2).unsqueeze_(2)
else:
reverse_input = _flip(_flip(input, 1), 2)
output = self.model.forward(input)
output_from_rev = self.model.forward(reverse_input)
if self.mode == "mean":
return (output + output_from_rev) / 2
else:
return torch.max(output, output_from_rev)<file_sep>import numpy as np
from sklearn.model_selection import train_test_split
import torch
from torch.utils.data import DataLoader, TensorDataset
from .io import parse_fasta_file
# Defaults
default_parameters = dict(batch_size=64, shuffle=True, num_workers=1)
def get_data_loaders(tensor_datasets, kwargs=default_parameters):
data_loaders = {}
for k, v in tensor_datasets.items():
data_loaders.setdefault(k, DataLoader(v, **kwargs))
return(data_loaders)
def get_tensor_datasets(data_splits):
tensor_datasets = {}
for k, v in data_splits.items():
data = torch.Tensor(v[0])
labels = torch.Tensor(v[1])
tensor_datasets.setdefault(k, TensorDataset(data, labels))
return(tensor_datasets)
def split_data(pos_sequences, neg_sequences, rev_complement=False, seed=123):
# Data
pos_encoded_seqs = one_hot_encode_FASTA_file(pos_sequences)
neg_encoded_seqs = one_hot_encode_FASTA_file(neg_sequences)
data = np.concatenate((pos_encoded_seqs, neg_encoded_seqs))
# Labels
pos_labels = np.ones((len(pos_encoded_seqs), 1))
neg_labels = np.zeros((len(neg_encoded_seqs) ,1))
labels = np.concatenate((pos_labels, neg_labels))
# Data splits
indices = list(range(len(data)))
train, test = train_test_split(indices, test_size=0.2, random_state=seed)
validation, test = train_test_split(test, test_size=0.5, random_state=seed)
data_splits = {
"train": [data[train], labels[train]],
"validation": [data[validation], labels[validation]],
"test": [data[test], labels[test]]
}
# Reverse complement
if rev_complement:
data, labels = data_splits["train"][0], data_splits["train"][1]
data_splits["train"][0] = np.append(
data, reverse_complement(data), axis=0
)
data_splits["train"][1] = np.append(labels, labels, axis=0)
return(data_splits)
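# split_data() returns {"train": [data, labels], "validation": [...], "test": [...]}
# with an 80/10/10 split (test_size=0.2, half of which is then held out for
# testing); when rev_complement=True only the training split is doubled with
# reverse-complemented copies (labels are simply repeated).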
def one_hot_encode_FASTA_file(fasta_file):
"""One hot encodes sequences in a FASTA file."""
# Initialize
encoded_seqs = []
for seq_record in parse_fasta_file(fasta_file):
encoded_seqs.append(one_hot_encode(str(seq_record.seq).upper()))
return(np.array(encoded_seqs))
def one_hot_encode(seq):
"""One hot encodes a sequence."""
seq = seq.replace("A", "0")
seq = seq.replace("C", "1")
seq = seq.replace("G", "2")
seq = seq.replace("T", "3")
encoded_seq = np.zeros((4, len(seq)), dtype="float16")
for i in range(len(seq)):
if seq[i].isdigit():
encoded_seq[int(seq[i]), i] = 1
else:
# i.e. Ns
encoded_seq[:, i] = 0.25
return(encoded_seq)
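# Example: one_hot_encode("ACGN") returns a (4, sequence_length) array
# (channels x positions), the transpose of the (length, 4) layout used elsewhere:
#   A: [1, 0, 0, 0.25]
#   C: [0, 1, 0, 0.25]
#   G: [0, 0, 1, 0.25]
#   T: [0, 0, 0, 0.25]
# i.e. Ns are encoded as 0.25 in every channel.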
def one_hot_decode(encoded_seq):
"""Reverts a sequence's one hot encoding."""
seq = []
code = list("ACGT")
for i in encoded_seq.transpose(1, 0):
try:
seq.append(code[int(np.where(i == 1)[0])])
        except (TypeError, ValueError):
# i.e. N?
seq.append("N")
return("".join(seq))
def reverse_complement(encoded_seqs):
"""Reverse complements one hot encoding for a list of sequences."""
return(encoded_seqs[..., ::-1, ::-1])<file_sep>import os
import argparse
import pandas as pd
import gzip
import numpy as np
from Bio import SeqIO
from Bio.Seq import reverse_complement
from Bio.SeqUtils import GC
parser = argparse.ArgumentParser(description="Sample strand-specific GC comparable sequences for the TF (pos/neg sets)")
parser.add_argument("--scans-file", default=None, help="File with motif predictions from JASPAR")
parser.add_argument("--neg-file", default=None, help="FASTA file with unbound sequences (i.e. neg)")
parser.add_argument("--pos-file", default=None, help="FASTA file with bound sequences (i.e. pos)")
parser.add_argument("--out-dir", default=".", help="Directory where to output sampled sequences")
args = parser.parse_args()
scans_file = args.scans_file
neg_file = args.neg_file
pos_file = args.pos_file
out_dir = args.out_dir
##########################################################
# Load the fwd/rev motif predictions
##########################################################
nonzero = set()
fwd = set()
rev = set()
# Read in chunks
for chunk in pd.read_csv(scans_file, compression="gzip", header=None,
encoding="utf8", sep="\t", chunksize=1024,
comment="#", engine="python"):
for index, row in chunk.iterrows():
row = row.tolist()
if row[-1] == "+":
fwd.add(row[0])
else:
rev.add(row[0])
# Ignore ambiguous sequences with CTCF motifs in both strands
for i in list(fwd ^ rev):
nonzero.add(i)
print("Data is loaded!")
##########################################################
##########################################################
# Extracting the fasta sequences for 0s and 1s
##########################################################
zero = set()
fasta_ids_nonzero = {}
fasta_ids_zero = {}
for fasta in SeqIO.parse(open(neg_file), "fasta"):
name, sequence = fasta.id, str(fasta.seq)
name = int(name.split(":")[0])
fasta_ids_zero[name] = sequence
fasta_ids_zero[-name] = reverse_complement(sequence)
for fasta in SeqIO.parse(open(pos_file), "fasta"):
name, sequence = fasta.id, str(fasta.seq)
name = int(name.split(":")[0])
if name in nonzero:
if name in fwd:
fasta_ids_nonzero.setdefault("fwd", {})
fasta_ids_nonzero.setdefault("rev", {})
fasta_ids_nonzero["fwd"][name] = sequence
fasta_ids_nonzero["rev"][-name] = reverse_complement(sequence)
else:
fasta_ids_nonzero.setdefault("fwd", {})
fasta_ids_nonzero.setdefault("rev", {})
fasta_ids_nonzero["rev"][name] = sequence
fasta_ids_nonzero["fwd"][-name] = reverse_complement(sequence)
fasta_ids_zero = pd.Series(fasta_ids_zero)
print("Sequences are extracted!")
##########################################################
for strand in ["fwd", "rev"]:
print("Strand is %s!" % strand)
######################################################
# Sample new 0s
######################################################
data_series = pd.Series(fasta_ids_nonzero[strand])
nonzero_gc = data_series.apply(lambda x: GC(x.upper()))
zero_gc = fasta_ids_zero.apply(lambda x: GC(x.upper()))
bins = [0,10,20,30,40,50,60,70,80,90,100]
labels = [10,20,30,40,50,60,70,80,90,100]
#assigning bins from nonzero
binned_nonzero = pd.cut(nonzero_gc, bins = bins, labels = labels)
#assigning bins from zero
binned_zero = pd.cut(zero_gc, bins = bins, labels = labels)
#sampling new zeros
new_zero_ind = []
for l in labels:
num_nonzero = len(binned_nonzero[binned_nonzero == l])
num_zero = len(binned_zero[binned_zero == l])
        # if there are no nonzero or no zero peaks in this bin, continue
if num_nonzero == 0 or num_zero == 0:
continue
if num_zero >= num_nonzero:
#sample without replacement
sampled_bins = binned_zero[binned_zero == l].sample(n=num_nonzero, replace=False)
new_zero_ind = new_zero_ind + list(sampled_bins.index)
if num_nonzero > num_zero:
print("For bin %s we have more nonzeros than zeros!" % l)
sampled_bins = binned_zero[binned_zero == l]
new_zero_ind = new_zero_ind + list(sampled_bins.index)
fasta_new_ids_zero = fasta_ids_zero[new_zero_ind]
new_zero_gc = fasta_new_ids_zero.apply(lambda x: GC(x.upper()))
print("Sequences are sampled!")
######################################################
######################################################
# Saving files
######################################################
fasta_file = os.path.join(out_dir, "neg_seqs.%s.fa" % strand)
with open(fasta_file, "w") as f:
for items in fasta_new_ids_zero.iteritems():
name, sequence = items
f.write(">" + str(name) + "\n")
f.write(sequence + "\n")
fasta_file = os.path.join(out_dir, "pos_seqs.%s.fa" % strand)
with open(fasta_file, "w") as f:
for items in data_series.iteritems():
name, sequence = items
f.write(">" + str(name) + "\n")
f.write(sequence + "\n")
print("Files are saved!")
######################################################
break
print("You are good to go!")
<file_sep>import sys
#++++++++++++++++#
# I/O Utilities #
#++++++++++++++++#
def __file_handle(file_name, mode="r"):
"""
@input:
file_name {str}
mode {str} e.g. "r", rb", etc.; default = "r"
@yield: {str}
"""
raiseValueError = False
# Open a file handle
if file_name.endswith(".gz"):
try:
import gzip
fh = gzip.open(file_name, mode)
except:
raiseValueError = True
elif file_name.endswith(".zip"):
try:
from zipfile import ZipFile
zf = ZipFile(file_name, mode)
for f in zf.infolist():
# i.e. only handles the first file
fh = zf.open(f, mode)
break
except:
raiseValueError = True
else:
try:
fh = open(file_name, mode)
except:
raiseValueError = True
if raiseValueError:
raise ValueError("Could not open file handle: %s" % file_name)
return(fh)
def parse_file(file_name):
"""
Parses a file and yields lines one by one.
@input:
file_name {str}
@yield: {str}
"""
fh = __file_handle(file_name)
# For each line...
for line in fh:
yield(line.strip("\n"))
fh.close()
def parse_csv_file(file_name, sep=","):
"""
Parses a CSV file and yields lines one by one as a list.
@input:
file_name {str}
sep {str} e.g. "\t"; default = ","
@yield: {list}
"""
import pandas as pd
fh = __file_handle(file_name)
# Read in chunks
for chunk in pd.read_csv(
fh, header=None, encoding="utf8", sep=sep, chunksize=1024, comment="#"
):
for index, row in chunk.iterrows():
yield(row.tolist())
fh.close()
def parse_tsv_file(file_name):
"""
Parses a TSV file and yields lines one by one as a list.
@input:
file_name {str}
@yield: {list}
"""
# For each line...
for line in parse_csv_file(file_name, sep="\t"):
yield(line)
def parse_json_file(file_name):
import json
return(json.loads("\n".join([l for l in parse_file(file_name)])))
def parse_fasta_file(file_name):
"""
Parses a FASTA file and yields {SeqRecord} objects one by one.
@input:
file_name {str}
@yield: {SeqRecord}
"""
from Bio import SeqIO
fh = __file_handle(file_name, mode="rt")
# For each SeqRecord...
for seq_record in SeqIO.parse(fh, "fasta"):
yield(seq_record)
fh.close()
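# Hedged usage sketch (added; not part of the original module). The line and
# record parsers above are generators, so large files are streamed rather
# than loaded whole; "example.fa" and "table.tsv" are hypothetical names.
#
#   for record in parse_fasta_file("example.fa"):
#       print(record.id, len(record.seq))
#
#   for fields in parse_tsv_file("table.tsv"):
#       chrom, start, end = fields[:3]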
def write(file_name=None, content=None, overwrite=False):
"""
Writes content to a file. If overwrite=False, content will be appended at
the end of the file. If file_name=None, writes content to STDOUT.
@input:
file_name {str}
content {str}
overwrite {bool}
"""
if file_name:
if overwrite:
mode = "w"
else:
mode = "a"
fh = __file_handle(file_name, mode=mode)
fh.write("%s\n" % content)
fh.close()
else:
sys.stdout.write("%s\n" % content)<file_sep>#!/bin/bash
for g in bosTau6 equCab3 gorGor4 hg38 mm9 oviAri3 panPan3 panTro6 ponAbe3 susScr11
do
if [ ! -f ${g}.chrX.fa ]; then
mkdir ${g}
cd ${g}
wget ftp://hgdownload.soe.ucsc.edu/goldenPath/${g}/bigZips/${g}.fa.gz
gunzip ${g}.fa.gz
csplit -s -z ${g}.fa '/>/' '{*}'
for i in xx* ; do \
n=$(sed 's/>// ; s/ .*// ; 1q' "$i") ; \
mv "$i" "$n.fa" ; \
done
cd ..
mv ${g}/chrX.fa ${g}.chrX.fa
rm -rf ${g}
fi
done
if [ ! -f gorGor5.chrX.fa ]; then
efetch -db nuccore -id LT578347.1 -format fasta > gorGor5.chrX.fa
fi
if [ ! -f CHIR_1.0.chrX.fa ]; then
efetch -db nuccore -id CM001739.1 -format fasta > CHIR_1.0.chrX.fa
fi
<file_sep>#!/usr/bin/env python
import argparse
import os
from pybedtools import BedTool
from __init__ import ENCODE, ParseUtils
usage_msg = """
usage: %s --encode-dir DIR --fasta-file FILE
[-h] [options]
""" % os.path.basename(__file__)
help_msg = """%s
builds multiple matrices of CTCF-bound and open regions across
cells/tissues from ENCODE
--encode-dir DIR output directory from get_encode.py
--fasta-file FILE from get_mm10.sh (i.e. mm10.fa)
optional arguments:
-h, --help show this help message and exit
-o, --out-dir DIR output directory (default = "./")
""" % usage_msg
#-------------#
# Functions #
#-------------#
def parse_args():
"""
This function parses arguments provided via the command line and returns an
{argparse} object.
"""
parser = argparse.ArgumentParser(add_help=False)
# Mandatory args
parser.add_argument("--encode-dir")
parser.add_argument("--fasta-file")
# Optional args
optional_group = parser.add_argument_group("optional arguments")
optional_group.add_argument("-h", "--help", action="store_true")
optional_group.add_argument("-o", "--out-dir", default=".")
args = parser.parse_args()
check_args(args)
return(args)
def check_args(args):
"""
This function checks an {argparse} object.
"""
# Print help
if args.help:
print(help_msg)
exit(0)
# Check mandatory arguments
if not args.encode_dir or not args.fasta_file:
error = ["%s\n%s" % (usage_msg, os.path.basename(__file__)), "error",
"arguments \"--encode-dir\" \"--fasta-file\" are required\n"]
print(": ".join(error))
exit(0)
def main():
# Parse arguments
args = parse_args()
# Build matrices
build_matrix(args.encode_dir, args.fasta_file, args.out_dir)
def build_matrix(encode_dir, fasta_file, out_dir="."):
"""
e.g. ./get_sequences.py --encode-dir ../ENCODE/ --fasta-file ../Genomes/mm10/mm10.fa
"""
# Create output dir
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
#######################################################
# First of all, figure out the common samples between #
# DNase-seq and CTCF ChIP-seq experiments. #
#######################################################
pkl_file = "metadata.mm10.accessibility.tsv.pickle.gz"
encodes_acc = ParseUtils.load_pickle(os.path.join(encode_dir, pkl_file))
samples_acc = set([e.biosample_name for e in encodes_acc.values()])
pkl_file = "metadata.mm10.tf.tsv.pickle.gz"
encodes_tfs = ParseUtils.load_pickle(os.path.join(encode_dir, pkl_file))
samples_tfs = set([e.biosample_name for e in encodes_tfs.values()])
samples = samples_acc.intersection(samples_tfs)
#######################################################
# Now, for each sample, create a high-quality set of #
# positive and negative sequences for DragoNN: #
# (*) Positive sequences are CTCF ChIP-seq regions in #
# which the peak max overlaps an open region #
# (*) Negative sequences are open regions that do not #
# overlap a CTCF ChIP-seq region #
#######################################################
for sample in sorted(samples):
        # Skip if the DNase-seq file for this sample already exists
dnase_seq_file = os.path.join(out_dir, ".DNase-seq.%s.bed" % sample)
if not os.path.exists(dnase_seq_file):
# Initialize
intervals = []
a = BedTool(os.path.join(encode_dir, "DNase-seq.200bp.bed"))
for interval in a:
encode_acc = encodes_acc[interval.fields[3]]
if encode_acc.biosample_name != sample:
continue
intervals.append(interval)
b = BedTool("\n".join(map(str, intervals)), from_string=True).saveas(dnase_seq_file)
        # Skip if the CTCF ChIP-seq file for this sample already exists
chip_seq_file = os.path.join(out_dir, ".TF.CTCF.%s.bed" % sample)
if not os.path.exists(chip_seq_file):
# Initialize
intervals = []
a = BedTool(os.path.join(encode_dir, "TF.CTCF.200bp.bed"))
for interval in a:
encode_tfs = encodes_tfs[interval.fields[3]]
if encode_tfs.biosample_name != sample:
continue
intervals.append(interval)
b = BedTool("\n".join(map(str, intervals)), from_string=True).saveas(chip_seq_file)
# Skip if positive sequences for this sample already exist
sequences_file = os.path.join(out_dir, "pos_seqs.%s.fa" % sample)
if not os.path.exists(sequences_file):
# Skip if positive regions for this sample already exist
bed_file = os.path.join(out_dir, ".pos_seqs.%s.bed" % sample)
if not os.path.exists(bed_file):
# Initialize
intervals = set()
a = BedTool(chip_seq_file)
b = BedTool(dnase_seq_file)
for interval in a.intersect(b, sorted=True, wa=True, f=0.5, r=True, stream=True):
intervals.add(interval)
c = BedTool("\n".join(map(str, intervals)), from_string=True).saveas(bed_file)
# Get BED and FASTA files
a = BedTool(bed_file)
s = a.sequence(fi=fasta_file)
# Write
with open(sequences_file, "w") as f:
f.write(open(s.seqfn).read())
# Skip if negative sequences for this sample already exist
sequences_file = os.path.join(out_dir, "neg_seqs.%s.fa" % sample)
if not os.path.exists(sequences_file):
            # Skip if negative regions for this sample already exist
bed_file = os.path.join(out_dir, ".neg_seqs.%s.bed" % sample)
if not os.path.exists(bed_file):
# Initialize
intervals = set()
b = BedTool(chip_seq_file)
a = BedTool(dnase_seq_file)
for interval in a.subtract(b, sorted=True, A=True, stream=True):
intervals.add(interval)
c = BedTool("\n".join(map(str, intervals)), from_string=True).saveas(bed_file)
# Get BED and FASTA files
a = BedTool(bed_file)
s = a.sequence(fi=fasta_file)
# Write
with open(sequences_file, "w") as f:
f.write(open(s.seqfn).read())
#-------------#
# Main #
#-------------#
if __name__ == "__main__":
main()
<file_sep>#!/bin/bash
python get-sequences.py --encode-dir ../../ENCODE/ --fasta-file ../../Genomes/mm10/mm10.fa
<file_sep>"""
DanQ architecture (Quang & Xie, 2016).
"""
import math
import numpy as np
import torch
import torch.nn as nn
class DanQ(nn.Module):
"""
Parameters
----------
sequence_length : int
        The length of the sequences on which the model trains and makes
        predictions.
n_targets : int
The number of targets (classes) to predict.
Attributes
----------
    nnet : torch.nn.Sequential
        The convolutional module (1D convolution, ReLU, max pooling and
        dropout) applied to the one hot encoded input.
    bdlstm : torch.nn.Sequential
        The bidirectional LSTM module applied to the convolutional features.
classifier : torch.nn.Sequential
The linear classifier and sigmoid transformation components of the
model.
"""
def __init__(self, sequence_length, n_targets):
super(DanQ, self).__init__()
self.nnet = nn.Sequential(
nn.Conv1d(4, 320, kernel_size=26),
nn.ReLU(inplace=True),
nn.MaxPool1d(
kernel_size=13, stride=13),
nn.Dropout(0.2)
)
self.bdlstm = nn.Sequential(
nn.LSTM(
320, 320, num_layers=1, batch_first=True, bidirectional=True
)
)
self._n_channels = math.floor((sequence_length - 25) / 13)
self.classifier = nn.Sequential(
nn.Dropout(0.5),
nn.Linear(self._n_channels * 640, 925),
nn.ReLU(inplace=True),
nn.Linear(925, n_targets),
nn.Sigmoid()
)
def forward(self, x):
"""Forward propagation of a batch."""
out = self.nnet(x)
reshape_out = out.transpose(0, 1).transpose(0, 2)
out, _ = self.bdlstm(reshape_out)
out = out.transpose(0, 1)
reshape_out = out.contiguous().view(
out.size(0), 640 * self._n_channels)
return(self.classifier(reshape_out))
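# Minimal hedged sketch (added for illustration, not part of the original
# file): instantiating DanQ and running a forward pass on a dummy batch of
# 8 one hot encoded sequences of length 200 with a single target.
#
#   model = DanQ(sequence_length=200, n_targets=1)
#   dummy = torch.zeros(8, 4, 200)   # (batch, channels, sequence_length)
#   preds = model(dummy)             # shape: (8, 1), values in (0, 1)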
def get_criterion():
"""
Specify the appropriate loss function (criterion) for this model.
Returns
-------
torch.nn._Loss
"""
# return(nn.BCELoss())
return(nn.BCEWithLogitsLoss())
# def get_optimizer(lr=0.001):
def get_optimizer(params, lr=0.001):
# return(torch.optim.RMSprop, {"lr": lr})
return(torch.optim.Adam(params, lr=lr))
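# Hedged usage note (added), continuing the sketch above: wiring the
# criterion and optimizer into one training step. Since DanQ already ends in
# a Sigmoid, BCEWithLogitsLoss (which applies a sigmoid internally) would
# sigmoid twice; nn.BCELoss, the commented-out alternative above, matches a
# Sigmoid output directly.
#
#   criterion = get_criterion()
#   optimizer = get_optimizer(model.parameters(), lr=0.001)
#   loss = criterion(model(dummy), torch.zeros(8, 1))
#   optimizer.zero_grad(); loss.backward(); optimizer.step()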
|
88760a1250514a8b29a2dca2f917e38da227a42a
|
[
"Markdown",
"Python",
"Shell"
] | 27
|
Python
|
wassermanlab/CTCF
|
7ce1b347cca880afd0688f363247f1844ad268ae
|
ac935a020c9fa2108f0cddc0751510e8f16bad0b
|
refs/heads/master
|
<repo_name>MelliFern/eloquent_javascript<file_sep>/ch5_02_agediff.js
var ancestry_file = require('./ancestry_file');
var ancestry_array = JSON.parse(ancestry_file);
// for each row find the age difference
var getAgeDiffArray = function(callback ){
var ageDiffArray = [];
var mother = {};
var person = {};
for(var i=0; i<ancestry_array.length; i++){
person = ancestry_array[i];
mother = callback(person.mother);
        if(mother.name != null)
ageDiffArray.push(person.born - mother.born);
}
return ageDiffArray;
};
// find array with name
var getMother = function(name){
var motherName={};
for(var i=0; i<ancestry_array.length; i++){
if(ancestry_array[i].name == name){
motherName.name = ancestry_array[i].name;
motherName.born = ancestry_array[i].born;
break;
}
}
return motherName;
}
var getAverage = function(callback1){
var array = callback1(getMother);
var sum = 0;
for(var i=0; i<array.length; i++){
sum += array[i];
}
return(sum/array.length);
};
console.log(getAverage(getAgeDiffArray));
<file_sep>/ch2_01_min.js
var min = function(val1, val2){
return (val1<val2 ? val1:val2);
}
console.log(min(100, 20));
console.log(min(20, 20));
console.log(min(-100, 20));
console.log(min('aaa', 'bbb'));
<file_sep>/ch5_00_bookExample.js
var forEach = function(array,action){
for(var i=0; i<array.length; i++)
action(array[i]);
}
var number = [1,2,3,4,5], sum=0;
forEach(number, function(number){
sum+=number;
});
console.log(sum);
/*----------------------------------------------------------*/
var greaterThan = function (m){
return function(n){
return(n>m);
}
}
var greaterThan10 = greaterThan(10);
console.log(greaterThan10(20));
/*----------------------------------------------------------*/
var ancestry_file = require('./ancestry_file');
var ancestry_array = JSON.parse(ancestry_file);
console.log("/*----------------------------------------------*/");
function filter(array,test){
var passed=[];
for(var i=0; i< array.length; i++){
if(test(array[i]))
passed.push(array[i]);
}
return passed;
}
console.log('filter born between 1900 and 1925');
console.log(filter(ancestry_array, function(person){
return person.born > 1900 && person.born < 1925;
}));
console.log('filter Father - <NAME>');
console.log(filter(ancestry_array, function(person){
return person.father == "<NAME>";
}));
console.log("/*----------------------------------------------*/");
function map(array,transform){
var mapped=[];
for(var i=0;i<array.length; i++)
mapped.push(transform(array[i]));
return mapped;
}
var overNinety = ancestry_array.filter(function(person){
return person.died - person.born > 90
});
console.log('Over Ninety');
console.log(map(overNinety, function(person){
return person.name;
}));
console.log("/*----------------------------------------------*/");
console.log("/* Reduce*/");
function reduce(array, combine,start){
var current = start;
for(var i=0; i<array.length; i++)
current = combine(current, array[i]);
return current;
}
console.log(reduce([1,2,3,4], function(a,b){
return a+b;
}, 0));
console.log("/*----------------------------------------------*/");
console.log("/* Reduce*/");
console.log(ancestry_array.reduce(function(min,cur){
if(cur.born < min.born)
return cur;
else return min;
}));
console.log("/*----------------------------------------------*/");
console.log("/* Min */");
var min = ancestry_array[0];
for(var i=0; i< ancestry_array.length; i++){
var cur= ancestry_array[i];
if(cur.born < min.born)
min = cur;
};
console.log(min);
function average(array){
function plus(a,b){return a+b;}
return array.reduce(plus)/array.length;
}
function age(p){return p.died - p.born}
function male(p){return p.sex == "m" }
function female(p){return p.sex =="f"}
console.log(average(ancestry_array.filter(male).map(age)));
console.log(average(ancestry_array.filter(female).map(age)));
console.log("/*----------------------------------------------*/");
console.log("/* Great - great - greate */");
var byName = {};
ancestry_array.forEach(function(person){
byName[person.name] = person;
});
console.log(byName["<NAME>"]);
function reduceAncestors(person, f, defaultValue){
function valueFor(person){
if(person == null)
return defaultValue;
else
return f(person, valueFor(byName[person.mother]),
valueFor(byName[person.father]));
}
return valueFor(person);
}
console.log("/*----------------------------------------------*/");
console.log("/* shared DNA */");
function sharedDNA(person, fromMother, fromFather){
if(person.name == "<NAME>")
return 1 ;
else
return (fromMother + fromFather)/2;
}
var ph = byName['<NAME>'];
console.log(reduceAncestors(ph, sharedDNA, 0)/4);
console.log("/*----------------------------------------------*/");
console.log("/* count Ancestors */");
function countAncestors(person, test){
function combine(person, fromMother, fromFather){
var thisOneCounts = test(person);
return fromMother + fromFather + (thisOneCounts ? 1:0);
}
return reduceAncestors(person, combine, 0);
}
function longLivingPercentage(person){
var all = countAncestors(person, function(person){
return true;
});
var longLiving = countAncestors(person, function(person){
return (person.died - person.born) >= 70;
});
return longLiving/all;
}
console.log(longLivingPercentage(byName["<NAME>"]));
console.log("/*----------------------------------------------*/");
console.log("/* Binding */");
var theSet = ["<NAME>", "<NAME>", "<NAME>"];
function isInSet(set,person){
return set.indexOf(person.name) > -1;
}
console.log(ancestry_array.filter(function(person){
return isInSet(theSet, person);
}));
console.log(ancestry_array.filter(isInSet.bind(null,theSet)));
<file_sep>/ch4_03_list.js
var arrayToList = function(array,idx){
var list ={};
if(idx >= array.length)
return null;
list.value=array[idx];
list.rest = arrayToList(array, ++idx);
return list;
};
var listToArray = function(list, idx, array){
if(list == null)
return array;
array[idx] = list.value;
console.dir(array);
array = listToArray(list.rest,++idx, array);
return array;
};
console.dir(arrayToList([1,2,3],0));
console.dir(listToArray({ value: 10,
rest: { value: 20,
rest: { value: 30,
rest: null } } }, 0,[]
));<file_sep>/ch5_01_reduce.js
/*
function reduce(array,combine,start){
var current = start;
for(var i=0; i<array.length; i++)
current = combine(current, array[i]);
return current;
}
console.log(reduce([1,2,3,4], function(a,b){
return a+b;
},0));
/*-------------------------------------------------*/
console.log("***************************");
var flatten = function (array, combine, mainArray){
for(var i=0; i<array.length; i++)
mainArray = combine(mainArray, array[i]);
return mainArray;
};
var inputArray = [[1,2,3],[10,20,30],[100,200]];
var resultArray = flatten(inputArray, function(combineArray, array)
{
for(var i=0; i<array.length; i++)
combineArray.push(array[i]);
return combineArray;
},
[]);
console.log("result ");
console.dir(resultArray);
<file_sep>/ch4_04_deep_comparison.js
var getProperty = function(obj){
var properties=[];
var idx = 0;
for(var propertyName in obj){
properties[idx]= propertyName;
idx++
}
return properties;
}
//console.dir(getProperty(5,5));
var isObject = function(object){
if(typeof(object)=='object')
return true;
else return false;
};
var compare = function(operand1, operand2, property){
if(operand1[property] == operand2[property])
return true;
else
return false;
}
var deepCompare = function(operand1, operand2){
var isEqual = false;
if(typeof(operand1) == typeof(operand2)){
if(isObject(operand1)){
var properties1 = getProperty(operand1);
var properties2 = getProperty(operand2);
if(properties1.length != properties2.length)
return false;
for (var i=0;i<properties1.length; i++){
if(!compare(operand1, operand2, properties1[i]))
return false;
}
return true;
}
else
if(operand1 == operand2)
return true;
else
return false;
}
else
return false;
};
console.log(deepCompare(5,5));
console.log(deepCompare(6,5));
//console.dir(getProperty({id:1, name:'mel', title:'senior dev engg'}));
console.log(deepCompare({id:1, name:'mel', title:'senior dev engg'},
{id:1, name:'mel', title:'senior dev engg'}));
console.log(deepCompare({id:1, name:'mel', title:'senior dev engg'},
{id:1, name:'melissa', title:'senior dev engg'}));
console.log(deepCompare({id:1, name:'mel', title:'senior dev engg'},
{id:1, name:'mel', title:'senior dev engg', age:24}));
//console.log(deepCompare(5,5));
<file_sep>/ch5_04_every_some.js
var ancestry_file = require('./ancestry_file');
var ancestry_array = JSON.parse(ancestry_file);
var every = function(array, callback){
    for(var i=0;i<array.length; i++)
if(!callback(array[i]))
return false
return true;
};
var some = function(array, callback){
    for(var i=0;i<array.length; i++)
if(callback(array[i]))
return true;
return false;
};
console.log([100, 10, 50].every(function(val){
return (val%10 ==0)
}));
console.log([100, 10, 9].some(function(val){
return (val%3 ==0)
}));
console.log([100, 10, 50,8].every(function(val){
return (val%10 ==0)
}));
console.log([100, 10, 50].some(function(val){
return (val%17 ==0)
}));<file_sep>/ch6_01_vector.js
function vector (x,y) {
this.x = x;
this.y = y;
};
vector.prototype.setLocation = function(x,y){
this.x = x;
this.y = y;
};
vector.prototype.plus = function(vector2){
return {x:this.x + vector2.x,
y:this.y + vector2.y}
};
vector.prototype.minus = function(vector2){
return {x:this.x - vector2.x,
y:this.y - vector2.y}
};
var point1 = new vector(2,2);
point1.setLocation(2,4);
console.dir(point1.plus({x:5, y:10}));
point1.setLocation(20,30);
console.dir(point1.minus({x:5, y:10}));<file_sep>/ch2_02_chess_board.js
var chessBoard = function(size){
    for(var j=0; j<size; j++){
if(j%2==0){
for(var i =0; i< size; i++){
if(i%2==0)
process.stdout.write("#");
else
process.stdout.write(' ');
};
}
else{
for(var i =0; i< size; i++){
if(i%2==0)
process.stdout.write(' ');
else
process.stdout.write('#');
};
}
console.log('');
};
};
chessBoard(16);<file_sep>/ch6_03_sequence_interface.js
var ArraySeq = function(array){
this.pos = -1;
this.array = array;
};
ArraySeq.prototype.next = function(){
this.pos++;
if(this.pos >= this.array.length)
return false;
return this.array[this.pos];
};
ArraySeq.prototype.current = function(){
return this.array[this.pos];
}
var RangeSeq = function(start, end){
this.range =[];
this.pos = -1;
for(var i=start; i<=end; i++)
this.range.push(i);
};
RangeSeq.prototype.next = function(){
this.pos++;
if(this.pos>= this.range.length )
return false;
return this.range[this.pos];
};
RangeSeq.prototype.current = function(){
return this.range[this.pos];
}
var logFive = function(object){
for(var i=0; i<5;i++){
if(!object.next())
break;
console.log(object.current());
}
};
console.log("\nArraySeq\n");
logFive(new ArraySeq([1,23,433,4534,123]));
console.log("\nRangeSeq\n");
logFive(new RangeSeq(10,23));
/*
var array2 = new RangeSeq(10,23);
console.dir(array2);
for(var i=0;i<3; i++)
console.log(array2.next());
console.log("\nArraySeq\n")
var array1 = new ArraySeq([1,23,433,4534,123]);
for(var i=0;i<3; i++)
console.log(array1.next());*/<file_sep>/ch2_02_FizzBuzz.js
var rangeArray = function(){
var array = [];
for(var i=1; i<=100; i++)
array[i]=i;
for(var i=3; i<=100; i=i+3)
array[i]='Fizz';
for(var i=5; i<=100; i=i+5){
if(i%3 == 0)
array[i] = 'FizzBuzz';
else
array[i] = 'Buzz';
}
return array;
};
console.log(rangeArray());
/*
var divisibleBy = function(val,divisor){
return (val%divisor)==0;
}
var replace = function(array, callback1, callback2){
for(var i=0; i< array.length; i++)
if(callback1())
}; */
|
ef2280e782bc51758580711d7ab752b26eaf6646
|
[
"JavaScript"
] | 11
|
JavaScript
|
MelliFern/eloquent_javascript
|
802afea8119abdd38bf4723164ca6c04621a54ce
|
829cfa439d9212bd21a5185f836370983cef0eb3
|
refs/heads/master
|
<file_sep><?php
include("modelos/productosbdd.php");
$oProv = new Gestor();
/*if(isset($_GET['action'])){
switch($_GET['action']){
case 'addprov':
$oProv->proveedor_agregar(($_POST['id']),($_POST['id2']));
break;
}
}*/
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
<!--scritp para que me cambie el valor del numero-->
<script>
function ChangeNumber(val) {
document.getElementById("cantid").innerHTML = val;
}
</script>
<table class="table table-sm" style="background: lightyellow; width: 1600px; margin: auto; margin-top: 5%;">
<thead>
<tr>
<th>QUE TAL SEÑOR PROVEEDOR, NECESITAMOS QUE NOS LLENE DE SUS PRODUCTOS</th>
</tr>
<tr>
<!--<th>Total que le debemos y si cree en Dios, pues Dios se lo paga: <?php //$oProv->get_total_payment();?></th>
<th>Total de productos que le sacamos fiado: <?php//$oProv->get_total_items();?></th>-->
</tr>
</thead>
<thead>
<tr>
<th scope="col" style="width: 400px">Nombre Producto</th>
<th scope="col">Cantidad en stock</th>
<th scope="col">Cantidad a agregar</th>
<th scope="col"></th>
<th scope="col" style="width: 200px">Precio ARG./</th>
<th scope="col" style="width: 200px">Total</th>
<th scope="col"></th>
<th scope="col"></th>
</tr>
</thead>
<tbody>
<?=$oProv->mostrarProductosAlProv(); ?>
<tr>
<td><br></td>
<td><br></td>
<td><br></td>
<td><br></td>
</tr>
<thead>
<tr>
<th>SR PROVEEDOR, PUEDE AGREGAR UN NUEVO PRODUCTO SI USTED DESEA</th>
</tr>
<tr>
<th scope="col">Nombre Producto</th>
<th scope="col">Descripcion</th>
<th scope="col">Precio</th>
</tr>
</thead>
<form action="php/agregarproveedor.php" method="POST">
<tr>
<td><input name="nombre_p" id="nombre_p" type="text" placeholder="Nombre del nuevo producto" pattern="/^[A-Za-z0-9\s]+$/g{1-50}" required></td>
<td><input name="descripcion" id="descripcion" type="text" placeholder="Descripcion del nuevo producto" pattern="/^[A-Za-z0-9\s]+$/g{1-50}" required></td>
<td><input name="precio" id="precio" type="number" placeholder="Precio del nuevo producto" pattern="^[0-9]+" required></td>
<td><input type="submit" name="addnewproduct" class="btn-comprar" value="Agregar un producto nuevo"></td>
</tr>
</tbody>
</table>
</form>
</body>
</html><file_sep><?php
if(isset($_POST['enviar'])){
    // NOTE (added): the original never assigned $correo, so the validation
    // below could not run. The POST field name "email" is an assumption,
    // chosen to match the field name used by the registration form.
    $correo = isset($_POST['email']) ? $_POST['email'] : '';
    if (filter_var($correo, FILTER_VALIDATE_EMAIL)) {
        echo "Se ha enviado un correo a ($correo) para la recuperación de contraseña.";
    }
    else echo "Esa dirección de correo electrónico no existe en nuestra base de datos";
}
?><file_sep><?php
session_start();
error_reporting(0);
include("conexion.php");
class Carrito extends Gestor{
private $carro = array();
public function __construct()
{
if(!isset($_SESSION['CARRITO']))
{
$_SESSION['CARRITO'] = $this->carro;
}
}
/*
public function agregar_item($cod){
$conn = mysqli_connect("localhost", "root", "", "candyshopbdd");
$sql = "SELECT * FROM productos WHERE codigo_producto = $cod";
$respuesta = mysqli_query($conn, $sql);
foreach($_SESSION['CARRITO'] as $key => $value)
{ if($respuesta){
if(array_key_exists($cod, $_SESSION['CARRITO'])){
while($value <= $key['stock']){
echo "producto agregado";
$_SESSION['CARRITO'][$cod] ++;
}
}else {
while($value <= $key['stock']){
echo "producto agregado";
$_SESSION['CARRITO'][$cod] = 1;
}
}
}
}
}*/
public function agregar_item($cod){
if(isset($_SESSION['CARRITO'])){
$conn = mysqli_connect("localhost", "user_modify", "umodify1", "candyshopbdd");
$sql = "UPDATE productos SET stock = stock - 1 WHERE codigo_producto = $cod";
$respuesta = mysqli_query($conn, $sql);
if(array_key_exists($cod, $_SESSION['CARRITO'])){
if($respuesta){
echo "producto agregado";
$_SESSION['CARRITO'][$cod] ++;
header('Location: index.php');
}
}else{
echo "producto agregado";
$_SESSION['CARRITO'][$cod] = 1;
header('Location: index.php');
}
print_r($_SESSION['CARRITO']);
}
}
/*
public function agregar_item($cod){
if(isset($_SESSION['CARRITO'])){
if(array_key_exists($cod, $_SESSION['CARRITO'])){
$_SESSION['CARRITO'][$cod] ++;
}else{
$_SESSION['CARRITO'][$cod] = 1;
}
print_r($_SESSION['CARRITO']);
}
}*/
public function remove_item($cod){
$conn = mysqli_connect("localhost", "user_modify", "umodify1", "candyshopbdd");
foreach($_SESSION['CARRITO'] as $key => $value){
$sql = "UPDATE productos SET stock = stock+$value WHERE codigo_producto = $key";
$respuesta = mysqli_query($conn, $sql);
if($respuesta){
unset($_SESSION['CARRITO'][$key]);
return true;
}
}
}
public function get_items(){
$html = '';
if(isset($_SESSION['CARRITO'])){
foreach ($_SESSION['CARRITO'] as $key => $value){
$conn = mysqli_connect("localhost", "user_normal", "unormal1", "candyshopbdd");
$consulta = mysqli_query($conn, "SELECT * FROM productos WHERE codigo_producto = $key");
$keysearch = mysqli_fetch_array($consulta);
$html = '<tr>
<td>'.$keysearch['nombre_producto'].'</td>
<td align="left">'.$value.'</td>
<td>'.number_format($keysearch['precio'], 2).'</td>
<td>'.number_format($keysearch['precio']*$value,2).'</td>
<td>
<a href="index.php?action=remove&id='.$keysearch['codigo_producto'].'" class="btn-quitar">
Eliminar
</a>
</td>
</tr>';
echo $html;
}
}
}
public function get_total_items(){
$total=0;
if(!empty($_SESSION['CARRITO'])){
foreach ($_SESSION['CARRITO'] as $key => $value){
$total += $value;
}
}
return $total;
}
public function get_total_payment(){
$total= 0;
number_format($total, 2);
if(!empty($_SESSION['CARRITO'])){
foreach ($_SESSION['CARRITO'] as $key => $value){
$conn = mysqli_connect("localhost", "user_normal", "unormal1", "candyshopbdd");
$consulta = mysqli_query($conn, "SELECT * FROM productos WHERE codigo_producto = $key");
$key = mysqli_fetch_array($consulta);
$total += $key['precio']*$value;
}
}
return number_format($total,2);
}
public function remover_product(){
if ($_SESSION['sesioniniciada']){
if(!empty($_SESSION['CARRITO'])){
/*foreach($_SESSION['CARRITO'] as $key => $value){
$conn = mysqli_connect("localhost", "root", "", "candyshopbdd");
$sql = "UPDATE productos SET stock = stock - $value WHERE codigo_producto = $key";
$respuesta = mysqli_query($conn, $sql);*/
echo "<script>alert('Compra exitosa!! ya rastreamos su ubicacion por GPS asi que en breve le estamos enviando un drone con su compra. No tenga miedo, estamos para alimentar su gula.')</script>";
unset($_SESSION['CARRITO']);
}else{ echo "ups, algo ocurrio mal";}
}else{
echo "<script>
alert('Para proceder a la compra, por favor inicie sesion')
window.location= 'iniciarsesion.php'
</script>";}
}
}
?><file_sep><!doctype html>
<html lang = "es" dir = "ltr">
<head>
<meta charset="utf-8">
<link rel="stylesheet" href="css/estilo.css">
<title>Configuracion</title>
</head>
<body>
<?php include 'include/header.php';?>
<?php
if($_SESSION['user'] === 'proveedor'){
include 'proveedor.php';
}else{
echo"Y bueno, la configuracion en otra ocasión la vamos a tener hecha..";
}
?>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<?php include 'include/footer.php';?>
</body>
</html><file_sep><!doctype html>
<html lang = "es" dir = "ltr">
<head>
<meta charset="utf-8">
<title>Terminos y condiciones</title>
<link rel="stylesheet" href="css/estilo.css">
</head>
<body>
<?php require_once 'include/header.php';?>
<div class="container">
<div class="terminos">
<h3>TÉRMINOS Y CONDICIONES GENERALES DE VENTA</h3>
<div class="texto">(N/GCOS/1301)
Definiciones
Vendedor: la entidad Nutreco identificada en el presupuesto o pedido.<br>
Comprador: la parte que celebra un acuerdo con el Vendedor para la compra de Productos o
servicios al Vendedor.<br>
Productos: los productos que se especifican en el acuerdo de compra.<br>
Artículo 1. Aplicabilidad<br>
Estas Condiciones Generales de Venta se aplican y se incorporan a todos los acuerdos de compra
convenidos con el Vendedor, a las ofertas y presupuestos del Vendedor, y a las entregas y servicios
suministrados por el Vendedor.
No podrá acordarse ninguna modificación de estos términos y condiciones si no se hace por escrito y
es firmada por los representantes autorizados de las partes. Asimismo, toda variación reflejada en
un acuerdo de compra específico no se aplicará a otros acuerdos de compra a menos que se
acuerde expresamente por escrito.<br>
Artículo 2. Ofertas y contratos<br>
1. Un presupuesto emitido por el Vendedor no constituirá una oferta vinculante.<br>
2. Un pedido enviado por el Comprador constituye una oferta para celebrar un acuerdo de
compra con el Vendedor de conformidad con las presentes Condiciones Generales de Venta
y los términos de cualquier presupuesto correspondiente del Vendedor.<br>
3. Dicho pedido puede ser aceptado por el Vendedor mediante su aceptación por escrito, su
aceptación oral, o al comenzar a cumplir el Vendedor con sus obligaciones de conformidad
con la existencia de un acuerdo de compra.<br>
Artículo 3. Entrega<br>
1. Las fechas de entrega indicadas deben considerarse solo aproximadas. La entrega efectiva
después de la fecha de entrega indicada no constituirá un incumplimiento del contrato por
parte del Vendedor ni dará derecho al Comprador a recurso alguno.<br>
Los Productos entregados pueden diferir de la descripción en el acuerdo de compra en
cuanto a los embalajes, volúmenes/dimensiones y composición, y siempre que no afecte
negativamente el uso normal de los Productos. Tal diferencia no constituirá un
incumplimiento del contrato por parte del Vendedor y no dará derecho al Comprador a
recurso alguno.<br>
2. Los Productos se entregarán según lo establecido en el acuerdo de compra o, en ausencia
de tales condiciones de entrega, en la modalidad "Free Carrier" (FCA) en las instalaciones
del Vendedor, siempre de acuerdo con las disposiciones de la última versión de los
Incoterms en vigor en el momento en que se celebre el acuerdo de compra. El Vendedor
podrá realizar entregas parciales de los Productos.<br>
3. El Comprador podrá solicitar que el Vendedor organice el transporte de los Productos en
nombre del Comprador, en cuyo caso el Comprador será responsable de todos los costes y
riesgos asociados a dicho transporte.<br>
4. El Comprador está obligado a aceptar la entrega de los Productos por el Vendedor. El
Vendedor podrá almacenar los Productos si el Comprador no acepta dicha entrega, en cuyo
caso el Comprador reembolsará al Vendedor todos los costes y gastos correspondientes
incurridos y seguirá estando obligado a pagar el precio de compra de los Productos. En este
caso, el Vendedor podrá revender los Productos correspondientes en cualquier momento a
un tercero, en cuyo caso el Comprador reembolsará al Vendedor cualquier déficit con
respecto al precio de compra acordado en el acuerdo de compra con el Comprador, así como
todos los costes y gastos incurridos por el Vendedor en relación con el almacenamiento.<br>
5. Si las partes están de acuerdo con alguna variación respecto a la descripción de los
Productos que figuran en el acuerdo de compra, el Vendedor podrá, a su discreción,
modificar la fecha de entrega indicada en el acuerdo de compra mediante notificación al
Comprador, que tendrá efecto en cuanto se reciba.<br>
Artículo 4. Quejas, obligación de inspección y cumplimiento<br>
1. Antes de la entrega de los Productos, el Vendedor podrá disponer que se tomen muestras,
por su cuenta y cargo, y podrá conservar tales muestras durante el plazo de su elección.<br>
2. El Comprador está obligado a inspeccionar los Productos entregados por el Vendedor
inmediatamente después de recibirlos.<br>
El Comprador presentará por escrito cualquier queja relativa a los defectos visibles de los
Productos (incluyendo, entre otras, las quejas relativas a cantidades, dimensiones, pesos,
fechas de caducidad y calidades) tan pronto como sea posible pero siempre dentro de un
plazo de 30 (treinta) días a partir de la fecha de entrega. El hecho de no hacerlo se
entenderá como que el Comprador no tiene nada que reclamar en relación con tales
defectos y se considerará que el Vendedor ha cumplido con sus obligaciones en su totalidad.<br>
3. El Comprador presentará por escrito cualquier queja relativa a defectos ocultos de los
Productos a la mayor brevedad posible pero siempre dentro de un plazo de 8 (ocho) días a
partir de la fecha en que el Comprador tenga conocimiento de los mismos, o debería haberlo
tenido, pero siempre dentro de un plazo de 8 (ocho) días a partir de la fecha recomendada
de caducidad o, en su defecto, dentro de los tres meses siguientes a la entrega de los
Productos. El hecho de no hacerlo se entenderá como que el Comprador no tiene nada que
reclamar en relación con tales defectos y se considerará que el Vendedor ha cumplido con
sus obligaciones en su totalidad.<br>
4. Si el Comprador presenta una queja sobre los Productos adquiridos (de conformidad con el
presente artículo 4), dará al Vendedor la oportunidad de tomar una o varias muestras de los
Productos si el Vendedor lo solicita. Se encargará entonces a un instituto independiente
seleccionado por el Vendedor, a su discreción, que someta a prueba las muestras tomadas
por el Vendedor (ya sea en virtud del artículo 4 (1) o el presente artículo 4 (4), a discreción
del Vendedor), a cargo del Comprador, con el fin de comprobar la existencia de los defectos
en los Productos alegados por el Comprador. El instituto independiente recibirá las
instrucciones pertinentes del Vendedor acerca del método de prueba a seguir, que estará en
conformidad con los procedimientos habituales de muestreo y pruebas del sector. Las
conclusiones de hecho del instituto independiente con respeto a los defectos alegados por el
Comprador serán aceptados por las partes y serán vinculantes para las mismas, como
prueba concluyente de la calidad de los Productos.<br>
5. Sin el consentimiento por escrito del Vendedor, el Comprador no devolverá los Productos al
Vendedor. Los costes de los envíos de devolución correrán a cargo del Comprador a menos
que se acuerde lo contrario por escrito.<br>
6. El Comprador presentará al Vendedor por escrito cualquier queja relativa a una factura
dentro un plazo de 14 (catorce) días a partir de la recepción de la factura, entendiéndose
por el hecho de que no lo haga, que la factura es correcta.
<br>
Artículo 5. Precio<br>
1. A menos que se indique expresamente lo contrario en el acuerdo de compra, todos los
precios acordados se denominan en la moneda de curso legal del Vendedor y no incluyen el
IVA ni ningún otro gravamen estatal, que serán abonados por el Comprador.<br>
2. Los precios se fijarán de acuerdo con las listas de precios del Vendedor vigentes en la fecha
del pedido por parte del Comprador, en la medida en que los Productos estén incluidos en la
lista de precios, o se fijarán de cualquier otro modo que se indique en el acuerdo de compra.<br>
3. Los precios acordados se basan en las condiciones del mercado en el momento en que el
Vendedor acepta el pedido del Comprador. El Vendedor tiene el derecho, en cualquier
momento mediante notificación al Comprador, a ajustar el precio acordado si los factores de
coste que determinan o codeterminan el precio (incluyendo, entre otros, los precios de
mercado de los Productos, ingredientes, materias primas, transporte, gastos de personal,
seguros, tipos de cambio de divisas, impuestos y otros cargos financieros) se incrementan
entre la fecha del acuerdo de compra y la fecha de entrega. Si el aumento de precio
asciende a más del 10% del precio original, el Comprador podrá rescindir el acuerdo de
compra mediante notificación por escrito al Vendedor, siempre que el Vendedor reciba dicha
notificación dentro de un plazo de 3 (tres) días a partir de la fecha de notificación por parte
del Vendedor del aumento del precio.<br>
Artículo 6. Pago<br>
1. El Comprador abonará todas las facturas de acuerdo con las condiciones de pago que se
indican en la factura. En ausencia de tales condiciones, el pago vencerá en un plazo de 14
días a partir de la fecha de la factura. El Comprador pagará en su totalidad todos los
importes adeudados en virtud del acuerdo de compra sin aplicar ninguna deducción,
compensación o cargo por cualesquiera motivos, abonando dichos importes en su totalidad
mediante transferencia al banco del Vendedor o abono en cuenta.<br>
2. El Vendedor tendrá derecho a suspender el cumplimiento de todas sus obligaciones con
respecto al Comprador, incluidas las derivadas de otros acuerdos, hasta que el Comprador
haya pagado todas las cantidades vencidas.<br>
3. En caso de incumplimiento por parte del Comprador de pagar cualquier cantidad adeudada
al Vendedor antes de la fecha de vencimiento correspondiente, el Comprador pagará unos
intereses sobre la cantidad vencida al tipo de EURIBOR (depósitos a 30 días) más un 2%
anual, devengándose dichos intereses todos los días desde la fecha de vencimiento hasta la
fecha de pago efectivo de la cantidad vencida.<br>
4. El cumplimiento de las obligaciones del Comprador podrá reclamarse si el Comprador entra
en concurso de acreedores, es declarado en quiebra o es concedido una moratoria sobre sus
deudas.<br>
5. El Vendedor podrá, a su discreción, en cualquier momento: exigir el pago parcial o total por
adelantado de las cantidades a devengar al Vendedor por parte del Comprador; o exigir al
Comprador que presente un aval de un tercero con respecto a dichos importes, a
satisfacción del Vendedor; o exigir que el pago se realice mediante carta de crédito
irrevocable confirmada por un banco aceptable para el Vendedor. La falta de cumplimiento
por parte del Comprador en cada caso dará derecho al Vendedor a suspender la entrega de
los Productos.<br>
Artículo 7. Reserva de propiedad<br>
1. El Vendedor retiene la titularidad sobre los Productos que entrega en el marco del acuerdo
de compra hasta que el Comprador haya pagado íntegramente (con fondos de inmediata
disposición):<br>
a. el precio de los Productos;<br>
b. el precio de cualquier trabajo realizado o que se realizará en virtud de tal acuerdo de
compra;<br>
c. el precio de los Productos entregados o pendientes de entrega, así como el precio de
cualquier trabajo realizado o que se realizará en virtud de cualquier otro acuerdo de
compra; y<br>
d. las cantidades a pagar en virtud de cualquier reclamación relativa a un
incumplimiento por parte del Comprador del acuerdo de compra.<br>
2. Hasta que la propiedad de los Productos se ceda al Comprador, el Comprador no tendrá
derecho a pignorar o gravar de otro modo los Productos ni a disponer de los mismos sin el
consentimiento previo por escrito del Vendedor.<br>
Está permitida la reventa como parte de las operaciones comerciales normales del
Comprador a menos que el Vendedor haya notificado al Comprador por escrito que desea
ejercer sus derechos derivados de la reserva de propiedad.<br>
3. El Comprador está obligado a mantener separados e identificables los Productos entregados
en función de cada acuerdo de compra y tratar dichos Productos con un cuidado adecuado.<br>
4. El Comprador autoriza irrevocablemente al Vendedor a acceder a los lugares en uso por el
Comprador con el fin de recuperar los Productos entregados en el caso de que se ejerzan los
derechos de titularidad del Vendedor, debiendo el Comprador facilitar la ayuda que solicite
el Vendedor para este fin.<br>
Artículo 8. Responsabilidad e indemnización<br>
1. Nada en el acuerdo de compra limitará o excluirá la responsabilidad del Vendedor por fraude
ni otra responsabilidad en la medida en que esta no pueda ser limitada o excluida según la
ley vigente.<br>
2. La responsabilidad total del Vendedor con respecto a todas las reclamaciones derivadas del
acuerdo de compra o relacionadas con el mismo, ya sea de carácter contractual o
extracontractual (incluida la negligencia) o de otro carácter, no superará la cantidad igual al
precio pagado o pendiente de pago por parte del Comprador en virtud del acuerdo de
compra.<br>
3. La responsabilidad total del Vendedor con respecto a todas las reclamaciones derivadas de o
relacionadas con cada entrega de los Productos en virtud de un acuerdo de compra, ya sea
de carácter contractual o extracontractual (incluida la negligencia) o de otro carácter, no
superará la cantidad igual al precio pagado o pendiente de pago por parte del Comprador
por la entrega de los Productos en virtud del acuerdo de compra.<br>
4. La responsabilidad total del Vendedor con respecto a todas las reclamaciones derivadas de o
relacionadas con cada prestación de un servicio en virtud de un acuerdo de compra, ya sea
de carácter contractual o extracontractual (incluida la negligencia) o de otro carácter, no
superará la cantidad igual al precio pagado o pendiente de pago por parte del Comprador
por dicho servicio en virtud del acuerdo de compra.<br>
5. El Vendedor no tendrá responsabilidad alguna, ya sea de carácter contractual o
extracontractual (incluida la negligencia) o de otro carácter, sobre: la pérdida de beneficios
o beneficios previstos, la pérdida de ingresos o renta, la disminución del fondo de comercio,
ni ninguna otra pérdida indirecta o consecuente de cualquier naturaleza.<br>
6. El Comprador indemnizará al Vendedor y a sus filiales, empleados y representantes frente a
todas las responsabilidades, costes y gastos incurridos en relación con cualquier reclamación
de terceros derivados de o relacionados con cualquier entrega o transformación a posteriori
de los Productos por parte del Comprador. El Comprador está obligado a suscribir y
mantener un seguro adecuado con respecto a reclamaciones de terceros derivados de o
relacionados con cualquier entrega o transformación a posteriori.<br>
7. El Comprador no tendrá derecho a presentar reclamación alguna contra el Vendedor
derivada de o relacionada con el acuerdo de compra, ya sea de carácter contractual o
extracontractual (incluida la negligencia) o de otro carácter, a partir de 12 (doce) meses de
la fecha en que surjan las correspondientes circunstancias que den lugar a una reclamación
o, si fuera posterior, la fecha en la que el Comprador debería haber tenido conocimiento de
las circunstancias que den lugar a una reclamación.<br>
Artículo 9. Asesoramiento, informes, etc.<br>
Si el Vendedor ofrece al Comprador cualquier asesoramiento o asistencia técnica, el Comprador no
contará con la exactitud e integridad de este tipo de asesoramiento y asistencia, ni el Vendedor la
garantiza, por lo que toda responsabilidad del Vendedor derivada de o relacionada con este tipo de
asesoramiento y asistencia queda excluida en la máxima medida permitida por la ley.
Artículo 10. Contenido y garantías<br>
1. Con las salvedades indicadas en este artículo 10, el Vendedor garantiza que en el momento
de la producción hasta la fecha de caducidad indicada, la composición de los Productos que
entrega es la indicada en el embalaje, o si procede, en las especificaciones acordadas en el
acuerdo de compra, siempre que los Productos se encuentren almacenados en las
condiciones adecuadas. Si no se indica ninguna fecha de caducidad, la fecha de caducidad
será 3 (tres) meses a partir de la fecha de entrega. Esta es la única garantía ofrecida. El
Comprador reconoce y las partes convienen que la composición de los Productos puede
cambiar como resultado del paso del tiempo, la temperatura, la humedad y otros factores
ambientales.<br>
2. A excepción de lo establecido en el acuerdo de compra, todas las garantías, condiciones y
otros términos implícitos en la ley vigente quedan excluidos del acuerdo de compra en la
mayor medida permitida por la ley.<br>
3. Las especificaciones acordadas son valores promedio, a menos que se notifique lo contrario
al Comprador, y se aplicará cualquier tolerancia analítica en vigor en el momento de la
entrega a estos valores promedio. Los datos de pruebas solo se adquieren en circunstancias
específicas, por lo que los resultados pueden variar en función del producto y las
circunstancias. Las imágenes, planos y resultados de las pruebas o muestras, tamaños,
peso, estabilidad química y otras especificaciones técnicas se aplican de forma orientativa y
para dar una impresión general del producto.<br>
4. El Vendedor garantiza al Comprador que los servicios acordados en virtud de un acuerdo de
compra se prestarán con el adecuado esmero y destreza.<br>
Artículo 11. Fuerza mayor<br>
1. El Vendedor no será responsable de ningún incumplimiento o retraso en el cumplimiento de
sus obligaciones en virtud del acuerdo de compra en la medida en que dicho incumplimiento
sea impedido, dificultado o retrasado como consecuencia de cualquier fuerza mayor; donde
"fuerza mayor" significa un hecho o circunstancia fuera del control directo del Vendedor,
incluyendo (sin limitación) inundación, incendio, explosión, rayo, terrorismo, restricciones de
transporte, contaminación, riesgo de contaminación, interrupción de las operaciones
comerciales, defectos o daños en los medios de producción, huelgas o acciones similares,
incumplimientos por parte de terceros, medidas del gobierno y la falta de materias primas o
estancamiento en el suministro de materias primas o productos semielaborados.<br>
2. En el caso de que una fuerza mayor se prolongue durante un período de más de cuatro
semanas consecutivas, cualquiera de las partes podrá rescindir el acuerdo de compra
mediante notificación escrita a la otra parte, surtiendo efecto dicha notificación en cuanto se
reciba.<br>
3. Si el Vendedor ha cumplido parcialmente con su obligación cuando se rescinda el acuerdo de
compra en virtud del presente artículo, el Comprador pagará la parte proporcional del precio
acordado en función del trabajo ya realizado.<br>
Artículo 12. Requisitos técnicos y legales<br>
El Vendedor se asegurará que los Productos entregados cumplen con los requisitos técnicos y
legales específicos o normas establecidas por las leyes y reglamentos del país en el que se
producen. El Comprador no importará los Productos a ningún país, sin garantizar que cumplen con
los requisitos técnicos y legales locales vigentes del país de importación. El Comprador indemnizará
al Vendedor frente a toda responsabilidad, costes y gastos incurridos como consecuencia del
incumplimiento por parte del Comprador de este artículo.<br>
Artículo 13. Confidencialidad<br>
Ninguna de las partes revelará información confidencial de la otra a terceros sin el consentimiento
previo por escrito de la otra parte ni utilizará dicha información confidencial, salvo a los efectos del
acuerdo de compra.<br>
Artículo 14. Legislación aplicable y fuero<br>
1. Si la sociedad del Vendedor está constituida en un estado, provincia u otra región
administrativa de un país que tiene un orden jurídico que regula los contratos comerciales,
entonces el acuerdo de compra se regirá e interpretará de acuerdo con las leyes de dicha
región administrativa. De lo contrario, el acuerdo de compra se regirá e interpretará de
acuerdo con las leyes del país en el que esté constituida la sociedad del Vendedor. Queda
excluida la aplicación de la Convención de las Naciones Unidas sobre los Contratos de
Compraventa Internacional de Mercaderías, firmada en Viena el 11 de abril de 1980 (Series
de Tratados de 1981, 184 y 1986, 61).<br>
2. En relación con cualquier asunto derivado de o relacionado con el acuerdo de compra, las
partes acuerdan someterse irrevocablemente a la jurisdicción exclusiva de los tribunales del
estado, provincia u otra región administrativa en la que esté constituida la sociedad del
Vendedor (si procede) o de lo contrario, a la jurisdicción exclusiva de los tribunales del país
en el que esté constituida la sociedad del Vendedor.<br>
Artículo 15. Disposiciones finales<br>
1. Si un tribunal de jurisdicción competente dictamina que alguna de las disposiciones del
acuerdo de compra no es válida, legal o aplicable por cualquier motivo, tal disposición será
separada de las demás disposiciones del acuerdo de compra y el resto continuará en pleno
vigor y efecto.<br>
</div>
</div>
</div>
<?php require_once 'include/footer.php';?>
</body>
</html><file_sep><!doctype html>
<html lang = "es" dir = "ltr">
<head>
<meta charset="utf-8">
<link rel="stylesheet" href="css/estilo.css">
<title>Acerca de FLAAPU SRL</title>
</head>
<body>
<?php include 'include/header.php';?>
<div class="fondo-ad">
<div class="conteiner-ad">
<div class="ad">
<h1>Acerca de Candy Shop S.R.L.</h1>
<p>
Somos una empresa mayorista y minorista que se dedica a la venta de productos dulces altos en glucosa, de esos que te acercan cada vez más a la diabetes. <br>
Nuestro nombre se inspiro en la canción de 50 cent o 50 centavos.
Viste wacho? Alto nombre le pusimos a la empresita, igual tranqui que falta cargar datos, tipo onda los productos y esas cosas, pero como todavía no se como hacerlo, queda en stand by.
<br> Bienvenides, <s>compren mucho asi nos enriquecemos nosotrxs.</s> esto todavía no va porque no hay productos para comprar pero chill, ya va a haber. <br>
Gracias, vuelvas prontos dijo Apus Nahasapeemapetilon. ♥<br>
</p>
</div>
</div>
</div>
<br>
<br>
<!--<img width="300" height="300" src="img/spin.png"> -->
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<hr>
<hr>
<?php include 'include/footer.php';?>
</body>
<file_sep><!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link href="https://fonts.googleapis.com/css2?family=Monoton&display=swap" rel="stylesheet">
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.2/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous">
</head>
<body>
<?php
$action = "";
$actionurl = "";
$action2 = "";
$actionurl2 = "";
$urlsesion = "";
if($_SESSION['sesioniniciada'] != '0')
{
$urlsesion = "php/miperfil.php";
$action = "Configuracion";
$actionurl = "configuracion.php";
$action2 = "Salir";
$actionurl2 = "php/deslogear.php";
}else{
$_SESSION['sesioniniciada'] = '';
$action = "Iniciar sesion";
$actionurl = "iniciarsesion.php";
$action2 = "Registrarse";
$actionurl2 = "registrarse.php";
}
?>
<section class="fondoindex">
<ul class="nav justify-content-end">
<!--<p style="color: #F4F1BB" style="text_align:center" style="margin-top: -12px">Bienvenide: </p> -->
<li class="nav-item">
<a class="nav-link" style="color: #F4F1BB" href=<?php echo $urlsesion; ?>><?php echo $_SESSION['sesioniniciada']; ?> </a>
</li>
<li class="nav-item">
<a class="nav-link" style="color: #F4F1BB" href=<?php echo $actionurl; ?>><?php echo $action; ?> </a>
</li>
<li class="nav-item">
<a class="nav-link" style="color: #F4F1BB" href=<?php echo $actionurl2; ?>><?php echo $action2; ?></a>
</li>
<li class="nav-item">
<a href="index.php"><img src="./img/shopping-cart.svg" alt=""></a>
</li>
</ul>
</section>
</body>
</html>
<!--
<section class="fondoindex">
<ul class="nav justify-content-end">
<p style="color: #F4F1BB" style="text_align:center" style="margin-top: -12px">Bienvenide: </p>
<li class="nav-item">
<a class="nav-link" style="color: #F4F1BB" href="php/miperfil.php"> echo $_SESSION['user']; </a>
</li>
<li class="nav-item">
<a class="nav-link" style="color: #F4F1BB" href="configuracion.php">Configuración</a>
</li>
<li class="nav-item">
<a class="nav-link" style="color: #F4F1BB" href="php/deslogear.php">Salir</a>
</li>
</ul>
</section>
</body>
</html>
-->
<file_sep><!doctype html>
<html lang = "es" dir = "ltr">
<head>
<meta charset="utf-8">
<link rel="stylesheet" href="css/estilo.css">
<title>Bienvenide a esta hermosa pagina</title>
</head>
<body>
<?php include 'include/header.php';?>
<h2 style="background: #F4F1BB; opacity: 0.8"><font size=20>Ingresó correctamente, bienvenide!</font></h2>
<!-- PORTADA-->
<?php include 'include/portada.php';?>
<!-- FIN PORTADA-->
<hr>
<br>
<br>
<div class="mx-auto" style="width: 200px;">
<img width="300" height="300" src="img/pighappy.png">
</div>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<br>
<?php require_once 'include/footer.php';?>
</body>
</html>
<file_sep><?php
session_start();
/*error_reporting(0); // una vez que termino el proyecto, activarlo.
if(isset($_SESSION['user'])){
include("barra.php");
}else{
include("barradeslogueado.php");
}
*/
error_reporting(0); // una vez que termino el proyecto, activarlo.
$_SESSION['sesioniniciada'] = '0';
if(isset($_SESSION['user'])){
$_SESSION['sesioniniciada'] = $_SESSION['user'];
include("barra.php");
}else{
$url = $_SERVER["REQUEST_URI"];
$urlinvalida = [
1 => "inicio.php",
2 => "configuracion.php",
3 => "php/deslogear.php",
];
foreach ($urlinvalida as $invalida){
if (strpos($url, $invalida) !== false){
echo "<script>
alert('Usted NO tiene autorización, váyase de aquí')
window.location= 'iniciarsesion.php'
</script>";
}
}
include("barra.php");
}
?>
<file_sep><?php
if(isset($_POST['agregarStock']))
{
$cod = $_POST['producto'];
$cantidad = $_POST['cantidad'];
$conn = new mysqli("localhost", "user_modify", "umodify1", "candyshopbdd");
$sql = "UPDATE productos SET stock=stock+$cantidad WHERE codigo_producto=$cod";
$respuesta = mysqli_query($conn, $sql);
    if($respuesta){
        // Show the confirmation alert, then send the browser back to the settings page.
        echo "<script>alert('producto agregado, gracias señor'); window.location='../configuracion.php';</script>";
    }
else{
echo "noo, algo salio mal señor, por favor intente de nuevo";
}
}
/*<script type="text/javascript">
function confirmar(){
if(confirm("Realmente desea eliminar este producto?"))
{
return true;
}
return false;
}
</script> */
if(isset ($_POST['eliminarStock'])){
$cod = $_POST['producto'];
$con = new mysqli("localhost", "user_delete", "udelete1", "candyshopbdd");
$sql= "DELETE FROM productos WHERE codigo_producto=$cod";
$resultado=mysqli_query($con, $sql);
    if($resultado){
        // Show the confirmation alert, then send the browser back to the settings page.
        echo "<script>alert('producto eliminado'); window.location='../configuracion.php';</script>";
    }
else {
echo "epa, salio mal algo, no va sa poder eliminarlo";
}
}
?><file_sep><?php
/* SESSIONES */
session_start();
//registrando usuario
if(isset($_POST['registro'])){
$user = $_POST['usrname'];
$password = <PASSWORD>('<PASSWORD>', $_POST['psw']);
$passwordrepeat = hash('<PASSWORD>', $_POST['psw-repeat']);
$correo = $_POST['email'];
require_once('../modelos/validacionbdd.php');
$oUsuario = new Usuario();
if(!empty($user))
{
if(!empty($password)&&(!empty($passwordrepeat)))
{
if($password == $passwordrepeat)
{
if(!empty($correo)){
$oUsuario->setUsuario($user);
$oUsuario->setPassword($password);
$oUsuario->setEmail($correo);
$oUsuario->save(); //compruebo las contraseñas si son iguales, guardo los datos y sino, me tira una alerta.
}else{
echo "Ingresa un correo, por favor";
}
}
else{
echo "<script>alert('contraseñas no coinciden')
window.location='../registrarse.php';</script>";
}
}
else{
echo "Ingresa una contraseña, por favor";
}
}else{
echo "Ingresa un usuario, por favor";
}
}
//logear el usuario
if(isset($_POST['login']))
{
$usuario = $_POST['usuario'];
$password = <PASSWORD>('<PASSWORD>',$_POST['<PASSWORD>']);
$captcha = $_POST['captcha_dato'];
if (!empty($captcha))
{
if($_SESSION['rand_code'] == $captcha)
{
require_once('../modelos/validacionbdd.php');
$oUsuario = new Usuario;
$oUsuario->login($usuario, $password);
//$_SESSION['pass'] = $password;
}else{
echo "<script>
alert('Captcha Incorrecto')
window.location= '../iniciarsesion.php'
</script>";
}
}else {
echo "<script>
alert('Ingrese el captcha para continuar')
window.location= '../iniciarsesion.php'
</script>";
}
}
?><file_sep><!doctype html>
<html lang = "es" dir = "ltr">
<head>
<meta charset="utf-8">
<link rel="stylesheet" href="css/estilo.css">
<title>Registrate con toda la onda</title>
</head>
<body>
<form name="registro" method="post" action="php/procesologin.php">
<?php require 'include/header.php';?>
<section class="form-login2">
<div class="centrando">
<h1>Registro</h1>
<p>Por favor, ingresa todos los valores requeridos en los campos.</p>
<hr>
<li>
<label for="namereg"><b>Usuario</b></label>
<input type="text" class="celda" placeholder="Ingresa usuario" name="usrname" id="user" >
</li>
<br>
<li>
<label for="email"><b>Email</b></label>
<input type="text" class="celda" placeholder="Ingresa Email" name="email" id="email" >
</li>
<br>
<li>
<label for="psw"><b>Password</b></label>
<input type="password" class="celda" placeholder="Ingresa password" name="psw" id="psw" >
</li>
<br>
<li>
<label for="psw-repeat"><b>Repetir password</b></label>
<input type="password" class="celda" placeholder="In<PASSWORD>" name="psw-repeat" id="psw-repeat" >
</li>
<hr>
<!--<li>
<img src="./include/captcha.php"><input type="text" class="celda" name="captcha_dato"><button id="btn_recargar2">Recargar captcha </button>
</li>-->
<br>
<div class="centrandoder">
<p>Si aceptas crear una cuenta, quiere decir que aceptas los <a href="terminosycondiciones.php">Terminos y condiciones</a> de nuestro sitio.</p>
<li>
<button type="submit" class="buttons2" name="registro">Registrarse</button>
</li>
</div>
<div class="centrandoder">
<p>Ya tenes una cuenta?<a href="iniciarsesion.php"> Iniciar sesion</a>.</p>
</div>
<?php
//boton de registro, mando todos los datos a procesologin.php
if(isset($_POST['registro'])){
include("php/procesologin.php");
}
?>
</section>
</form>
<?php require_once 'include/footer.php'; ?>
</body>
</html> <file_sep><!doctype html>
<html lang = "es" dir = "ltr">
<head>
<meta charset="utf-8">
<title>Contacto</title>
<link rel="stylesheet" href="css/estilo.css">
</head>
<body>
<?php include 'include/header.php';?>
<section class="bordetexto">
<br>
<?php echo "<p>En el marco de la emergencia sanitaria (COVID-19) y mientras dure el aislamiento social, preventivo y <br> obligatorio no se realizará atención presencial, quedando disponibles sólo los <br> canales electrónicos y telefónicos.</p>" ?>
</section>
<section class="bloquecontacto">
<div class="container px-lg-5">
<div class="row mx-lg-n5">
<div class="col">
<p>Contacto: </p>
<br>
<a href="mailto:<EMAIL>"><img width="30" height="30" src="img/correo-electronico.svg" alt="Correo"/><?php echo" "?> Enviar un correo electrónico</a>
<br>
<br>
<a href="https://api.whatsapp.com/send?phone=5493435004677&text=Hola!%20quiero%20saber%20de%20los%20productos!"><img width="30" height="30" src="img/whatsapp.svg" alt="wsp"/><?php echo" "?>Enviar un whatsapp!</a>
</div>
<div class="col">
<p>Ubicación:</p>
<iframe src="https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d3389.6482908601856!2d-60.52359797503822!3d-31.83456970411642!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x95b44b973b15bedd%3A0xa59a2314fe8063f2!2sFacultad%20de%20Ciencia%20y%20Tecnolog%C3%ADa%20(UADER)!5e0!3m2!1ses!2sar!4v1599715613430!5m2!1ses!2sar" width="400" height="300" frameborder="0" style="border:0;" allowfullscreen="" aria-hidden="false" tabindex="0"></iframe>
</div>
</div>
</div>
<hr>
</section>
<br>
<br>
<br>
<br>
<br>
<?php require_once 'include/footer.php';?>
</body>
</html><file_sep><?php error_reporting(0);?>
<!doctype html>
<html lang = "es" dir = "ltr">
<head>
<meta charset="utf-8">
<title>Formulario de Login</title>
<link rel="stylesheet" href="css/estilo.css">
<script type="text/javascript" src="https://code.jquery.com/jquery-1.12.2.js" integrity="<KEY>" crossorigin="anonymous"></script>
<script type="text/javascript">
$(function(){
$('#btn_recargar').click(function(){
document.location.reload();
return false;
});
});
</script>
</head>
<body>
<form action="php/procesologin.php" method="post">
<?php include 'include/header.php';?>
<section class="form-login">
<h5>Formulario de Login</h5>
<table style="text-align: center">
<tr>
<input class="controls" type="text" name="usuario" value="" placeholder="Usuario" required>
</tr>
<tr>
<input class="controls" type="password" name="contraseña" value="" placeholder="<PASSWORD>" required>
</tr>
<tr>
<td><img src="./include/captcha.php"></td>
</tr>
<tr>
<td><input type="text" name="captcha_dato"></td>
<td><button id="btn_recargar">Recargar captcha</button></td>
</tr>
<tr>
<td><input class="buttons" type="submit" name="login" value="Ingresar"> </td>
</tr>
</table>
<hr>
<p><a href="forgotpw.php">¿Olvidaste tu contraseña?</a></p>
<?php
if(isset($_POST['login'])){
include("php/procesologin.php");
}
/*if (!isset($_SESSION) && (array_key_exists("error",$_SESSION))){
echo $_SESSION['error'];
}*/
?>
<hr>
</section>
</form>
<?php require_once 'include/footer.php';?>
</body>
</html>
<file_sep><?php
//codigo de fede, gracias fede
if(!isset($_SESSION)){
session_start();
}
if ( !empty($_SESSION['rand_code']) )
{
unset($_SESSION['rand_code']);
}
if (empty($_SESSION['rand_code']))
{
$captcha = "";
$char_permitidos = "123456789abcdefghijklmnpqrstuvwxyzABCDEFGHIJKLMNPQRSTUVWXYZ";
for ($i = 0; $i <= 5; $i++)
{
$captcha .= $char_permitidos[rand(0, 58)];
}
$_SESSION['rand_code'] = $captcha;
}
header ('Content-Type: image/png');
$image = imagecreatetruecolor(80, 30);
$color_texto = imagecolorallocate($image, 235, 99, 107);
imagestring($image, 10, 10, 5,$captcha, $color_texto);
imagepng($image);
imagedestroy($image);
################################################################################################################################################
?>
<file_sep><?php
class Gestor extends Producto{
private $productos;
private $cantidad = 0;
public function __construct()
{
$con = new mysqli('localhost', 'user_normal', 'unormal1', 'candyshopbdd');
//$con = new Conexion();
//$con->CambiarPrivilegio(1);
//error
if ($con ->connect_errno){
echo "Error al conectar con la base de datos";
exit();
}
$i=0;
$sql = "SELECT * from productos";
$resultado = mysqli_query($con, $sql);
//recorro todo el array buscando mi consulta
while($row = mysqli_fetch_array($resultado)){
$oProducto = new Producto($row['codigo_producto'], $row['nombre_producto'], $row['descripcion_producto'], $row['stock'], $row['precio'], $row['imagen']);
$this->productos[$i]= $oProducto;
$i++;
}
$con->close();
//$con->CerrarConexion();
}
//mostrar productos en el index, de todo pal user
public function mostrarProductos(){
for($i = 0; $i<count($this->productos); $i++)
{
$this->productos[$i]->mostrar();
}
}
//mostrar tabla pal proveedor que nos va a de proveer de sus provedurias
public function mostrarProductosAlProv(){
for($i = 0; $i<count($this->productos); $i++)
{
$this->productos[$i]->mostrarPProv();
}
}
}
class Producto{
private $codigo;
private $nombre;
private $descripcion;
private $stock;
private $precio;
private $imagen;
private $cantidad;
//Constructor del producto, cargo todos los datos
public function __construct($cod, $nom, $des, $stock, $pre, $img)
{
$this->codigo = $cod;
$this->nombre = $nom;
$this->descripcion = $des;
$this->stock = $stock;
$this->precio = $pre;
$this->imagen = $img;
}
//getters
/*
public function getCodigo(){
return $this->codigo;
}
public function getNombre(){
return $this->nombre;
}
public function getStock(){
return $this->stock;
}
public function getPrecio(){
return $this->precio;
}*/
/*mostrando los productos de la bdd
@parametro Producto $codigo
@parametro Producto $nombre
@parametro Producto $descripcion
@parametro Producto $stock
@parametro Producto $precio
@parametro Producto $imagen
@return Producto
*/
public function mostrar(){
//quite la etiqueta a de todo el box <a href="index.php?p='.$this->codigo.'"> </a>
echo '
<div class="box-producto">
<div class="producto">
<img src="img/productos/'.$this->imagen.'">
<div class="detail-title"><b>'.$this->nombre.'</b></div>
<div class="description">'.$this->descripcion.'</div>
<div class="detalle-precio">ARG./ $'.$this->precio.'</div>';
if($this->stock>0){
//seria con el id o con el value?
echo '<a href="index.php?action=add&id='.$this->codigo.'" class="btn-agregar">Comprar</a>';}
else{echo '<a class="btn-agregar" disabled>No hay stock</a>';}
echo'
</div>
</div>
';
if(isset($_POST['action']))
{
include("index.php");
}
}
////////////////////ACA ARRANCO CON LO DEL PROVEEDOR
//armo la tablita pa mostrar todos los productos al proveedor que nos va a proveer bue seguia
/*Nota de la creadora: odio usar javascript*/
public function mostrarPProv(){
//$cantidad = '<script>document.write(ChangeNumber())</script>';
echo
'<form action="./php/modificarProducto.php" method="POST">
<tr>
<td>'.$this->nombre.'</td>
<td>'.$this->stock.'</td>
<td><input name="cantidad" type="number" value=0 min=0 pattern="^[0-9]+"></td>
<td style="width: 200px">'.number_format($this->precio, 2).'</td>
<td style="width: 200px">'.number_format($this->precio*$this->cantidad,2).'</td>
<td>
<input name="agregarStock" value="Agregar" type="submit" class="btn-agregar">
</td>
<input name="producto" type="hidden" value="'.$this->codigo.'">
<td>
<input name="eliminarStock" value="Eliminar" type="submit" class="btn-quitar" >
</td>
</tr>
</form>
';
}
/*
//ESTO TENIA QUE HACERLO CON OBJETO PERO ME PETO LA MENTE
//VALIDAR BOTON PA' QUE EL PROVEEDOR AGREGUE LO QUE NOS VA A PROVEER JAJASJKFJAS
public function proveedor_agregar($cantidad, $cod){
$conn = new mysqli("localhost", "user_modify", "umodify1", "candyshop");
$sql = "UPDATE productos SET stock=stock+$cantidad WHERE codigo_producto=$cod";
$respuesta = mysqli_query($conn, $sql);
if($respuesta){
echo"producto agregado, gracias señor";
header("Location: configuracion.php");
}
else{
echo "noo, algo salio mal señor, por favor intente de nuevo";
}
}
//Funcion para que el proveedor pueda ingresar un nuevo producto
public function insertarProductos($nombre, $descripcion, $precio){
$con = new mysqli("localhost", "user_modify", "umodify1", "candyshopbdd");
$imagen = 'imagenotfound.jpg';
$stock = 0;
$sql = "INSERT INTO productos(nombre_producto, descripcion_producto, stock, precio, imagen) VALUES ('$nombre', '$descripcion', $stock, $precio, '$imagen')";
$respuesta = mysqli_query($con, $sql);
if($respuesta){
header("Location: configuracion.php");
echo "gracias señor por proveernos de un nuevo producto <3";
}
else{
echo "noo señor, no pudo agregar su nuevo producto que tanto queremos";
}
}
public function deletearProducto($cod){
$con = new mysqli("localhost", "user_delete", "udelete1", "candyshopbdd");
$sql= "DELETE FROM productos WHERE codigo_producto=$cod";
$resultado=mysqli_query($con, $sql);
if($resultado){
echo "producto eliminado";
}
else {
echo "epa, salio mal algo, no va sa poder eliminarlo";
}
}*/
}
?><file_sep><?php
Class Conexion{
private $con;
public function __construct()
{
$this->con = NULL;
}
public function getCon(){
return $this->con;
}
public function CambiarPrivilegio($priviledge){
switch($priviledge){
case 1:
$this->con = new mysqli("localhost", "root", "", "candyshopbdd");
break;
case 2: $this->con = new mysqli("localhost", "user_normal", "unormal1", "candyshopbdd");
break;
case 3: $this->con = new mysqli("localhost", "user_modify", "umodify1", "candyshopbdd");
break;
case 4: $this->con = new mysqli("localhost", "user_delete", "udelete1", "candyshopbdd");
break;
}
return $this->con;
}
public function CerrarConexion(){
if($this->con!=NULL){
$this->con->close();
$this->con = NULL;
}
}
}
?><file_sep><!doctype html>
<html lang = "es" dir = "ltr">
<head>
<meta charset="utf-8">
<link rel="stylesheet" type="text/css" href="css/estilo.css">
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.5.2/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<link rel="stylesheet" href="./font-awesome/css/font-awesome.min.css">
<link href="https://fonts.googleapis.com/css?family=Sen&display=swap" rel="stylesheet">
<link href="https://fonts.googleapis.com/css2?family=Raleway:wght@200&display=swap" rel="stylesheet">
<link href="https://fonts.googleapis.com/css2?family=Passion+One&display=swap" rel="stylesheet">
<link href="https://fonts.googleapis.com/css2?family=Balsamiq+Sans&display=swap" rel="stylesheet">
<link href="https://fonts.googleapis.com/css2?family=Monoton&display=swap" rel="stylesheet">
<link href="https://fonts.googleapis.com/css2?family=Advent+Pro:wght@600&family=Open+Sans+Condensed:wght@700&display=swap" rel="stylesheet">
<link href="https://fonts.googleapis.com/css2?family=Advent+Pro:wght@600&family=Open+Sans+Condensed:wght@700&family=Raleway:wght@500&display=swap" rel="stylesheet">
</head>
<body>
<section class="lindura">
<ul class="nav justify-content-left">
<li class="nav-item">
<a class="nav-link active" href="./index.php">INICIO</a>
</li>
<!--
<li class="nav-item">
intento de listado
<div class="dropdown">
<button class="btn dropdown-toggle" type="button" id="dropdownMenuButton" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
<div class="col">
PRODUCTOS
</div>
</button>
<div class="dropdown-menu" style="opacity: 0.9" aria-labelledby="dropdownMenuButton">
<a class="dropdown-item" href="#">• Gomitas</a>
<a class="dropdown-item" href="#">• Chocolates</a>
<a class="dropdown-item" href="./productos.php">• Malvaviscos</a>
<a class="dropdown-item" href="#">• Caramelos</a>
<a class="dropdown-item" href="#">• Más cositas</a>
<a class="dropdown-item" href="#">• OFERTAS!</a>
</div>
</div>
fin de intento
</li>-->
<li class="nav-item">
<a class="nav-link active" href="./contacto.php">CONTACTO</a>
</li>
<li class="nav-item">
<a class="nav-link active" href="./acercade.php">QUIENES SOMOS</a>
</li>
<!-- buscador-->
<li class="nav-item">
<!--form busqueda-->
<input type="text" id="idbusqueda" placeholder="Busca lo que quieras!">
<button class="btn-main btn-search" name="busqueda"><i class="fa fa-search" aria-hidden="true"></i></button>
</li>
<!--fin buscador-->
</ul>
</section>
<!--hacer boton de la busqueda y del carrito-->
<?php include 'php/validarsesion.php';?>
</body>
</html><file_sep><?php
include("modelos/productosbdd.php");
include("modelos/carritobdd.php");
$oProduct = new Gestor();
$oCart = new Carrito();
if (isset($_GET['action'])){
switch($_GET['action']){
case 'add':
$oCart->agregar_item($_GET['id']);
break;
case 'remove':
$oCart->remove_item($_GET['id']);
break;
case 'confirm':
$oCart->remover_product($_GET['id']);
}
}
//print_r($_SESSION);
?>
<!doctype html>
<html lang = "es" dir = "ltr">
<meta charset="utf-8">
<title>FLAAPU'S PAGE</title>
<link rel="stylesheet" href="css/estilo.css">
<script type="text/javascript" src="js/jquery-3.4.1.min.js"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/limonte-sweetalert2/6.11.0/sweetalert2.css"/>
<script src="https://code.jquery.com/jquery-3.2.1.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/limonte-sweetalert2/6.11.0/sweetalert2.js"></script>
</head>
<body>
<form action="index.php" method="get">
<?php include 'include/header.php';?>
<?php include 'include/portada.php';?>
<!-- CARRITO, DESPUES LO PASO A OTRO LADO .PHP -->
<table class="table table-sm" style="background: #e9c7cd">
<thead>
<tr>
<th> MI CARRITO DE COMPRAS</th>
</tr>
<tr>
<th>Total a pagar: <?=$oCart->get_total_payment();?></th>
<th>Total de productos: <?=$oCart->get_total_items();?></th>
</tr>
</thead>
<thead>
<tr>
<th scope="col">Nombre Producto</th>
<th scope="col">Cantidad</th>
<th scope="col">Precio ARG./</th>
<th scope="col">Total</th>
<th scope="col"> </th>
<th scope="col"> </th>
</tr>
</thead>
<tbody>
<?=$oCart->get_items();?>
<tr>
<td></td>
<td></td>
<td></td>
<td></td>
<td><a href="index.php?action=confirm&id='$code'" class="btn-comprar">Confirmar compra</a></td>
<!--<button type="submit" class="btn-comprar" name="confirmar" id='.$this->codigo.'>Confirmar</button></td>-->
</tr>
</tbody>
</table>
<!-- PRODUCTOS MUESTRA -->
<div class="main-content">
<div class="content-page">
<div class="titulo-producto">Nuestros productos</div>
<div class="lista-productos" id="espacio-lista">
<?php $oProduct->mostrarProductos(); ?>
</div>
</div>
</div>
<?php require_once 'include/footer.php';?>
</form>
</body>
</html><file_sep><!doctype html>
<html lang = "es" dir = "ltr">
<head>
<meta charset="utf-8">
<link rel="stylesheet" href="css/estilo.css">
<title>Olvide mi contraseña, aiuda</title>
</head>
<body>
<?php require 'include/header.php';?>
<form action="<?php echo htmlspecialchars($_SERVER['PHP_SELF']);?>" method="post">
<br>
<h1 style="background : #f4f1bb"><font size=30>Recuperando la contraseña que te olvidaste, crack</font></h1>
<br>
<section class="recuperando">
<div class="container">
<div class="row">
<div class="col-9">
<input class="controlsREG" type="email" name="correo" value="" placeholder="Correo electronico" required>
</div>
<br>
<div class="col-4">
<input class="buttons2" type="submit" name="enviar" value="Enviar">
</div>
</div>
</div>
<p>
<?php
if(isset($_POST['enviar'])){
$correo = $_POST['correo'];
include("php/validarcorreo.php");
}
?>
</p>
</section>
</form>
<?php require_once 'include/footer.php';?>
</body>
</html>
<file_sep><?php
class Usuario{
private $username;
private $password;
private $correoelectronico;
private $mysqli;
//constructor y conexion
public function __construct()
{
$this->mysqli = new mysqli('localhost', 'user_normal', 'unormal1', 'candyshopbdd');
$conec = $this->mysqli;
//error
if ($conec ->connect_errno){
echo "Error al conectar con la base de datos";
exit();
}
}
//setters
public function setUsuario($username){
if(ctype_alnum($username)==true){
$this->username = $username;
}
else{ echo "<script>
alert('ERROR EL USUARIO NO ES VALIDO');
window.location='../registrarse.php'
</script>";}
}
public function setPassword($password){
if(ctype_alnum($password)==true){
$this->password = $password;
}
else{ echo "<script>
alert('ERROR LA PASSWORD NO ES VALIDA');
window.location='../registrarse.php'
</script>";}
}
public function setEmail($correoelectronico){
if (filter_var($correoelectronico, FILTER_VALIDATE_EMAIL)) {
$this->correoelectronico = $correoelectronico;
}
}
//vector vector aca lo llamo pa logearme
public function toArray()
{
$vUsuario = array(
'usuario' => $this->username,
'contraseña' => $this->password,
'email' => $this->correoelectronico,
);
return $vUsuario;
}
//guardando
public function save ()
{ $conn = new mysqli('localhost', 'user_modify', 'umodify1', 'candyshopbdd');
$sql = "INSERT INTO usuarios(username, clave, correoelectronico) VALUES ('$this->username','$this->password','$this->correoelectronico')";
$comprobarconexion = mysqli_query($conn,$sql);
if($comprobarconexion)
{ echo "<script>
alert('Te registraste masterrr');
window.location='../index.php'
</script>";}
else{
echo "<script>
alert('Che salio un error al guardar tu usuario, proba de nuevo');
window.location='../registrarse.php'
</script>";
}
}
//comprobando el login
public function login($username, $password)
{
$sql = "SELECT clave from usuarios WHERE username='$username'";
$resultado = $this->mysqli->query($sql);
$row = mysqli_fetch_row($resultado);
if($row['0'] == $password)
{
$_SESSION['user'] = $username;
echo "
<script>
alert('Iniciaste sesion correctamente');
window.location='../inicio.php'
</script>";
}
else{
echo "
<script>
alert('Mmmm clave o usuario incorrecto');
window.location='../iniciarsesion.php'
</script>";
}
}
//estas 2 funciones no las llamo a ningun lado
/*
public function update($usuarios){
$conn = new mysqli('localhost', 'user_modify', 'umodify1', 'candyshopbdd');
$sql="UPDATE usuarios SET usuario='$this->username', contraseña ='<PASSWORD>', email='$this->correoelectronico' WHERE usuario = $usuarios";
$conn->query($sql);
echo "Se ejecuta: <br>";
echo $sql;
}
public function delete($idusuarios)
{ $conn = new mysqli('localhost', 'user_delete', 'umdelete1', 'candyshopbdd');
$sql = "DELETE FROM usuarios WHERE idusuarios = $idusuarios";
$conn->query($sql);
echo "Se ejecuta: <br>";
echo $sql;
}*/
}
?><file_sep>-- phpMyAdmin SQL Dump
-- version 5.0.2
-- https://www.phpmyadmin.net/
--
-- Servidor: 127.0.0.1
-- Tiempo de generación: 03-11-2020 a las 01:08:51
-- Versión del servidor: 10.4.13-MariaDB
-- Versión de PHP: 7.4.8
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Base de datos: `candyshopbdd`
--
-- --------------------------------------------------------
--
-- Estructura de tabla para la tabla `productos`
--
CREATE TABLE `productos` (
`codigo_producto` int(11) NOT NULL,
`nombre_producto` varchar(50) NOT NULL,
`descripcion_producto` varchar(200) NOT NULL,
`stock` int(11) NOT NULL,
`precio` decimal(20,2) NOT NULL,
`imagen` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Volcado de datos para la tabla `productos`
--
INSERT INTO `productos` (`codigo_producto`, `nombre_producto`, `descripcion_producto`, `stock`, `precio`, `imagen`) VALUES
(1, 'Chocolate', 'De dudosa procedencia. x100g', 485, '61.00', 'chocolate.png'),
(2, 'Sugus', 'Cada 200gr', 23, '151.50', 'sugus.jpg'),
(3, 'Alfajor Aguila', 'Alfajor negro. Por unidad', 251, '80.00', 'alfajor-aguila.jpg'),
(4, 'Alfajor Negro 3.0', 'Alfajor negro Bagley. Por unidad', 197, '80.00', 'alfajor-negro.png'),
(5, '<NAME>', 'Alfajor negro Terrabusi. Por unidad', 210, '80.00', 'alfajor-terrabusi.png'),
(6, 'Chocolate Aguila', 'Chocolate Aguila amargo x150gr', 100, '130.00', 'chocolate-aguila.png'),
(7, 'Chocolate Cofler Air', 'Chocolate Negro Cofler Air x55gr', 224, '110.00', 'cofler-air.jpeg'),
(8, 'Chocolate Cofler Frutilla', 'Chocolate Cofler de Yogurt Sabor Frutilla x55gr', 398, '110.50', 'cofler-frutilla.png'),
(9, 'Chocolate Milka', 'Chocolate Milka Extra Cacao x100gr', 121, '159.99', 'milka-extra.jpg'),
(10, 'Chocolate Milka Oreo', 'Chocolate Milka Oreo x100gr', 1500, '150.99', 'milka-oreo.jpg'),
(11, 'Oblea Cofler', 'Oblea Cofler Block', 21, '35.00', 'oblea-block.png'),
(12, 'Terrabusi Snacky', 'Terrabusi Snacky x60gr', 10, '40.00', 'terrabusi-snaky.jpg'),
(13, 'Mantecol', 'Mantecol Bocadito x25grs Libre de Gluten', 12, '65.35', 'mantecol.png'),
(14, '<NAME>', 'Por unidad', 148, '30.72', 'bonobonaguila.png'),
(30, '<NAME>', 'de frutilla, por unidad', 0, '13.00', 'imagenotfound.jpg');
-- --------------------------------------------------------
--
-- Estructura de tabla para la tabla `usuarios`
--
CREATE TABLE `usuarios` (
`username` varchar(10) NOT NULL,
`clave` varchar(150) NOT NULL,
`correoelectronico` varchar(40) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Volcado de datos para la tabla `usuarios`
--
INSERT INTO `usuarios` (`username`, `clave`, `correoelectronico`) VALUES
('fcytuader', '<KEY>', '<EMAIL>'),
('flaapu', '<KEY>', '<EMAIL>'),
('proveedor', '<KEY>', '<EMAIL>'),
('sportup', '<KEY>', '<EMAIL>');
--
-- Índices para tablas volcadas
--
--
-- Indices de la tabla `productos`
--
ALTER TABLE `productos`
ADD PRIMARY KEY (`codigo_producto`);
--
-- Indices de la tabla `usuarios`
--
ALTER TABLE `usuarios`
ADD PRIMARY KEY (`username`),
ADD UNIQUE KEY `correoelectronico` (`correoelectronico`);
--
-- AUTO_INCREMENT de las tablas volcadas
--
--
-- AUTO_INCREMENT de la tabla `productos`
--
ALTER TABLE `productos`
MODIFY `codigo_producto` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=32;
COMMIT;
-- USUARIOS CON PRIVILEGIOS
# Privilegios para `user_delete`@`localhost`
GRANT SELECT, INSERT, UPDATE, DELETE, FILE ON *.* TO `user_delete`@`localhost` IDENTIFIED BY PASSWORD '*<PASSWORD>';
GRANT ALL PRIVILEGES ON `candyshopbdd`.* TO `user_delete`@`localhost`;
# Privilegios para `user_modify`@`localhost`
GRANT SELECT, INSERT, UPDATE, ALTER ON *.* TO `user_modify`@`localhost` IDENTIFIED BY PASSWORD '*<PASSWORD>';
# Privilegios para `user_normal`@`localhost`
GRANT SELECT ON *.* TO `user_normal`@`localhost` IDENTIFIED BY PASSWORD '*<PASSWORD>';
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep><?php
if (isset($_POST['addnewproduct'])){
$nombre = $_POST['nombre_p'];
$descripcion = $_POST['descripcion'];
$precio = $_POST['precio'];
$con = new mysqli("localhost", "user_modify", "umodify1", "candyshopbdd");
$imagen = 'imagenotfound.jpg';
$stock = 0;
$sql = "INSERT INTO productos(nombre_producto, descripcion_producto, stock, precio, imagen) VALUES ('$nombre', '$descripcion', $stock, $precio, '$imagen')";
$respuesta = mysqli_query($con, $sql);
if($respuesta){
header("Location: ../configuracion.php");
echo "gracias señor por proveernos de un nuevo producto <3";
}
else{
echo "noo señor, no pudo agregar su nuevo producto que tanto queremos";
header("Location: ../configuracion.php");
}
}
?>
|
4c8cb43fb2db3747e313d374ca61a6c5853b4c35
|
[
"SQL",
"PHP"
] | 23
|
PHP
|
flaapu/Proyecto-ecommerce
|
55bb03d1f27664df8ed7dc0ecaea40d38d51ea17
|
fe3fe4f663b1199da4efd639a56f1d25a1a79b51
|
refs/heads/master
|
<repo_name>ArthurFig1998/portfolio<file_sep>/dryoar-project/script.js
function checkType()
{
var email = document.getElementById('email');
var phone = document.getElementById('phone');
var emailType = document.getElementById('emailType');
var phoneType = document.getElementById('phoneType');
if(email.checked)
{
emailType.style.display = "block";
phoneType.style.display = "none";
}
else if(phone.checked)
{
phoneType.style.display = "block";
emailType.style.display = "none";
}
}
function jack()
{
var jacksonSec = document.getElementById('jacksonHoleSec');
var yellowSec = document.getElementById('yellowstoneSec');
jacksonSec.style.display = "block";
yellowSec.style.display = "none";
}
function yellow()
{
var jacksonSec = document.getElementById('jacksonHoleSec');
var yellowSec = document.getElementById('yellowstoneSec');
jacksonSec.style.display = "none";
yellowSec.style.display = "block";
}<file_sep>/examples/heroesObject.js
const superman =
{
name: 'Superman',
'real name': '<NAME>',
height: 185, ///in cm///
weight: 215,
hero: true,
villain: false,
allies: ['Batman', 'Supergirl', 'Flash', 'Green Martian', 'Fire Storm'],
fly()
{
return 'To the infinity... and beyond!';
}
};
const flash =
{
name: 'The Flash',
'real name': '<NAME>',
height: 180, ///in cm///
weight: 190,
hero: true,
villain: false,
allies: ['Superman', 'Vibe', 'Killer Frost', '<NAME>', 'Kid Flash', 'Elongated Man'],
run()
{
return 'Run, Barry... Run!';
}
}
console.log(superman.name);
console.log(superman["real" + " " + "name"]);
'city' in superman;
superman.city !== undefined;
superman.hasOwnProperty('city');
for (const key in superman)
{
console.log(key + ": " + superman[key]);
}
for (const key of Object.keys(superman))
{
console.log(key);
}
for (const value of Object.values(superman))
{
console.log(value);
}
delete superman.fly;
console.log(flash.name);
console.log(flash["real" + " " + "name"]);
'city' in flash;
flash.city = 'Central City';
flash.hasOwnProperty('city');
for (const key in flash)
{
console.log(key + ": " + flash[key]);
}
for (const key of Object.keys(flash))
{
console.log(key);
}
for (const value of Object.values(flash))
{
console.log(value);
}
delete flash.run;
const JLeague =
{
superman:
{
realName: '<NAME>'
},
batman:
{
realName: '<NAME>'
},
wonderWoman:
{
realName: '<NAME>'
},
flash:
{
realName: '<NAME>'
},
aquaman:
{
realName: '<NAME>'
},
};
|
105e7a659ceaa1908c9a9606a5fbba10e768c928
|
[
"JavaScript"
] | 2
|
JavaScript
|
ArthurFig1998/portfolio
|
09c97a9498e0f6e68aa8a67711838d29b74165f2
|
769aa82969f9a0ef742770ebb5444d56c4b60714
|
refs/heads/master
|
<repo_name>huytxx/ute-cinema<file_sep>/tickets/migrations/0002_carausel.py
# Generated by Django 3.1.7 on 2021-03-13 13:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tickets', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Carausel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to='pics/%y/%m/%d/')),
('title', models.CharField(max_length=150)),
('sub_title', models.CharField(max_length=100)),
],
),
]
<file_sep>/ute_cinema_web/urls.py
from django.contrib import admin
from django.urls import path
from tickets import views
from django.conf import settings
from django.conf.urls.static import static
# URL routes for the site's pages
urlpatterns = [
path('admin/', admin.site.urls),
path('khuyen-mai', views.promotion_view),
path('', views.home_view),
path('chi-tiet', views.chi_tiet_view),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<file_sep>/tickets/apps.py
from django.apps import AppConfig
# Configuration class for the tickets app
class TicketsConfig(AppConfig):
name = 'tickets'
<file_sep>/tickets/models.py
from django.db import models
# Model representing a movie ticket
class Ticket(models.Model):
    name = models.CharField(max_length=1000)  # movie title
    time = models.CharField(max_length=50)  # showtime
    release_time = models.CharField(max_length=50)  # release date
    description = models.TextField()  # movie details
    type = models.CharField(max_length=1000)  # screening format (2D, 3D, ...)
    fit_for = models.CharField(max_length=100)  # suitable age range
    image = models.ImageField(upload_to='picture')  # movie poster image
def __str__(self):
return self.name
# Model for the header carousel of upcoming movies
class Carausel(models.Model):
image = models.ImageField(upload_to='picture')
title = models.CharField(max_length=150)
sub_title = models.CharField(max_length=100)
def __str__(self):
return self.title
# Choice list for the promotion types
TYPE_CHOICES = {
('promotion','Promotion'),
('phim','Phim'),
('doi_tac','Đối tác'),
}
# Model representing a promotion
class Promotion(models.Model):
    title = models.CharField(max_length=150)  # promotion title
    image = models.ImageField(upload_to='picture')  # promotion image
    start_time = models.CharField(max_length=50)  # promotion start date
    end_time = models.CharField(max_length=50)  # promotion end date
    type = models.CharField(max_length=90, choices=TYPE_CHOICES, default='promotion')  # what the promotion applies to
def __str__(self):
return self.title
<file_sep>/tickets/migrations/0003_auto_20210316_1633.py
# Generated by Django 3.1.7 on 2021-03-16 09:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tickets', '0002_carausel'),
]
operations = [
migrations.AlterField(
model_name='carausel',
name='image',
field=models.ImageField(upload_to='course_picture'),
),
]
<file_sep>/tickets/admin.py
from tickets.models import Carausel, Promotion, Ticket
from django.contrib import admin
class TicketAdmin(admin.ModelAdmin):
list_display = ('name', 'time', 'release_time', 'type')
search_fields = ('name',)
# Register the models with the admin site
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Carausel)
admin.site.register(Promotion)<file_sep>/static/js/main.js
$(document).ready(function() {
$('.btn-chi-tiet').on('click', function(event) {
event.preventDefault();
var str = $(this).attr('class')
var res = str.split(" ");
var des = $(this).find('#desTicket').text();
// console.log(res[0])
// var info_main = '.info_main_'+res[0];
// console.log(info_main);
// console.log($(`.info_main/`));
//$(location).attr('href', './chi-tiet');
$(this).first().removeClass('d-none')
localStorage.setItem('info_main', $(this).find('.info_main').html())
localStorage.setItem('imageSrc', res[0])
localStorage.setItem('description', des)
$(location).attr('href', './chi-tiet');
})
})
<file_sep>/tickets/views.py
from django.db.models.enums import Choices
from django.shortcuts import render
from django.utils.regex_helper import Choice
from tickets.models import Carausel, Promotion, Ticket
# Create your views here.
# View functions for each page of the project
# home page
def home_view(request):
carousel = Carausel.objects.all()
card = Ticket.objects.all()
    # only fetch promotions whose type is 'promotion'
promotion = Promotion.objects.filter(
type = 'promotion'
)
return render(request, 'tickets/home.html', {
'carousel': carousel,
'card': card,
'promotion': promotion
})
# promotions page
def promotion_view(request):
carousel = Carausel.objects.all()
    # only fetch promotions whose type is 'promotion'
promotion = Promotion.objects.filter(
type='promotion'
)
    # only fetch promotions whose type is 'phim' (movies)
phim = Promotion.objects.filter(
type = 'phim'
)
    # only fetch promotions whose type is 'doi_tac' (partners)
doi_tac = Promotion.objects.filter(
type = 'doi_tac'
)
return render(request, 'tickets/khuyen-mai.html', {
'carousel': carousel,
'promotion': promotion,
'phim': phim,
'doi_tac': doi_tac
})
# ticket detail page
def chi_tiet_view(request):
carousel = Carausel.objects.all()
return render(request, 'tickets/chi-tiet.html', {
'carousel': carousel
})<file_sep>/requirements.txt
Django==3.1.7
numpy==1.20.2
Pillow==8.1.2
psycopg2==2.8.6
requests==2.25.1
# Libraries required for the project
|
8f60c5912e7f6335e67244ac9191c101a58d2045
|
[
"JavaScript",
"Python",
"Text"
] | 9
|
Python
|
huytxx/ute-cinema
|
35b3e01da137deec28709db60efe733677669d42
|
3d519e33f38a0196b5567ae570057e3b11431ddb
|
refs/heads/master
|
<file_sep>import { Injectable } from '@angular/core';
import { Http, RequestOptionsArgs, Response, Headers, RequestOptions } from '@angular/http';
import { Observable, BehaviorSubject } from 'rxjs';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/toPromise';
@Injectable()
export class Service {
private searchUrl: string;
constructor(private _http: Http) { }
getProducts() {
this.searchUrl = `http://zend.com/album`;
return this._http.get(this.searchUrl)
.map((res) => { return res.json() })
}
}<file_sep><?php
namespace Album\Model;
class Album
{
public $id;
public $product_name;
public $sku;
public $stock;
public $prouduct_url;
public $price;
public $image;
public $image2;
public $image3;
public $image4;
public $image5;
public $manufacturer;
public function exchangeArray($data)
{
$this->product_name = (!empty($data['PRODUCT_NAME'])) ? $data['PRODUCT_NAME'] : null;
$this->sku = (!empty($data['SKU'])) ? $data['SKU'] : null;
$this->stock = (!empty($data['STOCK'])) ? $data['STOCK'] : null;
$this->prouduct_url = (!empty($data['PROUDUCT_URL'])) ? $data['PROUDUCT_URL'] : null;
$this->price = (!empty($data['PRICE'])) ? $data['PRICE'] : null;
$this->image = (!empty($data['IMAGE'])) ? $data['IMAGE'] : null;
$this->image2 = (!empty($data['IMAGE2'])) ? $data['IMAGE2'] : null;
$this->image3 = (!empty($data['IMAGE3'])) ? $data['IMAGE3'] : null;
$this->image4 = (!empty($data['IMAGE4'])) ? $data['IMAGE4'] : null;
$this->image5 = (!empty($data['IMAGE5'])) ? $data['IMAGE5'] : null;
$this->manufacturer = (!empty($data['MANUFACTURER'])) ? $data['MANUFACTURER'] : null;
}
}<file_sep><?php
namespace Album\Controller;
use Zend\Mvc\Controller\AbstractActionController;
use Zend\View\Model\ViewModel;
use Album\Model\Album;
use Zend\Config\Reader;
use Zend\Console\Request as ConsoleRequest;
use Zend\Http\Client;
use Zend\View\Model\JsonModel;
class AlbumController extends AbstractActionController
{
protected $albumTable;
protected $result = [];
protected $num = [];
public function indexAction()
{
// grab the paginator from the AlbumTable
/*$paginator = $this->getAlbumTable()->fetchAll(true);
// set the current page to what has been passed in query string, or to 1 if none set
$paginator->setCurrentPageNumber((int)$this->params()->fromQuery('page', 1));
// set the number of items per page to 20
$paginator->setItemCountPerPage(20);
return new ViewModel(array(
'paginator' => $paginator
));*/
/*$result = new ViewModel(array(
'success'=>true,
'results' => $this->getAlbumTable()->fetchAll(),
));*/
$results = $this->getAlbumTable()->fetchAll();
$data = array();
foreach ($results as $result) {
$data[] = $result;
}
return new JsonModel(array(
'data' => $data,
'success' => true,
)
);
}
public function addAction()
{
$reader = new Reader\Xml();
$newData = $reader->fromFile('instock.xml');
$album = new Album();
foreach ($newData['PRODUCTS']['PRODUCT'] as $val) {
$album->exchangeArray($val);
$this->getAlbumTable()->saveAlbum($album);
}
}
public function editAction()
{
}
public function deleteAction()
{
}
public function getAlbumTable()
{
if (!$this->albumTable) {
$sm = $this->getServiceLocator();
$this->albumTable = $sm->get('Album\Model\AlbumTable');
}
return $this->albumTable;
}
public function updateProductTableAction()
{
$request = $this->getRequest();
// Make sure that we are running in a console and the user has not tricked our
// application into running this action from a public web server.
if (!$request instanceof ConsoleRequest) {
throw new \RuntimeException('You can only use this action from a console!');
}
/*$reader = new Reader\Xml();
$newData = $reader->fromFile('instock.xml');*/
$client = new Client('https://mw-glasberg.com/media/feed/instock.xml', array(
'maxredirects' => 0,
'timeout' => 30
));
$response = $client->send();
$data = simplexml_load_string($response->getBody());
$products = $this->xmlToArray($data);
foreach ($products['PRODUCTS']['PRODUCT'] as $j => $products) {
foreach ($products as $k => $product) {
$this->result[$j][$k] = (string)$product;
}
}
$album = new Album();
foreach ($this->result as $val) {
$album->exchangeArray($val);
$this->getAlbumTable()->saveAlbum($album);
}
}
public function xmlToArray($xmlObject, $out = [])
{
foreach ((array)$xmlObject as $index => $node)
$out[$index] = (is_object($node)) ? $this->xmlToArray($node) : $node;
return $out;
}
}
<file_sep>import {Component, NgModule, NgZone} from '@angular/core'
import {BrowserModule} from '@angular/platform-browser'
import {Service} from './request'
@Component({
selector: 'my-app',
templateUrl: '/js/app/app.component.html',
})
export class AppComponent {
constructor(private service: Service) {
}
responseResult;
products: Products[];
keys: String[];
ngOnInit() {
this.service.getProducts()
.subscribe(data => {
this.responseResult = data;
this.products = this.responseResult.data;
this.keys = Object.keys(this.products);
console.log(this.responseResult);
},
error => {
console.log(error)
});
}
}
<file_sep>"# zend_test"
|
09d792fd7dc8efa6288401f4a38f90e0ce5fd37c
|
[
"Markdown",
"TypeScript",
"PHP"
] | 5
|
TypeScript
|
LapEugene/zend_test
|
1926458a1c474fc9bb3b0a3cf707fad315ff778a
|
b1a0e50b2dd2bc40da66007e654113f6d9949622
|
refs/heads/main
|
<file_sep>#include <Servo.h>
Servo servomotor;
int pinoBotao = 11;
void setup()
{
servomotor.attach(8);
pinMode(pinoBotao, INPUT);
}
void loop()
{
int estadoBotao = digitalRead(pinoBotao);
if(estadoBotao == HIGH){
for(int i=0; i <= 180; i ++){
servomotor.write(i);
delay(20);
}
delay(3000);
for(int i=180; i >= 0; i --){
servomotor.write(i);
delay(20);
}
}
}<file_sep># Como aprender arduino sem arduino - aula 09
This code accompanies the tutorial video on my YouTube channel, linked below.
[Como aprender arduino sem arduino - aula 09](https://youtu.be/67NQhRYhEi8)
|
5ff93c9dfed2cc2483b299e77b128cc06614378d
|
[
"Markdown",
"C++"
] | 2
|
C++
|
EderFernandesTogakure/Tinkercad09
|
e91e435b6b91f41348b775a317fae9919aa54f72
|
7941e662fc9d4127256f356aadfebb216becb3a8
|
refs/heads/master
|
<repo_name>wfordyce28/municipal-2<file_sep>/public/javascripts/municipal.js
"use strict";
let login = document.getElementById(`login-button`);
let report = document.getElementById(`report-button`);
let five = document.getElementById(`one`);
let little = document.getElementById(`two`);
let forum = document.getElementById(`three`);
let open = document.getElementById(`four`);
let mana = document.getElementById(`five`);
let festival = document.getElementById(`six`);
let soccer = document.getElementById(`seven`);
let name, password, user;
login.addEventListener("click", function () {
login2();
});
report.addEventListener("click", function () {
report2();
});
five.addEventListener("click", function () {
five2();
});
little.addEventListener("click", function () {
little2();
});
forum.addEventListener("click", function () {
forum2();
});
open.addEventListener("click", function () {
open2();
});
mana.addEventListener("click", function () {
mana2();
});
festival.addEventListener("click", function () {
festival2();
});
soccer.addEventListener("click", function () {
soccer2();
});
function login2() {
name = window.prompt(`Hello, what is your name?`);
password = window.prompt(`Please enter your password`);
if (password !== `<PASSWORD>` || password !== `<PASSWORD>`){
window.alert(`Welcome, ${name}! Your user ID is 42069`);
}
else {
window.alert(`That is incorrect, goodbye.`)
window.close()
}
}
function report2() {
window.prompt('What issue would you like to report?')
window.alert('Thank you for being an active member of this community!')
}
function five2() {
window.prompt(`Please enter your user ID`);
if (user > `42069` || user < `42069`){
window.alert(`That is incorrect, goodbye.`);
window.close()
}
else {
window.alert(`You are now signed up for 5k`)
}
}
function little2() {
window.prompt(`Please enter your user ID`);
if (user > `42069` || user < `42069`){
window.alert(`That is incorrect, goodbye.`);
window.close()
}
else {
window.alert(`You are now signed up for Little League`)
}
}
function forum2() {
window.prompt(`Please enter your user ID`);
if (user > `42069` || user < `42069`){
window.alert(`That is incorrect, goodbye.`);
window.close()
}
else {
window.alert(`You are now signed up for the community forum`)
}
}
function open2() {
window.prompt(`Please enter your user ID`);
if (user > `42069` || user < `42069`){
window.alert(`That is incorrect, goodbye.`);
window.close()
}
else {
window.alert(`You are now signed up to volunteer at the Open House`)
}
}
function mana2() {
window.prompt(`Please enter your user ID`);
if (user > `42069` || user < `42069`){
window.alert(`That is incorrect, goodbye.`);
window.close()
}
else {
window.alert(`You are now signed up to volunteer at the Mana Food Project`)
}
}
function festival2() {
window.prompt(`Please enter your user ID`);
if (user > `42069` || user < `42069`){
window.alert(`That is incorrect, goodbye.`);
window.close()
}
else {
window.alert(`You are now signed up to volunteer at the Festival by the Bay`)
}
}
function soccer2() {
window.prompt(`Please enter your user ID`);
if (user > `42069` || user < `42069`){
window.alert(`That is incorrect, goodbye.`);
window.close()
}
else {
window.alert(`You are now signed up to volunteer for the Petoskey Soccer Invitational `)
}
}
|
585016f094b4ebb5fe269ba58b3848f45aae62fe
|
[
"JavaScript"
] | 1
|
JavaScript
|
wfordyce28/municipal-2
|
08e8533fc8ce17955b34ae0f6e54ea82a061ae1b
|
c7233a9e76d796b0105b4ed492bad57fca76bd15
|
refs/heads/master
|
<file_sep>const express = require('express')
const database = require('../mongo.js').Database;
var db = new database();
db.openDb('localhost', 'rocky');
db.setSchema();
const router = express.Router()
router.get('/getData/:name', function (req, res) {
var regs = req.params.name.split('&');
var query = req.query;
var start = Number(query.start)||0;
var pagesize = Number(query.pagesize)||30;
db.findAll(regs,start,pagesize,doc=>{
res.send({count:doc.length,data:doc});
});
})
router.get('/getCount/:name', function (req, res) {
var regx = req.params.name.split('&');
db.getCount(regx,doc=>{
res.send({totalcount:doc});
});
})
router.get('/api/douban',function(req,res){
    var query = req.query;
    var start = Number(query.start) || 0;
    db.findAll(/滨江/, start, 30, doc => {
// console.log(doc);
res.send({count:doc.length,data:doc});
});
});
// app.get('/api/html',function(req,res){
// res.sendFile(__dirname+'/mock/test.html');
// });
module.exports = router
<file_sep># househunter (rental hunter)
Live demo: [try it here](http://easyread.top)
This is a web app that aggregates rental listings. It is meant to save users from wasting time wandering around Douban groups by making it easy to find the listings they actually want; you can narrow the results with several filters at once, for example: a room near NetEase in Binjiang with a private bathroom and a balcony.
Here is a quick demo of how it works:

# Key features
- The important details are shown at a glance (image + text, instead of the hard-to-scan layout of Douban groups)
- Multi-criteria filtering to quickly pin down what you want
- Listings aggregated from multiple platforms (currently only Douban groups; more platforms will be added later)
- Data refreshed dynamically (updated every hour)
# Installation
``` bash
# install dependencies
npm install
# serve with hot reload at localhost:8080
npm run dev
# build for production with minification
npm run build
# build for production and view the bundle analyzer report
npm run build --report
```
There is no data when you run the project locally. You can open the List.vue file under src/components and change the api in its data to `http://easyread.top/api`:
```
src/components
└── List.vue
```
# Changelog
### v.1.1.0
- Added multi-criteria filtering
- Added pagination
- Added a back-to-top button
- Added image previews
- Long descriptions no longer clutter the list; they now open in a popup
- Data is now stored in MongoDB
### v.1.0.0
- Basic front-end skeleton; content is rendered from the Douban API
- Data is cached hourly to avoid being blocked for hitting the API too often
|
8dd543bf5dfc5fddaf8ec970b8983c3166c5366a
|
[
"JavaScript",
"Markdown"
] | 2
|
JavaScript
|
ZhangHui1993/househunter
|
eb65a64158e21871ac6c4ccddcd325e39b44885d
|
caed5fc19cb30df36b41b01619096d502436b9a5
|
refs/heads/main
|
<file_sep>expense_report = []
entries = None
inputFile = open('.\\input.txt', 'r')
for expense in inputFile:
expense_report.append(int(expense.rstrip('\n')))
inputFile.close()
for x in range(len(expense_report)):
for y in range(x + 1, len(expense_report) - 1):
if (expense_report[x] + expense_report[y] == 2020):
entries = (expense_report[x], expense_report[y])
print(x)
print(y)
break
if (entries is not None):
break
print(entries)
print(entries[0] * entries[1])
<file_sep># AdventOfCode2020
Repo for Challenges in Advent of Code 2020
[Organization & Folder Structure]
Challenges will be separated into folders named after the Day
Challenges may be separated into additional folders for clarity
|
5f8df6563a52d615f324648ed8fcb7e74e5a365e
|
[
"Markdown",
"Python"
] | 2
|
Python
|
SilverKirin8/AdventOfCode2020
|
533f5307fc578b8f6dc54ae47b60b3ed66bdf559
|
ede5c4a6ca8aaeb1563656ab1919dceb4da4a891
|
refs/heads/master
|
<repo_name>andresggz/demo-spring-clean-architecture<file_sep>/src/main/java/com/udea/demo/component/user/io/web/v1/model/UserSaveResponse.java
package com.udea.demo.component.user.io.web.v1.model;
import com.udea.demo.component.user.model.User;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.Generated;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
@Data
@Generated
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class UserSaveResponse {
private Long id;
private String name;
private String password;
private String email;
private LocalDateTime createDate;
private LocalDateTime updateDate;
public static UserSaveResponse fromModel(User user) {
return UserSaveResponse.builder().id(user.getId()).name(
user.getName()).email(user.getEmail()).password(<PASSWORD>()).createDate(
user.getCreateDate()).updateDate(
user.getUpdateDate()).build();
}
}
<file_sep>/README.md
## Demo project
Basic example based on [**clean architecture**](https://blog.cleancoder.com/uncle-bob/2012/08/13/the-clean-architecture.html)
## Design considerations

- `Entities` encapsulate Enterprise wide business rules. An entity can be an object with methods, or it can be a set of data structures and functions. It doesn’t matter so long as the entities could be used by many different applications in the enterprise. If you don’t have an enterprise, and are just writing a single application, then these entities are the business objects of the application. They encapsulate the most general and high-level rules. They are the least likely to change when something external changes. For example, you would not expect these objects to be affected by a change to page navigation, or security. No operational change to any particular application should affect the entity layer.
- `Use Cases`
The software in this layer contains application specific business rules. It encapsulates and implements all of the use cases of the system. These use cases orchestrate the flow of data to and from the entities, and direct those entities to use their enterprise wide business rules to achieve the goals of the use case. We do not expect changes in this layer to affect the entities. We also do not expect this layer to be affected by changes to externalities such as the database, the UI, or any of the common frameworks. This layer is isolated from such concerns. We do, however, expect that changes to the operation of the application will affect the use-cases and therefore the software in this layer. If the details of a use-case change, then some code in this layer will certainly be affected.
- `Interface Adapters`
The software in this layer is a set of adapters that convert data from the format most convenient for the use cases and entities, to the format most convenient for some external agency such as the Database or the Web. It is this layer, for example, that will wholly contain the MVC architecture of a GUI. The Presenters, Views, and Controllers all belong in here. The models are likely just data structures that are passed from the controllers to the use cases, and then back from the use cases to the presenters and views. Similarly, data is converted, in this layer, from the form most convenient for entities and use cases, into the form most convenient for whatever persistence framework is being used. i.e. The Database. No code inward of this circle should know anything at all about the database. If the database is a SQL database, then all the SQL should be restricted to this layer, and in particular to the parts of this layer that have to do with the database. Also in this layer is any other adapter necessary to convert data from some external form, such as an external service, to the internal form used by the use cases and entities.
- `Frameworks and Drivers` The outermost layer is generally composed of frameworks and tools such as the Database, the Web Framework, etc. Generally you don’t write much code in this layer other than glue code that communicates to the next circle inwards. This layer is where all the details go. The Web is a detail. The database is a detail. We keep these things on the outside where they can do little harm.
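As a rough illustration of the dependency rule described above, here is a minimal Java sketch. It is not code from this repository: the `User`, `UserRepository`, `FindUserUseCase` and `InMemoryUserRepository` names are illustrative assumptions.
```java
// Minimal, self-contained sketch of the dependency rule (illustrative names, not project code).
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

// Entity: enterprise-wide business object, free of framework imports.
class User {
    private final Long id;
    private final String email;

    User(Long id, String email) {
        this.id = id;
        this.email = email;
    }

    Long getId() { return id; }
    String getEmail() { return email; }
}

// Use-case layer: depends only on the entity and on a port it defines itself.
interface UserRepository {
    Optional<User> findById(Long id);
}

class FindUserUseCase {
    private final UserRepository repository;

    FindUserUseCase(UserRepository repository) {
        this.repository = repository;
    }

    User execute(Long id) {
        // Application-specific rule: fail fast when the user does not exist.
        return repository.findById(id)
                .orElseThrow(() -> new IllegalArgumentException("User not found: " + id));
    }
}

// Interface adapter: knows about storage details; the use case never does.
// A JPA- or JDBC-backed implementation would sit in this ring instead.
class InMemoryUserRepository implements UserRepository {
    private final Map<Long, User> store = new HashMap<>();

    @Override
    public Optional<User> findById(Long id) {
        return Optional.ofNullable(store.get(id));
    }
}
```
The point of the sketch is that `FindUserUseCase` compiles without referencing Spring, JPA or the web layer; only the outer adapter knows how data is stored, which mirrors the intent behind keeping the `model` code separate from the `io/web` adapters in this demo.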
### Requirements ###
- [Java 11](https://www.java.com/es/download/).
- [Maven 3](https://maven.apache.org/download.cgi).
- [Lombok](https://projectlombok.org/setup/intellij)
### Build ###
```
mvn clean install
```
### Run ###
```
mvn spring-boot:run -Dspring.profiles.active=local
```
### Calling the endpoints ###
Import Demo.postman_collection.json. Source folder: main/java/resources
<file_sep>/src/main/java/com/udea/demo/config/WebExceptionHandler.java
package com.udea.demo.config;
import com.fasterxml.jackson.databind.exc.InvalidFormatException;
import com.udea.demo.component.shared.model.ErrorDetails;
import com.udea.demo.component.shared.web.exception.BadRequestException;
import com.udea.demo.component.shared.web.exception.BusinessException;
import com.udea.demo.component.shared.web.exception.NotAuthorizedException;
import com.udea.demo.component.shared.web.exception.OperationNotSupportedException;
import com.udea.demo.component.shared.web.exception.ResourceNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.http.converter.HttpMessageNotReadableException;
import org.springframework.validation.BindException;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.MethodArgumentNotValidException;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.context.request.NativeWebRequest;
import org.springframework.web.servlet.NoHandlerFoundException;
import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler;
import javax.validation.ConstraintViolationException;
import java.sql.SQLIntegrityConstraintViolationException;
import java.time.LocalDate;
import java.util.stream.Collectors;
import static java.lang.String.format;
@ControllerAdvice
public class WebExceptionHandler extends ResponseEntityExceptionHandler {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
private static final String CONSTRAINT_VIOLATION = "Constraint violation";
private static final String VALIDATION_FAILED = "Validation Failed";
private static final String NOT_ACCEPTABLE = "Not acceptable";
private static final String NOT_AUTHORIZED = "Not authorized";
private static final String RESOURCE_NOT_FOUND = "Resource not found";
private static final String BAD_REQUEST = "Bad request";
private static final String FAILED_DEPENDENCY = "Failed dependency";
private static final String MESSAGE_INVALID_NUMBER_FIELD = "The value '%s' is not a number";
private static final String MESSAGE_INVALID_PROPERTY = "This value [%s] is invalid for field '%s'";
private static final String PRECONDITION_FAILED = "Precondition failed";
@ExceptionHandler(ConstraintViolationException.class)
@ResponseStatus(HttpStatus.BAD_REQUEST)
public ResponseEntity<ErrorDetails> handleConstraintViolation(ConstraintViolationException ex,
NativeWebRequest request) {
return new ResponseEntity<>(
new ErrorDetails(LocalDate.now(), CONSTRAINT_VIOLATION, ex.getMessage(), request.getDescription(false)),
HttpStatus.BAD_REQUEST);
}
@ResponseStatus(HttpStatus.BAD_REQUEST)
public ResponseEntity<ErrorDetails> handleMethodArgumentNotValid(MethodArgumentNotValidException ex,
NativeWebRequest request) {
return new ResponseEntity<>(new ErrorDetails(LocalDate.now(), VALIDATION_FAILED,
getErrors(ex.getBindingResult()),
request.getDescription(false)), HttpStatus.BAD_REQUEST);
}
@ResponseStatus(HttpStatus.BAD_REQUEST)
public ResponseEntity<ErrorDetails> handleBindingResult(BindException ex, NativeWebRequest request) {
return new ResponseEntity<>(
new ErrorDetails(LocalDate.now(), BAD_REQUEST, getErrors(ex.getBindingResult()),
request.getDescription(false)),
HttpStatus.BAD_REQUEST);
}
@ResponseStatus(HttpStatus.BAD_REQUEST)
public ResponseEntity<ErrorDetails> handleMessageNotReadableException(HttpMessageNotReadableException ex,
NativeWebRequest req) {
String message = ex.getMessage();
if (ex.getCause() instanceof InvalidFormatException) {
InvalidFormatException exInvFor = (InvalidFormatException) ex.getCause();
if (Number.class.isAssignableFrom(exInvFor.getTargetType())) {
message = String.format(MESSAGE_INVALID_NUMBER_FIELD, exInvFor.getValue().toString());
}
}
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(), BAD_REQUEST, message, req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.BAD_REQUEST);
}
@ResponseStatus(HttpStatus.INTERNAL_SERVER_ERROR)
public ResponseEntity<ErrorDetails> handleNoHandlerFound(NoHandlerFoundException exception, NativeWebRequest req) {
return new ResponseEntity<>(
new ErrorDetails(LocalDate.now(), HttpStatus.INTERNAL_SERVER_ERROR.getReasonPhrase(),
String.valueOf(HttpStatus.INTERNAL_SERVER_ERROR.value()), req.getDescription(false)),
HttpStatus.INTERNAL_SERVER_ERROR);
}
@ExceptionHandler(ResourceNotFoundException.class)
public ResponseEntity<ErrorDetails> handleResourceNotFoundException(final NativeWebRequest req,
final ResourceNotFoundException ex) {
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(), RESOURCE_NOT_FOUND, ex.getMessage(),
req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.NOT_FOUND);
}
@ExceptionHandler(BadRequestException.class)
public ResponseEntity<ErrorDetails> handleBadRequestException(final NativeWebRequest req,
final BadRequestException ex) {
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(), BAD_REQUEST, ex.getMessage(),
req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.BAD_REQUEST);
}
@ExceptionHandler(NotAuthorizedException.class)
public ResponseEntity<ErrorDetails> handleNotAuthorizedException(final NativeWebRequest req,
final NotAuthorizedException ex) {
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(), NOT_AUTHORIZED, ex.getMessage(),
req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.UNAUTHORIZED);
}
@ExceptionHandler(BusinessException.class)
public ResponseEntity<ErrorDetails> handleBusinessException(final NativeWebRequest req,
final BusinessException ex) {
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(), NOT_ACCEPTABLE, ex.getMessage(),
req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.NOT_ACCEPTABLE);
}
@ExceptionHandler(IllegalArgumentException.class)
public ResponseEntity<ErrorDetails> handleIllegalArgumentException(final NativeWebRequest req,
final IllegalArgumentException ex) {
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(), NOT_ACCEPTABLE, ex.getMessage(),
req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.NOT_ACCEPTABLE);
}
@ExceptionHandler(Exception.class)
public ResponseEntity<ErrorDetails> handleGenericException(final NativeWebRequest req, final Exception ex) {
logger.error("Exception handled", ex);
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(),
HttpStatus.INTERNAL_SERVER_ERROR.getReasonPhrase(),
ex.getMessage(), req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.INTERNAL_SERVER_ERROR);
}
@ExceptionHandler(HttpClientErrorException.class)
public ResponseEntity<ErrorDetails> handleHttpClientErrorException(final NativeWebRequest req,
final HttpClientErrorException ex) {
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(), FAILED_DEPENDENCY, ex.getMessage(),
req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.FAILED_DEPENDENCY);
}
@ExceptionHandler(OperationNotSupportedException.class)
public ResponseEntity<ErrorDetails> handleOperationNotSupportedException(final NativeWebRequest req,
final OperationNotSupportedException ex) {
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(), PRECONDITION_FAILED, ex.getMessage(),
req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.PRECONDITION_FAILED);
}
@ExceptionHandler(SQLIntegrityConstraintViolationException.class)
public ResponseEntity<ErrorDetails> handleSqlIntegrityConstraintViolationException(final NativeWebRequest req,
final SQLIntegrityConstraintViolationException ex) {
ErrorDetails errorDetails = new ErrorDetails(LocalDate.now(), PRECONDITION_FAILED, ex.getMessage(),
req.getDescription(false));
return new ResponseEntity<>(errorDetails, HttpStatus.PRECONDITION_FAILED);
}
private String getErrors(BindingResult bindingResult) {
String errors;
if (bindingResult.hasFieldErrors()) {
errors = bindingResult.getFieldErrors().stream()
.map(fieldError -> format(MESSAGE_INVALID_PROPERTY, fieldError.getRejectedValue(),
fieldError.getField()))
.collect(Collectors.joining(". "));
} else {
errors = bindingResult.toString();
}
return errors;
}
}
<file_sep>/src/main/java/com/udea/demo/component/shared/model/ErrorDetails.java
package com.udea.demo.component.shared.model;
import lombok.Generated;
import lombok.Getter;
import java.time.LocalDate;
@Getter
@Generated
public class ErrorDetails {
private LocalDate timestamp;
private String message;
private String details;
private String type;
public ErrorDetails(LocalDate timestamp, String message, String details, String type) {
super();
this.timestamp = timestamp;
this.message = message;
this.details = details;
this.type = type;
}
}
<file_sep>/src/main/resources/db/migration/V1__init.sql
CREATE TABLE IF NOT EXISTS `users` (
`id` BIGINT NOT NULL AUTO_INCREMENT,
`name` VARCHAR(45) NOT NULL,
`password` VARCHAR(45) NOT NULL,
`email` VARCHAR(100) NOT NULL,
`create_date` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
`update_date` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
ALTER TABLE `users`
ADD CONSTRAINT `uk_email`
UNIQUE KEY (`email`);<file_sep>/src/main/java/com/udea/demo/component/user/io/web/v1/model/UserSaveRequest.java
package com.udea.demo.component.user.io.web.v1.model;
import com.udea.demo.component.user.service.model.UserSaveCmd;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.Generated;
import lombok.NoArgsConstructor;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
@Data
@Generated
@NoArgsConstructor
@AllArgsConstructor
@Builder
public class UserSaveRequest {
@NotNull
@NotBlank
@Size(min = 3, max = 45)
private String name;
@NotNull
@NotBlank
@Size(min = 8, max = 45)
private String password;
@NotNull
@NotBlank
@Size(min = 3, max = 100)
private String email;
public static UserSaveCmd toModel(UserSaveRequest userToCreate) {
return UserSaveCmd.builder().name(userToCreate.getName()).password(userToCreate.getPassword())
.email(userToCreate.getEmail()).build();
}
}
<file_sep>/src/main/java/com/udea/demo/component/shared/web/ErrorDto.java
package com.udea.demo.component.shared.web;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import lombok.AllArgsConstructor;
import java.io.Serializable;
@JsonSerialize
@AllArgsConstructor
public class ErrorDto implements Serializable {
private static final long serialVersionUID = 6058781533824057875L;
private String message;
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
}
<file_sep>/src/main/java/com/udea/demo/component/user/io/web/v1/UserController.java
package com.udea.demo.component.user.io.web.v1;
import com.udea.demo.component.shared.model.ErrorDetails;
import com.udea.demo.component.shared.model.ResponsePagination;
import com.udea.demo.component.user.io.web.v1.model.UserListResponse;
import com.udea.demo.component.user.io.web.v1.model.UserQuerySearchRequest;
import com.udea.demo.component.user.io.web.v1.model.UserSaveRequest;
import com.udea.demo.component.user.io.web.v1.model.UserSaveResponse;
import com.udea.demo.component.user.model.User;
import com.udea.demo.component.user.service.UserService;
import com.udea.demo.component.user.service.model.UserQuerySearchCmd;
import com.udea.demo.component.user.service.model.UserSaveCmd;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.web.PageableDefault;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.net.URI;
import java.util.List;
import java.util.stream.Collectors;
import static org.springframework.web.util.UriComponentsBuilder.fromUriString;
@RestController
@RequestMapping(path = "/api/v1/users", produces = MediaType.APPLICATION_JSON_VALUE)
@Api(tags = {"Users"}, value = "Users")
public class UserController {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
private UserService userService;
public UserController(UserService userService) {
this.userService = userService;
}
@PostMapping
@ApiOperation(value = "Create an User.", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponses(value = {@ApiResponse(code = 201, message = "Created."),
@ApiResponse(code = 400, message = "Payload is invalid.", response = ErrorDetails.class),
@ApiResponse(code = 404, message = "Resource not found.", response = ErrorDetails.class),
@ApiResponse(code = 500, message = "Internal server error.", response = ErrorDetails.class)
})
@ResponseStatus(value = HttpStatus.CREATED)
@CrossOrigin(exposedHeaders = {HttpHeaders.LOCATION})
public ResponseEntity<Void> create(@Valid @NotNull @RequestBody UserSaveRequest userToCreate) {
logger.debug("Begin create: userToCreate = {}", userToCreate);
UserSaveCmd userToCreateCmd = UserSaveRequest.toModel(userToCreate);
User userCreated = userService.create(userToCreateCmd);
URI location = fromUriString("/api/v1/users").path("/{id}")
.buildAndExpand(userCreated.getId()).toUri();
logger.debug("End create: userCreated = {}", userCreated);
return ResponseEntity.created(location).build();
}
@GetMapping(path = "/{id}")
@ApiOperation(value = "Find an User by id.", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponses(value = {@ApiResponse(code = 200, message = "Success.", response = UserSaveResponse.class),
@ApiResponse(code = 400, message = "Payload is invalid.", response = ErrorDetails.class),
@ApiResponse(code = 404, message = "Resource not found.", response = ErrorDetails.class),
@ApiResponse(code = 500, message = "Internal server error.", response = ErrorDetails.class)
})
public ResponseEntity<UserSaveResponse> findById(@Valid @PathVariable("id") @NotNull Long id) {
logger.debug("Begin findById: id = {}", id);
User userFound = userService.findById(id);
logger.debug("End findById: userFound = {}", userFound);
return ResponseEntity.ok(UserSaveResponse.fromModel(userFound));
}
@GetMapping
@ApiOperation(value = "Find users by parameters.", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Success", response = UserListResponse.class),
@ApiResponse(code = 400, message = "Payload is invalid.", response = ErrorDetails.class),
@ApiResponse(code = 500, message = "Internal server error.", response = ErrorDetails.class)
})
public ResponsePagination<UserListResponse> findByParameters(@Valid @NotNull UserQuerySearchRequest queryCriteria,
@PageableDefault(page = 0, size = 10,
direction = Sort.Direction.DESC, sort = "id")
Pageable pageable) {
logger.debug("Begin findByParameters: queryCriteria = {}, pageable= {}", queryCriteria, pageable);
UserQuerySearchCmd queryCriteriaCmd = UserQuerySearchRequest.toModel(queryCriteria);
Page<User> usersFound = userService.findByParameters(queryCriteriaCmd, pageable);
List<UserListResponse> usersFoundList = usersFound.stream().map(UserListResponse::fromModel)
.collect(Collectors.toList());
logger.debug("End findByParameters: usersFound = {}", usersFound);
return ResponsePagination.fromObject(usersFoundList, usersFound.getTotalElements(), usersFound.getNumber(),
usersFoundList.size());
}
@DeleteMapping(path = "/{id}")
@ApiOperation(value = "Delete an user.", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponses(value = {@ApiResponse(code = 204, message = "Success."),
@ApiResponse(code = 400, message = "Payload is invalid.", response = ErrorDetails.class),
@ApiResponse(code = 404, message = "Resource not found.", response = ErrorDetails.class),
@ApiResponse(code = 500, message = "Internal server error.", response = ErrorDetails.class)
})
@ResponseStatus(value = HttpStatus.NO_CONTENT)
public ResponseEntity<Void> delete(@Valid @PathVariable("id") @NotNull Long id) {
logger.debug("Begin delete: id = {}", id);
userService.deleteById(id);
logger.debug("End delete: id = {}", id);
return new ResponseEntity<>(HttpStatus.NO_CONTENT);
}
@PutMapping(path = "/{id}")
@ApiOperation(value = "Update an user.", produces = MediaType.APPLICATION_JSON_VALUE)
@ApiResponses(value = {@ApiResponse(code = 200, message = "Success.", response = UserSaveResponse.class),
@ApiResponse(code = 400, message = "Payload is invalid.", response = ErrorDetails.class),
@ApiResponse(code = 404, message = "Resource not found.", response = ErrorDetails.class),
@ApiResponse(code = 500, message = "Internal server error.", response = ErrorDetails.class)
})
public ResponseEntity<UserSaveResponse> update(@Valid @RequestBody @NotNull UserSaveRequest userToUpdate,
@Valid @PathVariable("id") @NotNull Long id) {
logger.debug("Begin update: userToUpdate = {}, id = {}", userToUpdate, id);
UserSaveCmd userToUpdateCmd = UserSaveRequest.toModel(userToUpdate);
User userUpdated = userService.update(id, userToUpdateCmd);
logger.debug("End update: userUpdated = {}", userUpdated);
return ResponseEntity.ok(UserSaveResponse.fromModel(userUpdated));
}
}
<file_sep>/src/main/java/com/udea/demo/component/user/service/UserServiceImpl.java
package com.udea.demo.component.user.service;
import com.udea.demo.component.user.model.User;
import com.udea.demo.component.user.service.model.UserQuerySearchCmd;
import com.udea.demo.component.user.service.model.UserSaveCmd;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.validation.constraints.NotNull;
@Service
@Transactional
class UserServiceImpl implements UserService {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
private UserGateway userGateway;
public UserServiceImpl(UserGateway userGateway) {
this.userGateway = userGateway;
}
@Override
public User create(@NotNull UserSaveCmd userToCreateCmd) {
logger.debug("Begin create userToCreateCmd = {}", userToCreateCmd);
User userToCreate = UserSaveCmd.toModel(userToCreateCmd);
userToCreate.setPassword(userToCreateCmd.getPassword() + "<PASSWORD>");
User userCreated = userGateway.save(userToCreate);
logger.debug("End create userCreated = {}", userCreated);
return userCreated;
}
@Override
@Transactional(readOnly = true)
public User findById(@NotNull Long id) {
logger.debug("Begin findById id = {}", id);
User userFound = userGateway.findById(id);
logger.debug("End findById userFound = {}", userFound);
return userFound;
}
@Override
@Transactional(readOnly = true)
public Page<User> findByParameters(@NotNull UserQuerySearchCmd queryCriteria, @NotNull Pageable pageable) {
logger.debug("Begin findByParameters queryCriteria = {}, pageable = {}", queryCriteria, pageable);
Page<User> usersFound = userGateway.findByParameters(queryCriteria, pageable);
logger.debug("End findByParameters usersFound = {}", usersFound);
return usersFound;
}
@Override
public void deleteById(@NotNull Long id) {
logger.debug("Begin deleteById id = {}", id);
userGateway.deleteById(id);
logger.debug("End deleteById");
}
@Override
public User update(@NotNull Long id, @NotNull UserSaveCmd userToUpdateCmd) {
logger.debug("Begin update id = {}, userToUpdateCmd = {}", id, userToUpdateCmd);
User userInDataBase = findById(id);
User userToUpdate = userInDataBase.toBuilder().name(userToUpdateCmd.getName()).email(userToUpdateCmd.getEmail())
.build();
User userUpdated = userGateway.update(userToUpdate);
logger.debug("End update userUpdated = {}", userUpdated);
return userUpdated;
}
}
|
1639d701e6bacca90a7f57bed3b386154103b3cb
|
[
"Markdown",
"Java",
"SQL"
] | 9
|
Java
|
andresggz/demo-spring-clean-architecture
|
5e49039e4f97693176d15bde7ababbd13370fabe
|
8088525f8aca118c81d691aadda100e14499d539
|
refs/heads/master
|
<file_sep>fateful_characters
==================
A character sheet and setting manager for FATE
<file_sep># This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
#
camp = Campaign.new name: :test_campaign
camp.save!
skills = [
Skill.new({ campaign_id: camp.id, name: :athletics }),
Skill.new({ campaign_id: camp.id, name: :notice }),
Skill.new({ campaign_id: camp.id, name: :fight }),
Skill.new({ campaign_id: camp.id, name: :deceive }),
Skill.new({ campaign_id: camp.id, name: :investigate }),
Skill.new({ campaign_id: camp.id, name: :physique }),
Skill.new({ campaign_id: camp.id, name: :resources }),
Skill.new({ campaign_id: camp.id, name: :lore })
]
skills.map(&:save!)
char1 = Character.new name: :fred, description: "This is fred", refresh: 3, extras: "Nothing in extras yet", campaign_id: camp.id
char1.save!
skills.each_with_index do |skill, i|
rs = RankedSkill.new character_id: char1.id, skill_id: skill.id, rank: i
rs.save!
end
<file_sep>class AspectsController < ApplicationController
end
<file_sep>class Stunt < ActiveRecord::Base
belongs_to :character
end
<file_sep>class StuntsController < ApplicationController
end
<file_sep>class RankedSkill < ActiveRecord::Base
belongs_to :skill
belongs_to :character
def name
skill.name
end
end
<file_sep>class Character < ActiveRecord::Base
has_many :aspects
has_many :stunts
has_many :ranked_skills
belongs_to :campaign
def sorted_skills
sorted = {}
ranked_skills.each do |skill|
sorted[skill.rank] ||= []
sorted[skill.rank] << skill.name
end
sorted
end
end
<file_sep># Be sure to restart your server when you modify this file.
FatefulCharacters::Application.config.session_store :cookie_store, key: '_fateful_characters_session'
<file_sep>class RankedSkillsController < ApplicationController
end
<file_sep>class Aspect < ActiveRecord::Base
belongs_to :character
end
|
f899354cc2feae252dadc4ee39f6468d7b4abfe0
|
[
"Markdown",
"Ruby"
] | 10
|
Markdown
|
crimsonknave/fateful_characters
|
ccd311d6b655e8846eb058e4a555a84714cb7428
|
209ec1e6cd9a3e2deadc290234b0e712bd7e7ad5
|
refs/heads/master
|
<repo_name>owade/Angular<file_sep>/src/app/post-customer/post-customer.component.ts
import { Component, OnInit } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/do';
import { Observable } from 'rxjs/Observable';
import {Customer} from '../get-customer/Customer';
import { AuthService } from './../auth/auth.service';
@Component({
selector: 'app-post-customer',
templateUrl: './post-customer.component.html',
styleUrls: ['./post-customer.component.css']
})
export class PostCustomerComponent implements OnInit {
constructor(private http: HttpClient, public auth: AuthService) {}
model: Customer = new Customer();
onSubmit(employeeForm) {
console.log(this.model);
this.http.post<Customer>('https://o5nmudet3b.execute-api.us-east-2.amazonaws.com/dev/clients', this.model)
.subscribe(
res => {
console.log(res);
},
err => {
console.log('Error occured');
}
);
employeeForm.resetForm();
}
ngOnInit(): void {
// this.http.post<Customer>('http://jsonplaceholder.typicode.com/posts', {
// title: 'foo0099',
// body: 'basdv ar',
// userId: 5939
// })
// .subscribe(
// res => {
// console.log(res);
// },
// err => {
// console.log('Error occured');
// }
// );
}
}
<file_sep>/src/app/app.module.ts
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { AppRoutingModule } from './app-routing.module';
import { FormsModule } from '@angular/forms';
import { AuthService } from './auth/auth.service';
import { CustomerGetService } from './get-customer/CustomerGetService';
import { AppComponent } from './app.component';
import { ClarityModule , ClrFormsNextModule } from '@clr/angular';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { HttpClientModule } from '@angular/common/http';
import { GetCustomerComponent } from './get-customer/get-customer.component';
import { PostCustomerComponent } from './post-customer/post-customer.component';
import {MapToIterable} from '../app/get-customer/MapToIterable';
import { CallbackComponent } from './callback/callback.component';
import { HomeComponent } from './home/home.component';
@NgModule({
declarations: [
AppComponent,
GetCustomerComponent,
PostCustomerComponent,
CallbackComponent,
HomeComponent,
MapToIterable
],
imports: [
BrowserModule,
ClarityModule,
BrowserAnimationsModule,
HttpClientModule,
ClrFormsNextModule,
AppRoutingModule,
FormsModule,
],
providers: [AuthService, CustomerGetService],
bootstrap: [AppComponent]
})
export class AppModule { }
<file_sep>/src/app/get-customer/Customer.ts
export class Customer {
UserID: string;
LastName: string;
FirstName: string;
City: string;
constructor() {
this.UserID = (Math.floor(Math.random() * (200 - 10 + 1)) + 10).toString();
this.LastName = '';
this.FirstName = '';
this.City = '';
}
}
<file_sep>/src/app/get-customer/CustomerGetService.ts
import { Injectable } from '@angular/core';
import { Http, RequestOptions } from '@angular/http';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/operator/map' ;
import * as _ from 'lodash';
import {Customer} from './Customer';
@Injectable()
export class CustomerGetService {
constructor(private http: HttpClient) {
}
// customers = [];
authstr = 'Authorization';
// tslint:disable-next-line:max-line-length
authtoken = '<KEY>';
// const headers = new HttpHeaders({authstr: authtoken});
headerss = new HttpHeaders().append(this.authstr, this.authtoken);
getCustomers() {
return this.http.get<Customer[]>('https://o5nmudet3b.execute-api.us-east-2.amazonaws.com/dev/clients', {headers : this.headerss})
.map(res => (res)).do(console.log);
}
}
<file_sep>/src/environments/environment.prod.ts
export const environment = {
production: true,
auth: {
clientID: 'W9D8e3Y8SQ2BbqAITVJ9EP6vk789DGuX',
domain: 's3579452.auth0.com', // e.g., you.auth0.com
audience: 'https://owade.github.io/Angular/', // e.g., http://localhost:4200
redirect: 'https://owade.github.io/Angular/callback',
scope: 'openid profile email'
}
};
<file_sep>/src/app/get-customer/get-customer.component.ts
import { Component, OnInit } from '@angular/core';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { CustomerGetService} from './CustomerGetService';
import { Http, RequestOptions, Jsonp } from '@angular/http';
// import { Injectable } from '@angular/core';
// import { do } from 'rxjs/operators';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/do';
import * as _ from 'lodash';
import { Observable } from 'rxjs/Observable';
import { AuthService } from './../auth/auth.service';
import {Customer} from './Customer';
import { CUSTOM_BUTTON_TYPES } from '@clr/angular';
@Component({
selector: 'app-get-customer',
templateUrl: './get-customer.component.html',
styleUrls: ['./get-customer.component.css']
})
export class GetCustomerComponent implements OnInit {
customers$: Observable<Customer[]>;
custo: Customer[];
currentPage = 1;
total = 0;
loading = true;
constructor(private http: HttpClient, public auth: AuthService, public CustService: CustomerGetService) { }
ngOnInit(): void {
// const headers = new HttpHeaders();
// const authstr = 'Authorization';
// tslint:disable-next-line:max-line-length
// const authtoken = '<KEY>';
// const headers = new HttpHeaders({authstr: authtoken});
// const headerss = new HttpHeaders().append(authstr, authtoken);
// const headerDict = {
// tslint:disable-next-line:max-line-length
// 'Authorization': 'Bearer <KEY>'
// };
// const requestOptions = {headers: new HttpHeaders(headerDict)};
// const options = new RequestOptions({headers: headers});
// const headers = new Headers();
// tslint:disable-next-line:max-line-length
// headers.append('Authorization', 'Bearer <KEY>');
// this.customers$ = this.http
// .get<Customer[]>('https://o5nmudet3b.execute-api.us-east-2.amazonaws.com/dev/clients', {headers : headerss})
// .map(data => _.values(data))
// .do(console.log);
this.getCustomers();
// this.loading = false;
console.log('ewgew');
// console.log(this.custo.length);
}
getCustomers() {
this.CustService.getCustomers()
.subscribe(custs => {
this.custo = (custs) ;
this.loading = false;
this.total = this.custo.length;
});
}
}
|
93f829f832b088b0cd1aa0fba9df26c709d1230b
|
[
"TypeScript"
] | 6
|
TypeScript
|
owade/Angular
|
0785ecf38cf0a3be9f5f9a56f658078661bff081
|
064836d133a98bfbb79365d455ad578b5a821ac1
|
refs/heads/master
|
<file_sep>#! /usr/bin/env node
const { information } = require('./information')
const { render } = require('./render')
information.forEach(info => render[Object.keys(info)[0]](Object.values(info)[0]))<file_sep># thinkverse command line 👏
Console out latest information about [thinkverse]. 🤘
### Installation
##### npm
```bash
$ npm install thinkverse
```
##### yarn
```bash
$ yarn add thinkverse
```
### Usage
```bash
$ thinkverse
```
[thinkverse]: https://github.com/thinkverse<file_sep>module.exports.information = [{
'text': 'Hello, I\'m Thinkverse. 👋'
}, {
'text': 'You can find out more about me below. 🔗'
}, {
'text': ''
}, {
'link': ['Twitter', 'https://twitter.com/thinkverse']
}, {
'link': ['GitHub', 'https://github.com/thinkverse']
}, {
'link': ['Dev', 'https://dev.to/thinkverse']
}]
<file_sep>module.exports.render = {
text: (info) => {
return console.log(info)
},
link: (info) => {
return console.log(`${info[0]}: %o`, info[1]);
}
}
|
16d37938462689164977e028393bb81dde3a37c6
|
[
"JavaScript",
"Markdown"
] | 4
|
JavaScript
|
thinkverse/thinkverse-cli
|
3336e07206443887bbeb95e0fffe29d8f496abb8
|
78589fe002c3c98e55eae04360d388afb047caf2
|
refs/heads/master
|
<repo_name>VictoriaSkokova/Lab_1<file_sep>/Main.cpp
#include <iostream>
#include "Function.h"
using namespace std;
int main()
{
setlocale(LC_ALL, "Russian");
size_t height, width;
cout << "Добрый день. Лабораторная работа №1 Скоковой Виктории гр. 7302\n";
cout << "Введите размеры двумерного массива\n";
cout << "Строки: ";
cin >> height;
cout << "Столбцы: ";
cin >> width;
int beginning, l, size_print = 0;
cout << "Введите число, начиная с которого будет заполняться массив ";
cin >> l;
size_t **array = new size_t*[height];
for (size_t i = 0; i < height; i++)
array[i] = new size_t[width];
beginning = put_array(array, height, width, l);
size_print = for_print(height, width, beginning);
print_array(array, height, width, size_print);
for (size_t i = 0; i < height; i++)
delete[] array[i];
delete[]array;
system("pause");
return 0;
}
<file_sep>/Function.h
#pragma once
int put_array(size_t ** array, const size_t height, const size_t width, const size_t number);
void print_array(size_t ** array, const size_t height, const size_t width, const size_t k);
bool equals(const size_t* array1, const size_t array1_size, size_t ** array2, const size_t height, const size_t width);
int for_print(const size_t height, const size_t width, const size_t k);
<file_sep>/Function.cpp
#include "Function.h"
#include <iostream>
using namespace std;
// The array-filling function works as follows:
// filling proceeds along a diagonal by decreasing the column index j and increasing the row index i, until it reaches the element with index j = j_0 (which marks the leftmost column that still needs to be filled).
// Once the leftmost reachable element is filled, we move on to a new diagonal, comparing i and j against their maxima. If they match, the starting values of the loop are updated. The main loop runs until all elements have been filled.
int put_array(size_t ** array, const size_t height, const size_t width, const size_t number)
{
size_t k=number;
int i = 0, j = 1;
int i_0 = 0, j_0 = -1, j_1 = 1;
size_t nn = 1;
array[0][0] = k;
if ((height == 1) || (width == 1))
{
if (height == 1)
{
i = 0;
for (size_t j = 0; j < width; j++)
{
array[i][j] = k;
k++;
}
}
else
{
j = 0;
for (size_t i = 0; i < height; i++)
{
array[i][j] = k;
k++;
}
}
}
else {
while (nn < (height*width))
{
while (j > j_0)
{
k++;
array[i][j] = k;
nn++;
j--;
i++;
}
if (j_1 == (width - 1))
i_0++;
else
j_1++;
if (i == height)
j_0++;
i = i_0;
j = j_1;
}
}
return k;
}
// Function that computes the field width needed to print the array with aligned columns
int for_print(const size_t height, const size_t width, const size_t k)
{
size_t j, num = 0;
j = k + (width*height) - 1;
while (j > 0)
{
j = j / 10;
num++;
}
return num;
}
void print_array (size_t ** array, const size_t height, const size_t width, const size_t num)
{
for (size_t i = 0; i < height; i++)
{
for (size_t j = 0; j < width; j++)
{
cout.width(num);
cout << array[i][j] << " ";
}
cout << "\n";
}
}
bool equals (const size_t* array1, const size_t array1_size, size_t ** array2, const size_t height, const size_t width)
{
cout << "\nЗаполненный массив: \n";
int i_1 = 0, j_1 = 0;
if (array1_size != (height* width)) return false;
for (size_t i = 0; i < array1_size; i++)
{
if (array1[i] != array2[i_1][j_1])
return false;
j_1++;
if (j_1 == width)
{
j_1 = 0;
i_1++;
}
}
return true;
}
<file_sep>/unittest1.cpp
#include "stdafx.h"
#include "CppUnitTest.h"
#include "../Lab_Alg_1/Function.h"
using namespace Microsoft::VisualStudio::CppUnitTestFramework;
namespace UnitTest1
{
TEST_CLASS(UnitTest1)
{
public:
TEST_METHOD(is_test_works)
{
Assert::IsTrue(true);
}
TEST_METHOD(equal_for_different_arrays)
{
size_t n = 2, m = 5, k=1;
size_t array1[5] = { 5, 4, 3, 2, 1 };
size_t **array2 = new size_t*[n];
for (size_t i = 0; i < n; i++)
array2[i] = new size_t[m];
for (size_t i = 0; i < n; i++)
for (size_t j = 0; j < m; j++)
{
array2[i][j] = k;
k++;
}
Assert::IsFalse(equals(array1, 5, array2, 2, 5));
for (size_t i = 0; i < n; i++)
delete[] array2[i];
delete[]array2;
}
TEST_METHOD(equal_for_equal_arrays)
{
size_t n = 3, m = 3, k = 1;
size_t array1[9] = {1, 2, 3, 4, 5, 6, 7, 8, 9 };
size_t **array2 = new size_t*[n];
for (size_t i = 0; i < n; i++)
array2[i] = new size_t[m];
for (size_t i = 0; i < n; i++)
for (size_t j = 0; j < m; j++)
{
array2[i][j] = k;
k++;
}
Assert::IsTrue(equals(array1, 9, array2, n, m));
for (size_t i = 0; i < n; i++)
delete[] array2[i];
delete[]array2;
}
TEST_METHOD(function_for_print)
{
size_t n = 2, m = 5, k = 33;
size_t **array2 = new size_t*[n];
for (size_t i = 0; i < n; i++)
array2[i] = new size_t[m];
k = put_array(array2, n, m, 1);
Assert::IsFalse(for_print(2, 5, k) == 3);
for (size_t i = 0; i < n; i++)
delete[] array2[i];
delete[]array2;
}
TEST_METHOD(equal_for_function_put)
{
size_t n = 2, m = 2, k = 0;
size_t array1[4] = { 1, 2, 3, 4 };
size_t **array2 = new size_t*[n];
for (size_t i = 0; i < n; i++)
array2[i] = new size_t[m];
k = put_array(array2, n, m, 1);
Assert::IsTrue(equals(array1, 4, array2, n, m));
for (size_t i = 0; i < n; i++)
delete[] array2[i];
delete[]array2;
}
TEST_METHOD(equal_for_function_put_elem)
{
size_t n = 2, m = 2, k = 0;
size_t array1[4] = { 1, 2, 3, 4 };
size_t **array2 = new size_t*[n];
for (size_t i = 0; i < n; i++)
array2[i] = new size_t[m];
k = put_array(array2, n, m, 1);
Assert::IsTrue(array1[1]==array2[0][1]);
for (size_t i = 0; i < n; i++)
delete[] array2[i];
delete[]array2;
}
TEST_METHOD(equal_for_function_put_different_size)
{
size_t n = 4, m = 4, k = 0;
size_t array1[15] = { 1, 2, 4, 7, 3, 5, 8, 11, 6, 9, 12, 14, 10, 13, 15 };
size_t **array2 = new size_t*[n];
for (size_t i = 0; i < n; i++)
array2[i] = new size_t[m];
k = put_array(array2, n, m, 1);
Assert::IsFalse(equals(array1, 15, array2, n, m));
for (size_t i = 0; i < n; i++)
delete[] array2[i];
delete[]array2;
}
TEST_METHOD(unusual_1x5)
{
size_t n = 1, m = 5, k = 0;
size_t array1[5] = { 1, 2, 3, 4, 5 };
size_t **array2 = new size_t*[n];
for (size_t i = 0; i < n; i++)
array2[i] = new size_t[m];
k = put_array(array2, n, m, 1);
Assert::IsTrue(equals(array1, 5, array2, n, m));
for (size_t i = 0; i < n; i++)
delete[] array2[i];
delete[]array2;
}
TEST_METHOD(unusual_5x1)
{
size_t n = 5, m = 1, k = 0;
size_t array1[5] = { 1, 2, 3, 4, 5 };
size_t **array2 = new size_t*[n];
for (size_t i = 0; i < n; i++)
array2[i] = new size_t[m];
k = put_array(array2, n, m, 1);
Assert::IsTrue(equals(array1, 5, array2, n, m));
for (size_t i = 0; i < n; i++)
delete[] array2[i];
delete[]array2;
}
TEST_METHOD(unusual_1x1)
{
size_t n = 1, m = 1, k = 0;
size_t array1[1] = { 1 };
size_t **array2 = new size_t*[n];
for (size_t i = 0; i < n; i++)
array2[i] = new size_t[m];
k = put_array(array2, n, m, 1);
Assert::IsTrue(equals(array1, 1, array2, n, m));
for (size_t i = 0; i < n; i++)
delete[] array2[i];
delete[]array2;
}
};
}
|
2e0d9ed106553c1ef9ae03dc794383d0e164b8c6
|
[
"C",
"C++"
] | 4
|
C++
|
VictoriaSkokova/Lab_1
|
24339cb4f5d2ed17a2aab79c40d66e02c9bc9115
|
d8f71d8555826fb90df4f5cdc70885cf6bb7d2ee
|
refs/heads/master
|
<file_sep>"""Helper functions for beam search."""
import numpy as np
from queue import PriorityQueue
from future.utils import implements_iterator
def InitBeam(phrase, user_id, m):
# Need to find the hidden state for the last char in the prefix.
prev_hidden = np.zeros((1, 2 * m.params.num_units))
for word in phrase[:-1]:
feed_dict = {
m.model.prev_hidden_state: prev_hidden,
m.model.prev_word: [m.char_vocab[word]],
m.model.beam_size: 4
}
prev_hidden = m.session.run(m.model.next_hidden_state, feed_dict)
return prev_hidden
class BeamItem(object):
"""This is a node in the beam search tree.
Each node holds three things: a log probability, a list of previous words, and
the previous hidden state vector.
"""
def __init__(self, prev_word, prev_hidden, log_prob=0.0):
self.log_probs = log_prob
if type(prev_word) == list:
self.words = prev_word
else:
self.words = [prev_word]
self.prev_hidden = prev_hidden
def __le__(self, other):
return self.log_probs <= other.log_probs
def __lt__(self, other):
return self.log_probs < other.log_probs
def __ge__(self, other):
return self.log_probs >= other.log_probs
def __gt__(self, other):
return self.log_probs > other.log_probs
def __eq__(self, other):
return self.log_probs == other.log_probs
def __str__(self):
return 'beam {0:.3f}: '.format(self.log_probs) + ''.join(self.words)
class BeamQueue(object):
"""Bounded priority queue."""
def __init__(self, max_size=10):
self.max_size = max_size
self.size = 0
self.bound = None
self.q = PriorityQueue()
def Insert(self, item):
self.size += 1
self.q.put((-item.log_probs, item))
if self.size > self.max_size:
self.Eject()
def CheckBound(self, val):
# If the queue is full then we know that there is no chance of a new item
# being accepted if its priority is worse than the last thing that got
# ejected.
return self.size < self.max_size or self.bound is None or val < self.bound
def Eject(self):
score, _ = self.q.get()
self.bound = -score
self.size -= 1
def __iter__(self):
return self
def __next__(self):
if not self.q.empty():
_, item = self.q.get()
return item
raise StopIteration
def next(self):
return self.__next__()
def GetCompletions(prefix, user_id, m, branching_factor=8, beam_size=300,
stop='</S>'):
""" Find top completions for a given prefix, user and model."""
m.Lock(user_id) # pre-compute the adaptive recurrent matrix
prev_state = InitBeam(prefix, user_id, m)
nodes = [BeamItem(prefix, prev_state)]
for i in range(36):
new_nodes = BeamQueue(max_size=beam_size)
current_nodes = []
for node in nodes:
if i > 0 and node.words[-1] == stop: # don't extend past the stop token
new_nodes.Insert(node) # copy over finished beams
else:
current_nodes.append(node) # these ones will get extended
if len(current_nodes) == 0:
return new_nodes # all beams have finished
# group together all the nodes in the queue for efficient computation
prev_hidden = np.vstack([item.prev_hidden for item in current_nodes])
prev_words = np.array([m.char_vocab[item.words[-1]] for item in current_nodes])
feed_dict = {
m.model.prev_word: prev_words,
m.model.prev_hidden_state: prev_hidden,
m.model.beam_size: branching_factor
}
current_char, current_char_p, prev_hidden = m.session.run(
[m.beam_chars, m.model.selected_p, m.model.next_hidden_state],
feed_dict)
for i, node in enumerate(current_nodes):
for new_word, top_value in zip(current_char[i, :], current_char_p[i, :]):
new_cost = top_value + node.log_probs
if new_nodes.CheckBound(new_cost): # only create a new object if it fits in beam
new_beam = BeamItem(node.words + [new_word], prev_hidden[i, :],
log_prob=new_cost)
new_nodes.Insert(new_beam)
nodes = new_nodes
return nodes
def FirstNonMatch(s1, s2, start=0):
# returns the position of the first non-matching character
min_len = min(len(s1), len(s2))
for i in range(start, min_len):
if s1[i] != s2[i]:
return i
return min_len
def GetSavedKeystrokes(m, query, branching_factor=4, beam_size=100):
"""Find the shortest prefix that gets the right completion.
Uses binary search.
"""
left = 1
right = len(query)
while left <= right:
midpoint = (left + right) // 2
prefix = ['<S>'] + list(query[:midpoint])
completions = GetCompletions(
prefix, 0, m, branching_factor=branching_factor, beam_size=beam_size)
top_completion = list(completions)[-1]
top_completion = ''.join(top_completion.words[1:-1])
if top_completion == query:
right = midpoint - 1
else:
left = midpoint + 1
return left
|
a9e0b16d99877f37973b4eb3c7ae047c23e1eda8
|
[
"Python"
] | 1
|
Python
|
setupup/query_completion
|
6f84b1cd5a16e830758e5e56ff7ae6f5dcf5aff7
|
ceb6900cb96351f5e6ef86b55e204a2ed54fee61
|
refs/heads/master
|
<repo_name>andypinet/webpack<file_sep>/test/hotCases/update.js
module.exports = function(done) {
return function() {
module.hot.check(true, function(err) {
if(err) done(err);
});
}
};
|
ef8c7e1f4f9e11575f73330abfb73bd805cbb33a
|
[
"JavaScript"
] | 1
|
JavaScript
|
andypinet/webpack
|
662c5abd1f4d9705dee3d02676bb894c29359932
|
f0d31f4891ffa411b0fa9c21ad723fb56da4dadd
|
refs/heads/master
|
<repo_name>jacekdalkowski/_fun_and_profit_asp_net_5_mono<file_sep>/Infrastructure/Security/ApplicationUserStore.cs
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNet.Identity;
namespace hello_web_app.Infrastructure.Security
{
public class SingleMockApplicationUserStore : IUserStore<ApplicationUser>
{
public Task<string> GetUserIdAsync(ApplicationUser user, CancellationToken cancellationToken)
{
return Task.FromResult("1");
}
public Task<string> GetUserNameAsync(ApplicationUser user, CancellationToken cancellationToken)
{
return Task.FromResult("Jacek");
}
public Task SetUserNameAsync(ApplicationUser user, string userName,
CancellationToken cancellationToken)
{
throw new NotImplementedException();
}
public Task<string> GetNormalizedUserNameAsync(ApplicationUser user, CancellationToken cancellationToken)
{
return Task.FromResult("Jacek");
}
public Task SetNormalizedUserNameAsync(ApplicationUser user, string normalizedName,
CancellationToken cancellationToken)
{
throw new NotImplementedException();
}
public Task<IdentityResult> CreateAsync(ApplicationUser user, CancellationToken cancellationToken)
{
throw new NotImplementedException();
}
public Task<IdentityResult> UpdateAsync(ApplicationUser user, CancellationToken cancellationToken)
{
throw new NotImplementedException();
}
public Task<IdentityResult> DeleteAsync(ApplicationUser user, CancellationToken cancellationToken)
{
throw new NotImplementedException();
}
public Task<ApplicationUser> FindByIdAsync(string userId, CancellationToken cancellationToken)
{
return Task.FromResult(new ApplicationUser(){
Id = "1",
UserName = "Jacek"
});
}
public Task<ApplicationUser> FindByNameAsync(string normalizedUserName, CancellationToken cancellationToken)
{
return Task.FromResult(new ApplicationUser(){
Id = "1",
UserName = "Jacek"
});
}
public void Dispose()
{
}
}
}<file_sep>/Controllers/AngularController.cs
using Microsoft.AspNet.Identity;
using hello_web_app.Infrastructure.Security;
namespace hello_web_app.Controllers
{
public class AngularController
{
public AngularController(UserManager<ApplicationUser> userManager, SignInManager<ApplicationUser> signInManager)
{
}
}
}<file_sep>/Infrastructure/Security/ApplicationUser.cs
using Microsoft.AspNet.Identity;
namespace hello_web_app.Infrastructure.Security
{
public class ApplicationUser : IdentityUser<string>
{
}
}<file_sep>/Infrastructure/Security/SimpleOAuthProvider.cs
//using Microsoft.Owin.Security.OAuth;
namespace hello_web_app.Infrastructure.Security
{
// no oauth server in asp.net 5 yet
public class SimpleOAuthProvider //: OAuthAuthorizationServerProvider
{
}
}<file_sep>/Infrastructure/Security/ApplicationUserManager.cs
using Microsoft.AspNet.Identity;
using System;
// using System.Collections.Generic;
// using System.Globalization;
// using System.Linq;
// using System.Runtime.CompilerServices;
// using System.Security.Claims;
// using System.Text;
// using System.Threading;
// using System.Threading.Tasks;
// using Microsoft.AspNet.Hosting;
// using Microsoft.AspNet.Http;
// using Microsoft.AspNet.Identity.Logging;
// using Microsoft.Framework.Logging;
// using Microsoft.Framework.OptionsModel;
// namespace hello_web_app.Infrastructure.Security
// {
// public class ApplicationUserManager : UserManager<ApplicationUser>
// {
// public ApplicationUserManager(IUserStore<ApplicationUser> store,
// IOptions<IdentityOptions> optionsAccessor,
// IPasswordHasher<ApplicationUser> passwordHasher,
// IEnumerable<IUserValidator<ApplicationUser>> userValidators,
// IEnumerable<IPasswordValidator<ApplicationUser>> passwordValidators,
// ILookupNormalizer keyNormalizer,
// IdentityErrorDescriber errors,
// IEnumerable<IUserTokenProvider<ApplicationUser>> tokenProviders,
// ILoggerFactory logger,
// IHttpContextAccessor contextAccessor)
// : base(store, optionsAccessor, passwordHasher, userValidators, passwordValidators,
// keyNormalizer, errors, tokenProviders, logger, contextAccessor)
// {
// }
// }
// }
|
01376801e098de9d16ef218e5420f18bb057b515
|
[
"C#"
] | 5
|
C#
|
jacekdalkowski/_fun_and_profit_asp_net_5_mono
|
f8538ae3bb213480f45e0da164252e62ed87e992
|
a7c018b00fc793d68943347b1ce9451d5356cb8b
|
refs/heads/master
|
<repo_name>h3nnysa/RESKRIPSIT<file_sep>/modul/vartikel.php
<?php
// connect to MySQL
include "inc/koneksi.php";
// read the article id from the 'Baca selengkapnya...' (read more) link parameter on index.php
$id = $_GET['id'];
// update the view count of the article (increment views by 1)
$query = "UPDATE t_artikel SET views = views + 1 WHERE idArtikel = '$id'";
mysql_query($query);
// fetch the article data by its id
$query = "SELECT * FROM t_artikel WHERE idArtikel = '$id'";
$hasil = mysql_query($query);
$data = mysql_fetch_array($hasil);
// display the article title
echo "<h3>".$data['title']."</h3>";
// display the publication date, author, and view count
echo "<p><small>Tgl post: ".$data['datePub'].". Posted by: ".$data['author'].". Dibaca: ".$data['views']." kali</small></p><hr>";
// display the article body
echo "<p>".$data['content']."</p>";
?><file_sep>/modul/backup/backup.php
<link href="css/form.css" rel="stylesheet" type="text/css">
<?php
// include the koneksi.php file
include "config/koneksi.php";
echo "<h1>Dump MySQL</h1>";
echo "<h3>Nama Database: ".$database."</h3>";
echo "<h3>Daftar Tabel</h3>";
// query to list all tables in the database
$query = "SHOW TABLES";
$hasil = mysql_query($query);
// display all tables inside a form
echo "<form method='post' action='modul/backup/backup_proses.php'>";
echo "<table>";
while ($data = mysql_fetch_row($hasil))
{
echo "<tr><td><input type='checkbox' name='tabel[]' value='".$data[0]."'></td><td>".$data[0]."</td></tr>";
}
echo "</table><br>";
echo "<input type='submit' name='submit' value='Backup Data'>";
echo "</form>";
?><file_sep>/modul/core.php
<?php
function CF($listsakit = Array()){
include "inc/koneksi.php";
$CF = Array();
$sqli = mysql_query('SELECT idp FROM t_penyakit') or die(mysql_error());
if (count($listsakit)==1) {
$i = 0;
while ($data = mysql_fetch_array($sqli)){
$idp = $data['idp'];
$req = mysql_query("SELECT mb,md FROM t_aturan WHERE idp = '$idp' AND idg='".$listsakit[0]."'") or die(mysql_error());
$mbmd = mysql_fetch_array($req);
if ($mbmd['mb']!=""){
$hitung = $mbmd['mb'] - $mbmd['md'];
$CF['p'][$i] = $idp;
$CF['h'][$i] = number_format($hitung, 3, ',', '');
$i++;
}
}
} elseif (count($listsakit)>1){
$i = 0;
while ($data = mysql_fetch_array($sqli)){
$idp = $data['idp'];
$req = mysql_query("SELECT mb,md FROM t_aturan WHERE idp='$idp' AND idg='".$listsakit[0]."'") or die(mysql_error());
$mbmd = mysql_fetch_array($req);
for ($j=1;$j<count($listsakit);$j++){
$req1 = mysql_query("SELECT mb,md FROM t_aturan WHERE idp='$idp' AND idg='".$listsakit[$j]."'") or die(mysql_error());
$mbmd1 = mysql_fetch_array($req1);
if (($mbmd['mb']!="") || ($mbmd1['mb']!="")){
if ($j==1){
$mb = ($mbmd['mb']+$mbmd1['mb'] * (1 - $mbmd['mb']));
$md = ($mbmd['md']+$mbmd1['md'] * (1 - $mbmd['md']));
}else{
$mb = ($mb+$mbmd1['mb'] * (1 - $mb));
$md = ($md+$mbmd1['md'] * (1 - $md));
}
}
}
if (($mbmd['mb']!="") || ($mbmd1['mb']!="")){
$hitung = $mb - $md;
$CF['p'][$i] = $idp;
$CF['h'][$i] = number_format($hitung, 3, ',', '');
$i++;
}
}
}
return $CF;
}
function doublemax($mylist){
$maxvalue=max($mylist);
while(list($key,$value)=each($mylist)){
if($value==$maxvalue)$maxindex=$key;
}
return array("m"=>$maxvalue,"i"=>$maxindex);
}
function gejala_umum($bypass = "")
{
include "inc/koneksi.php";
$sql = "SELECT GROUP_CONCAT(idg order by idg asc separator '_') FROM `t_aturan` group by idp";
$query = mysql_query($sql) or die(mysql_error());
$list = array();
while($result = mysql_fetch_array($query))
{
$arr = explode("_", $result[0]);
$list[] = $arr;
}
$gu = call_user_func_array('array_intersect', $list);
$list = array();
foreach ($gu as $key => $value) {
array_push($list, $value);
}
if($bypass=="")
{
$_SESSION['list_gejala_umum'] = $list;
}
else
{
return $list;
}
// echo "<pre>b"; print_r($list); echo "</pre>";
}
function gejala_setelah_umum($idp)
{
include "inc/koneksi.php";
$cond = ($idp) ? "idp = '".$idp."'" : 1;
$sql = "SELECT GROUP_CONCAT(idg order by mb desc separator '_') FROM `t_aturan` WHERE $cond group by idp";
$query = mysql_query($sql) or die(mysql_error());
$list = array();
while($result = mysql_fetch_array($query))
{
$arr = explode("_", $result[0]);
$list = $arr;
}
$gu = gejala_umum(TRUE);
$result = array_intersect($gu, $list);
$fin = array_diff($list, $result);
$list = array();
foreach ($fin as $key => $value) {
array_push($list, $value);
}
$_SESSION['list_gejala_setelah_umum'] = $list;
$_SESSION['list_gejala_umum'] = $list;
return $list;
}
?><file_sep>/modul/hasildiag.php
<?php
session_start();
require 'inc/koneksi.php';
require 'modul/core.php';
$lala= $_SESSION['userid'];
$date = date("Y-m-d");
$query=mysql_query("select * from t_user where userid='$lala'");
$datapas = mysql_fetch_array($query);
if (count($_SESSION['listgejala'])!=0){
$hasil = CF($_SESSION['listgejala']);
$hasilmax = doublemax($hasil['h']);
$idpeny = $hasil['p'][$hasilmax['i']];
//print_r($hasil);
//print_r($hasilmax);
//echo $idpeny;
$penyakit = mysql_query("SELECT * FROM t_penyakit WHERE idp = '".$hasil['p'][$hasilmax['i']]."' LIMIT 0,1") or die(mysql_error());
$datap = mysql_fetch_array($penyakit);
?>
<div class="content-box-header"><center><h4><u>HASIL PENELUSURAN PENYEBAB KANKER KANDUNGAN<h4/><u/><center><p></div>
<table width="100%">
<tr valign="top">
<td colspan="3">
<h4>Dilihat Dari Gejala-Gejala Yang Anda Alami:</h4><br/>
</td>
</tr>
<tr valign="center">
<td colspan="3"><?php
for ($i=0;$i<count($_SESSION['listgejala']);$i++){
$idgejx = $_SESSION['listgejala'][$i];
$lgej = mysql_query("SELECT idg, gejala FROM t_gejala WHERE idg='$idgejx'") or die(mysql_error());
$dlgej = mysql_fetch_array($lgej);
echo "<li>[".$dlgej['idg']."] ".$dlgej['gejala']."<br/>";
}
?></td>
</tr>
<tr valign="top">
<td colspan="3">
<br/><h4>Dugaan Sementara Anda Didiagnosa Mengidap Penyakit : <i><?php echo $datap ['nama'];?></i></h4><br/>
</td>
</tr>
<tr valign="center">
<td colspan="3">-<?php echo $datap['desk'];?></td>
</tr>
<tr valign="center">
<td colspan="3"><br/><h4>Penanganan Diagnosis : </h4><br/></td>
</tr>
<tr valign="center">
<td colspan="3"><?php echo $datap['penanganan'];?></td>
</tr>
<tr valign="center">
<td colspan="3"><br/><h4>DUGAAN PENYAKIT dengan tingkat kepercayaan (CF) : <i><?php echo $hasilmax['m'] ;?></i></h4><br/> </td>
</tr>
<td colspan="3" color="red">
<br/>.: UNTUK MENGETAHUI LEBIH JELAS TENTANG PENYAKIT YANG ANDA DERITA,
SILAHKAN KONSULTASI LEBIH LANJUT KE DOKTER ATAU RUMAH SAKIT :.</h4><br/>
</td>
<?php
$qry = mysql_query("insert into t_diagnosa values ('".$datap ['idp']."','".$datapas ['idu']."','".$hasilmax['m']."','$date')") or die(mysql_error());
?>
</table>
<?php
}else{
?>
<div class="content-box-header"><center>HASIL PENELUSURAN PENYEBAB KANKER KANDUNGAN<center><p></div>
<table width="100%">
<tr valign="top">
<td colspan="3">
<h4 align="center">Kemungkinan anda tidak sakit !</h4><br/>
</td>
</tr>
</table>
<?php
}
unset($_SESSION['listgejala']);
?>
<file_sep>/home.php
<?php
session_start();
if(!isset($_SESSION['userid'])){
echo "<script language=Javascript>
alert('Anda tidak berhak untuk mengakses laman ini, Silahkan login terlebih dahulu');
document.location='index.php';
</script>";
}
?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>Sistem Pakar</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<link href="css/style.css" rel="stylesheet" type="text/css" />
<link rel="stylesheet" href="css/tstyle.css"/>
<link rel="stylesheet" href="css/tabel.css"/>
</head>
<body>
<div id="topPan"> <a href="#"><img src="images/lolo.png" title="Brain Tech" alt="" width="" height="" border="0" class="logo"/></a>
<div id="topimagePan"><img src="images/g2.jpg" alt="" /></div>
<ul>
<li><a href="home.php?page=home">Home</a></li>
<li><a href="home.php?page=profil">About us</a></li>
<li><a href="#">Glosarium</a></li>
<li><a href=log.php?op=out>Logout</a></li>
</ul>
</div>
<div id="bodyPan">
<div id="leftPan">
<?php
if(isset($_GET['page'])){
$page=$_GET['page'];
include 'modul/'.$page.'.php';
}else{
include 'modul/home.php';
}
?>
</div>
<div id="rightPan">
<?php
if ($_SESSION['level']==10){
include "menupas.php";
}
if ($_SESSION['level']==30){
include "menupakar.php";
}
else if ($_SESSION['level']==50){
include "menuadm.php";
}
?>
</div>
</div>
<div id="bodyBottomPan">
</div>
<div id="footerPan">
<ul>
<li>SISTEM PAKAR -</li>
<li> - <NAME> 08</li>
<li>      TIF UIN Sunan Kalijaga Yogyakarta
</li>
</ul>
</div>
</body>
</html>
<file_sep>/modul/backup/restore.php
<?php
// connect to the MySQL database
//$dbHost = "localhost";
//$dbUser = "root";
//$dbPass = "...";
//$dbName = "perpustakaan";
//mysql_connect($dbHost, $dbUser, $dbPass);
//mysql_select_db($dbName);
include "config/koneksi.php";
echo "<h1>Restore Database PSB</h1>";
echo "DB Name: ".$dbName;
// form to upload the dump file
echo "<form enctype='multipart/form-data' method='post' action='".$_SERVER['PHP_SELF']."?op=restore'>";
echo "<input type='hidden' name='MAX_FILE_SIZE' value='20000000'>
<input name='datafile' type='file'>
<input name='submit' type='submit' value='Restore'>";
echo "</form>";
// data restore process
if ($_GET['op'] == "restore")
{
// read the file name
$fileName = $_FILES['datafile']['name'];
// upload the file
move_uploaded_file($_FILES['datafile']['tmp_name'], $fileName);
// build the shell command string for the restore
// here it is assumed that mysql.exe is located in the directory C:\AppServ\MySQL\bin
$string = "C:\xampp\mysql\bin\mysql -u".$dbUser." -p".$dbPass." ".$dbName." < ".$fileName;
// run the restore command in the shell via PHP
exec($string);
// delete the uploaded dump file
unlink($fileName);
}
?><file_sep>/modul/vdbase.php
<?php
// include the koneksi.php file
include "inc/koneksi.php";
echo "<h3>BACKUP DATABASE</h3><br>";
echo "<h3>Nama Database: ".$dbName."</h3><br>";
echo "<h3>Daftar Tabel</h3>";
// query to list all tables in the database
$query = "SHOW TABLES";
$hasil = mysql_query($query);
// display all tables inside a form
echo "<form method='post' action='modul/prosesbackup.php'>";
echo "<table>";
while ($data = mysql_fetch_row($hasil))
{
echo "<tr><td><input type='checkbox' name='tabel[]' value='".$data[0]."'></td><td>".$data[0]."</td></tr>";
}
echo "</table><br>";
echo "<input type='submit' name='submit' value='Backup Data'>";
echo "</form>";
?><file_sep>/modul/editorteks.php
<html>
<script type="text/javascript" src=".\jscripts/tiny_mce/tiny_mce.js"></script>
<script type="text/javascript">
tinyMCE.init({
mode : "exact",
elements : "elm2",
theme : "advanced",
skin : "o2k7",
skin_variant : "silver",
plugins : "safari,pagebreak,style,layer,table,save,advhr,advimage,advlink,emotions,iespell,insertdatetime,preview,media,searchreplace,print,contextmenu,paste,directionality,fullscreen,noneditable,visualchars,nonbreaking,xhtmlxtras,template,inlinepopups",
theme_advanced_buttons1 : "save,newdocument,|,bold,italic,underline,strikethrough,|,justifyleft,justifycenter,justifyright,justifyfull,|,styleselect,formatselect,fontselect,fontsizeselect",
theme_advanced_buttons2 : "cut,copy,paste,pastetext,pasteword,|,search,replace,|,bullist,numlist,|,outdent,indent,blockquote,|,undo,redo,|,link,unlink,anchor,image,cleanup,help,|,insertdate,inserttime,preview,|,forecolor,backcolor",
theme_advanced_buttons3 : "tablecontrols,|,hr,removeformat,visualaid,|,sub,sup,|,charmap,emotions,iespell,media,advhr,|,print,|,ltr,rtl,|,fullscreen",
theme_advanced_buttons4 : "insertlayer,moveforward,movebackward,absolute,|,styleprops,|,cite,abbr,acronym,del,ins,attribs,|,visualchars,nonbreaking,template,pagebreak",
theme_advanced_toolbar_location : "top",
theme_advanced_toolbar_align : "left",
theme_advanced_statusbar_location : "bottom",
theme_advanced_resizing : true,
template_external_list_url : "lists/template_list.js",
external_link_list_url : "lists/link_list.js",
external_image_list_url : "lists/image_list.js",
media_external_list_url : "lists/media_list.js",
template_replace_values : {
username : "Some User",
staffid : "991234"
}
});
</script>
<?php
require 'inc/koneksi.php';
if (isset($_POST['simpan'])){
$title=ucwords($_POST['title']);
$content=$_POST['content'];
$author=$_POST['author'];
	$tanggal=date("Y-m-d");
if(empty($title) || empty($content) || empty($tanggal)){
?><center><font color="red">Data yang Anda inputkan belum lengkap. SILAHKAN ULANGI :)<a href="home.php?page=editorteks"></a></font><?php
}else{
$query=mysql_query("insert into t_artikel values('','$title','$content', '$author', NOW(),'')");
if($query){
echo "<script>window.location.href = 'home.php?page=editorteks&message=success';</script>";
}else{
echo "Gagal input data";
echo mysql_error();
}
}
}
if (isset($_POST['update'])){
$idArtikel = $_POST['idArtikel'];
$author = $_POST['author'];
$title = $_POST['title'];
$content = $_POST['content'];
$edsql= mysql_query("UPDATE `sipadu`.`t_artikel` SET idArtikel='$idArtikel', author='$author', title='$title', content='$content' WHERE `t_artikel`.`idArtikel` = '$idArtikel';") or die(mysql_error());
if ($edsql){
echo "<script>window.location.href = 'home.php?page=artikel';</script>";
}else{
}
}
if ($_GET['aksi']=="del"){
$idArtikel = $_GET['idArtikel'];
$delsql = mysql_query("DELETE FROM t_artikel WHERE idArtikel='$idArtikel'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=artikel';</script>";
}else{
}
}
?>
<title>Form Editor</title>
<h4><u><center>MENAMBAH INFORMASI DATA ARTIKEL</u></center></h4>
<body onLoad="document.login.userid.focus();">
<?php
if (!empty($_GET['message']) && $_GET['message'] == 'success') {
echo '<center>Berhasil Menambah Data Baru </center>';
}
?>
<form action="" id="form" method="post">
<input type="hidden" name="idArtikel" value="<?php
	if (isset($_GET['aksi']) && $_GET['aksi']=="edit"){
$idArtikel = $_GET['idArtikel'];
$qedit = mysql_query("SELECT * from t_artikel WHERE idArtikel='$idArtikel'");
$redit = mysql_fetch_assoc($qedit); echo $idArtikel;
$idArtikel = $redit['idArtikel'];
$author = $redit['author'];
$title = $redit['title'];
$content = $redit['content'];}?>">
<table width="637" border="0" align="center" cellpadding="0" cellspacing="0">
<tr>
<td height="30"><strong>Author</strong></td>
</tr>
<tr>
<td width="67%" height="30"><font face="Times New Roman" size="2">
<input type="text" name="author"/ size="30" value="<?php echo $author;?>" ></font></td>
</tr>
<tr>
<td height="30"><strong>Judul Artikel</strong></td>
</tr>
<tr>
<td width="67%" height="30"><font face="Times New Roman" size="2">
<input type="text" name="title"/ size="40" value="<?php echo $title;?>"></font></td>
</tr>
<tr>
<td width="67%" height="30"><font face="Verdana, Arial, Helvetica, sans-serif" size="1"><em>(Gunakan editor untuk mengedit tulisan)</em></font></td>
</tr>
<tr>
<td width="67%" height="182"><font face="Times New Roman" size="2">
<textarea name="content" cols="60" rows="25" id='elm2'><?php echo $content;?></textarea></font></td>
</tr>
<tr>
<td> </td>
</tr>
<tr>
<td width="67%">
	<?php if (isset($_GET['aksi']) && $_GET['aksi']=='edit'){
echo '<input class="button" type="submit" value="Simpan" name="update" />';
}else{
echo '<input class="button" type="submit" value="Tambahkan" name="simpan" />';}
?>
<input type="button" name="batal" value="Batal" onClick="location.replace('home.php?page=editorteks');" /></td>
</tr>
</table>
</form>
</body>
</html>
<file_sep>/modul/eartikel.php
<title>ARTIKEL</title>
<div >
<center>DATA POSTINGAN ARTIKEL<center>
</div> <!-- End .content-box-header -->
<a href='?page=editorteks'><img id='icon' src='images/add.png' alt='tambah' title='tambah'></a>
<p><table id="tablemn" >
<thead >
<th width="5%" >No.</th>
<th width="20%">Author</th>
<th width="35%">Judul Artikel</th>
<th width="24%">Tanggal Post</th>
<th width="8%">View</th>
<th width="8%">Aksi</th>
</thead>
<tbody>
<?php
require 'inc/koneksi.php';
require 'inc/buatid.php';
$batas=10;
$halaman = $_GET['halaman'];
if(empty($halaman))
{
$posisi=0;
$halaman=1;
}
else
{
$posisi = ($halaman-1) * $batas;
}
$no=$posisi+1;
$sql= mysql_query("SELECT * FROM t_artikel ORDER BY idArtikel ASC limit $posisi,$batas") or die (mysql_error());
if (mysql_num_rows($sql) == 0){
echo '<tr><td colspan=4><center><br/><h4>Tidak ada data</h4></center></td></tr>';
}else{
while ($row = mysql_fetch_array($sql)) {
echo '<tr>
<td>'.$no.'</td>
<td>'.$row['author'].'</td>
<td>'.$row['title'].'</td>
<td>'.$row['datePub'].'</td>
<td>'.$row['views'].'</td>
<td><a href="home.php?page=editorteks&aksi=edit&idArtikel='.$row['idArtikel'].'" title="Edit"><img src="images/icons/pencil.png" alt="Edit" /></a>
<a href="home.php?page=editorteks&aksi=del&idArtikel='.$row['idArtikel'].'" onclick="return confirm(\'Apakah kamu yakin ingin menghapus judul artkel '.$row['title'].' ?\');" title="Delete"><img src="images/icons/cross.png" alt="Delete" /></a></td></tr>';
$no++;
}
}
?>
</tbody>
</table><br/>
<?php
$file="?page=eartikel";
$tampil2="SELECT * FROM t_artikel ORDER BY idArtikel ASC";
$hasil2=mysql_query($tampil2);
$jmldata=mysql_num_rows($hasil2);
$jmlhalaman=ceil($jmldata/$batas);
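// e.g. with $batas = 10 and 23 rows in t_artikel, ceil(23/10) = 3 pages,
// and page 2 starts at offset ($halaman-1)*$batas = 10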
//link to the previous page
if($halaman > 1)
{
$previous=$halaman-1;
echo "<A HREF=$file&&halaman=1><< First</A> |
<A HREF=$file&&halaman=$previous>< Previous</A> | ";
}
else
{
echo "<< First | < Previous | ";
}
$angka=($halaman > 3 ? " ... " : " ");
for($i=$halaman-2;$i<$halaman;$i++)
{
if ($i < 1)
continue;
$angka .= "<a href=$file&&halaman=$i>$i</A> ";
}
$angka .= " <b>$halaman</b> ";
for($i=$halaman+1;$i<($halaman+3);$i++)
{
if ($i > $jmlhalaman)
break;
$angka .= "<a href=$file&&halaman=$i>$i</A> ";
}
$angka .= ($halaman+2<$jmlhalaman ? " ...
<a href=$file&&halaman=$jmlhalaman>$jmlhalaman</A> " : " ");
echo "$angka";
//link to the next page (Next)
if($halaman < $jmlhalaman)
{
$next=$halaman+1;
echo " | <A HREF=$file&&halaman=$next>Next ></A> |
<A HREF=$file&&halaman=$jmlhalaman>Last >></A> ";
}
else
{
echo " | Next > | Last >>";
}
echo "<p><font color=red>Total Postingan Artikel : <b>$jmldata</b> Artikel</font></p>";
echo "$nama";
?>
<file_sep>/modul/edProb.php
<div>
<center>EDIT NILAI PROBABILITAS MB & MD<center><p>
</div>
<form action="" id="form" method="post">
<!--<input type="hidden" name="id" value="<?php
//if (isset($_GET['aksi'])=="edit"){
//$id = $_GET['ids'];
//$qedit = mysql_query("SELECT * from t_pasien WHERE id='$ids'");
//$redit = mysql_fetch_assoc($qedit); echo $id;
//$nama = $redit['nama'];
//$username = $redit['username'];}?>">-->
<table>
<tr valign="top">
<td width="100">Gejala</td>
<td width="5">: </td>
<td>
<select name="idg" >
<?php
$query = "SELECT * FROM t_gejala ORDER BY idg ASC";
$result = mysql_query($query);
while ($rows = mysql_fetch_array($result)) {
echo '<option value="'.$rows[idg].'">['.$rows[idg].'] '.$rows[gejala].'</option>';
}
?>
</select>
</td>
</tr>
<tr valign="top">
<td width="100">Penyakit</td>
<td width="5">: </td>
<td width="10">
<table width="100%">
<?php
$query = "SELECT * FROM t_penyakit ORDER BY idp ASC";
$result = mysql_query($query);
$no=1;
while ($rows = mysql_fetch_array($result)) {
echo "<tr><td width='80%'><input type='checkbox' value='".$rows['idp']."' name='mk".$no."' checked/>[".$rows['idp']."] ".$rows['nama']."</td><td><input type=text name='mb".$no."' size=1 placeholder='MB'/><input type=text name='md".$no."' size=1 placeholder='MD'><td></tr>";
$no++;
}
?>
<input type="hidden" name="jumMK" value="<?php echo $no;?>"/>
</table>
</td>
</tr>
<tr valign="top">
<td width="100">Jika YA Maka</td>
<td width="5">: </td>
<td width="10">
<select name="g_ya" size=5>
<?php
$query = "SELECT * FROM t_gejala ORDER BY idg ASC";
$result = mysql_query($query);
while ($rows = mysql_fetch_array($result)) {
echo '<option value="'.$rows[idg].'">['.$rows[idg].'] '.$rows[gejala].'</option>';
}
?>
</select>
</td>
</tr>
<tr valign="top">
<td width="100">Jika Tidak Maka</td>
<td width="5">: </td>
<td width="10">
<select name="g_tidak" size=5>
<?php
$query = "SELECT * FROM t_gejala ORDER BY idg ASC";
$result = mysql_query($query);
while ($rows = mysql_fetch_array($result)) {
echo '<option value="'.$rows[idg].'">['.$rows[idg].'] '.$rows[gejala].'</option>';
}
?>
</select>
</td>
<tr valign="top">
<td width="75"></td>
<td width="5"></td>
<td><input class="button" type="submit" value="Simpan" name="update" /></td>
</tr>
</table>
</form><file_sep>/modul/artikel.php
<?php
require "inc/koneksi.php";
require "script.php";
if(empty($no))
$posisi=0;
$no=$posisi+1;
$que = mysql_query("SELECT * FROM t_artikel ORDER BY idArtikel ASC ");
?>
<h3 align="center"><br>     .: MANAJEMEN DATA ARTIKEL :.</h3><br>
<div class="demo_jui">
<td><a href='?page=editorteks'><img id='icon' src='images/plus.jpg' alt='tambah' title='tambah'></a></td>
<br><br>
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<td align="center">No.</td>
<td align="center">Author</td>
<td align="center"><NAME></td>
<td align="center">Tanggal Post</td>
<td align="center">View</td>
<td align="center">Detail</td>
</tr>
</thead>
<?php
while ($konten = mysql_fetch_array($que)){
?>
<tr class="gradeC">
<td align="center"><?php echo $no; ?></td>
<td align="justify"><?php echo $konten['author']; ?></td>
<td align="justify"><?php echo $konten['title']; ?></td>
<td align="justify"><?php echo $konten['datePub']; ?></td>
<td align="justify"><?php echo $konten['views']; ?></td>
<td align="center"><a href="home.php?page=editorteks&aksi=edit&idArtikel=<?php echo $konten['idArtikel'];?>"> <img src="images/icons/pencil.png" alt="" title="Detail" border="0" /></a>
<a href="home.php?page=editorteks&aksi=del&idArtikel=<?php echo $konten['idArtikel'];?>" onclick="return confirm('Apakah kamu yakin ingin menghapus');"> <img src="images/icons/cross.png" alt="" title="hapus" border="0" /></a>
<?php
$no++;
?>
</td>
</tr>
<?php
}
?>
</table>
</div>
<?php
require 'inc/koneksi.php';
if ($_GET['aksi']=="del"){
$idArtikel = $_GET['idArtikel'];
$delsql = mysql_query("DELETE FROM t_artikel WHERE idArtikel='$idArtikel'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=artikel';</script>";
}else{
}
}
?>
<file_sep>/modul/home_admin.php
<table height="20px" width="300px" bgcolor="" align="center">
<td colspan="2"><div align="center"><font size="3px" color="white">Halaman Utama</div></td></fieldset>
</table>
<p> Menu administrator ini adalah menu khusus administrator.
<p>Untuk memasukan setup-setup yang ada dalam program ini, menu-menu bersifat rahasia dan hanya diperuntukan bagi administrator.
<p> Administrator adalah seorang pakar kanker kandungan yang ditunjuk untuk mengelola sistem ini, maka kerahasiaan dari semua data yang ada dalam sistem ini adalah tanggung jawab administrator.
</p>
<p> Semua data dalam program ini harus berdasarkan fakta, sehingga data yang dihasilkan dapat valid dan benar.</p>
<file_sep>/index.php
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>Sistem Pakar</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<link type="text/css" href="css/ui-lightness/jquery-ui-1.8.custom.css" rel="stylesheet" />
<link href="css/style.css" rel="stylesheet" type="text/css" />
<script type="text/javascript" src="js/jquery-1.4.2.min.js"></script>
<script type="text/javascript" src="js/jquery-ui-1.8.custom.min.js"></script>
</head>
<body>
<div id="topPan"> <a href=""><img src="images/lolo.png" title="" alt="" border="0" class="logo"/></a>
<div id="topimagePan"><img src="images/g2.jpg" alt="" /></div>
<ul>
<li> <a href="index.php?page=homeart">home</a></li>
<li> <a href="index.php?page=help">Help</a></li>
<li><a href="index.php?page=profil">about us</a></li>
<li><a href="index.php?page=register">Register</a></li>
<li><a href="#">Glosarium</a></li>
</ul>
</div>
<div id="bodyPan">
<div id="leftPan">
<?php
$page=$_GET['page'];
if(isset($_GET['page'])){
include 'modul/'.$page.'.php';
}else{
include 'modul/homeart.php';
}
?>
</div>
<BR>
<div id="rightPan">
<form action="log.php?op=in" method="post" class="form1">
<h2>Member Login</h2>
<div>
<?php
	//this PHP block is used to display the login error messages
if (!empty($_GET['error'])) {
if ($_GET['error'] == 1) {
echo 'Username dan Password belum diisi!';
} else if ($_GET['error'] == 2) {
echo ' Username belum diisi!';
} else if ($_GET['error'] == 3) {
echo ' Password belum diisi!';
} else if ($_GET['error'] == 4) {
echo ' Akun Anda belum terdaftar!';
} else {
echo '';
}
}
?>
</div>
<label>Name:</label>
<input type="text" name="userid" placeholder="username" <?php echo $userid;?>/>
	<label>Password:</label>
<input type="password" name="psw" placeholder="<PASSWORD>" <?php echo $psw;?>/>
<label class="label1"></label>
<input name="login" type="submit" class="botton" id="GO" value="Sign" />
</form>
<h3>Daftar Penyakit</h3>
<ul>
<li><a href='index.php?page=artikel1'>Kanker Servik</a></li>
<li><a href='index.php?page=artikel2'>Kanker Endometrium</a></li>
<li><a href='index.php?page=artikel3'>Korpus Uteri</a></li>
<li><a href="index.php?page=artikel4">Kanker Ovarium</a></li>
<li><a href="index.php?page=artikel5">Kanker Vulva</a></li>
<li><a href="index.php?page=artikel6">Sarkoma Uteri</a></li>
<li><a href="index.php?page=artikel7">Kanker Vagina</a></li>
<li><a href="index.php?page=artikel8">Kanker Tuba Fallopii</a></li>
</ul>
<!--
<div id="rightform2Pan">
<form action="" method="get" class="form2">
<h2>Pencarian</h2>
<label></label>
<input name="" type="text" />
<input name="search" type="submit" class="search" id="search" value="CARI"/>
</form>
</div>-->
</div>
</div>
<div id="bodyBottomPan">
</div>
<div id="footerPan">
<ul>
<li>SISTEM PAKAR -</li>
<li> - <NAME> 08</li>
<li>      TIF UIN Sunan Kalijaga Yogyakarta
</li>
</ul>
</div>
</body>
</html>
<file_sep>/modul/e_probb.php
<?php
require 'inc/koneksi.php';
$prob = $_GET ['idg'];
$query = mysql_query ("select *, t_gejala.*, t_pertanyaan.* from t_aturan inner join t_gejala on t_aturan.idg=t_gejala.idg
inner join t_pertanyaan on t_gejala.idg=t_pertanyaan.idg where t_gejala.idg='$prob'");
if ($h = mysql_fetch_array($query))
?>
<div>
<center>EDIT NILAI PROBABILITAS MB & MD<center><p>
</div>
<form action="" id="form" method="post">
<!--<input type="hidden" name="id" value="<?php
//if (isset($_GET['aksi'])=="edit"){
//$id = $_GET['ids'];
//$qedit = mysql_query("SELECT * from t_pasien WHERE id='$ids'");
//$redit = mysql_fetch_assoc($qedit); echo $id;
//$nama = $redit['nama'];
//$username = $redit['username'];}?>">-->
<table>
<tr valign="top">
<td width="100">Gejala</td>
<td width="5">: </td>
<td>
<?php
echo "<select name =gejala>";
$query = "select * FROM t_gejala order by idg asc";
$id = mysql_query ($query);
while ($row=mysql_fetch_array($id)){
if ($h[idg]==$row[idg]){
echo"<option value=$row[idg] selected>[$row[idg]] $row[gejala]</option>";
}else{
echo"<option value=$row[idg]>$row[gejala]</option>";
}
}
echo"</select>";
?>
</select>
</td>
</tr>
<tr valign="top">
<td width="100">Penyakit</td>
<td width="5">: </td>
<td width="10">
<table width="100%">
<?php
	if($_GET['action']=='edit'){
		$query = "SELECT t_penyakit.idp idp, t_penyakit.nama nama, t_aturan.mb mb, t_aturan.md md FROM t_aturan
			LEFT JOIN t_gejala ON t_aturan.idg=t_gejala.idg
			LEFT JOIN t_penyakit ON t_aturan.idp=t_penyakit.idp WHERE t_aturan.idg= '$prob'";
	}else{
		$query = "SELECT * from t_penyakit order by idp ASC";
	}
$result = mysql_query($query);
$no=1;
while ($rows = mysql_fetch_array($result)) {
echo "<tr>
<td width='80%'><input type='checkbox' value='".$rows['idp']."' name='mk".$no."' checked/>[".$rows['idp']."] ".$rows['nama']."</td>
<td><input type=text name='mb".$no."' size=1 placeholder='MB' value = '".$rows['mb']."'/>
<input type=text name='md".$no."' size=1 placeholder='MD' value = '".$rows['md']."'><td></tr>";
$no++;
}
?>
<input type="hidden" name="jumMK" value="<?php echo $no;?>"/>
</table>
</td>
</tr>
<tr>
<td>Jika YA Maka</td>
<td>: </td>
<td>
<?php
echo "<select name =ya size=5>";
$query = "select * from t_gejala order by idg asc";
$id = mysql_query ($query);
while ($row=mysql_fetch_array($id)){
if ($h[g_ya]==$row[idg])
echo"<option value=$row[idg] selected>[$row[idg]] $row[gejala]</option>";
else
echo"<option value=$row[idg]>[$row[idg]] $row[gejala]</option>";
}
echo"</select>";
?>
</td>
</tr>
<tr valign="top">
<td width="100"><NAME></td>
<td width="5">: </td>
<td width="10">
<?php
echo "<select name =ya size=5>";
$query = "select * from t_gejala order by idg asc";
$id = mysql_query ($query);
while ($row=mysql_fetch_array($id)){
if ($h[g_tidak]==$row[idg])
echo"<option value=$row[idg] selected>[$row[idg]] $row[gejala]</option>";
else
echo"<option value=$row[idg]>[$row[idg]] $row[gejala]</option>";
}
echo"</select>";
?>
</td></tr>
<tr valign="top">
<td width="75"></td>
<td width="5"></td>
<td><?php if (isset($_GET['aksi']) && $_GET['aksi']=='edit'){
echo '<input class="button" type="submit" value="Simpan" name="update" />';
}else{
echo '<input class="button" type="submit" value="Simpan" name="simpan" />';}
?></td>
</tr>
</table>
</form><file_sep>/modul/artikel7.php
<?php
// connect to mysql
include "inc/koneksi.php";
?>
<?php
// query that reads the article titled 'kanker vagina'
$query = "SELECT * FROM t_artikel where title='kanker vagina'";
$hasil  = mysql_query($query);
while ($data = mysql_fetch_array($hasil))
{
   // show the article title
   echo "<h3>".$data['title']."</h3>";
   // show the publish date, author and how many times it has been read
   echo "<p><small>Tgl Post: ".$data['datePub'].". Posted by: ".$data['author'].". Dibaca: ".$data['views']." kali</small></p><hr>";
   // split the article body on the <!--more--> marker
   $pecah = explode("<!--more-->", $data['content']);
   // the excerpt is the first element (index 0) of the exploded array
   $excerpt = $pecah[0];
   // show the excerpt
   echo "<p>".$excerpt."</p>";
   // "read more" link, using the article id as the parameter
   echo "<p><a href='index.php?page=vartikel&id=".$data['idArtikel']."'>Baca selengkapnya...</a></p>";
echo "<hr>";
}
?><file_sep>/modul/hdiagnosa.php
<?php
require "inc/koneksi.php";
require "script.php";
if(empty($no))
$posisi=0;
$no=$posisi+1;
$que = mysql_query("SELECT *, u.* FROM t_penyakit p INNER JOIN t_diagnosa d ON p.idp= d.idp inner join t_user u on d.ids=u.idu ;");
?>
<h3 align="center"><br>     .: REKAMAN HASIL DIAGNOSA AWAL KANKER KANDUNGAN :.</h3><br>
<div class="demo_jui">
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<td align="center">No</td>
<td align="center">Nama</td>
<td align="center">Umur</td>
<td align="center">Hasil Diagnosa Penyakit</td>
<td align="center">Nilai CF</td>
<td align="center">Tgl Diagnosa</td>
</tr>
</thead>
<?php
while ($konten = mysql_fetch_array($que)){
?>
<tr class="gradeC">
<td align="center"><?php echo $no; ?></td>
<td align="justify"><?php echo $konten['nama_lengkap']; ?></td>
<td align="justify"><?php echo $konten['umur']; ?></td>
<td align="justify"><?php echo $konten['nama']; ?></td>
<td align="justify"><?php echo $konten['cf']; ?></td>
<td align="justify"><?php echo $konten['tgl_diagnosa']; ?></td>
<?php
$no++;
?>
</td>
</tr>
<?php
}
?>
</table>
</div>
<file_sep>/modul/rule_pertanyaan.php
<?php
require 'inc/koneksi.php';
if (isset($_POST['simpan'])){
$idg = $_POST['idg'];
$jumMK =$_POST['jumMK'];
$ya =$_POST['g_ya'];
$tidak =$_POST['g_tidak'];
$qryi=mysql_query("Insert into t_pertanyaan (id,idg,g_ya,g_tidak) values('','$idg','$ya','$tidak')");
for($i=1; $i <$jumMK; $i++)
{
$mk=$_POST['mk'.$i];
$mb=$_POST['mb'.$i];
$md=$_POST['md'.$i];
if (!empty($mk) && !empty($mb) && !empty($md))
{
$insql= mysql_query("insert into t_aturan(ida,idg,idp,mb,md) values('','$idg','$mk', '$mb', '$md')");
if ($insql && $qryi){
echo "<script>window.location.href = 'home.php?page=rule_pertanyaan&message=success';</script>";
}else{
}
}}}
?>
<div>
<center>INPUT NILAI PROBABILITAS MB & MD<center><p>
<?php
if (!empty($_GET['message']) && $_GET['message'] == 'success') {
echo '<center>Berhasil Menambah Rule</center>';
}
?>
</div>
<form action="" id="form" method="post">
<!--<input type="hidden" name="id" value="<?php
//if (isset($_GET['aksi'])=="edit"){
//$id = $_GET['ids'];
//$qedit = mysql_query("SELECT * from t_pasien WHERE id='$ids'");
//$redit = mysql_fetch_assoc($qedit); echo $id;
//$nama = $redit['nama'];
//$username = $redit['username'];}?>">-->
<table>
<tr valign="top">
<td width="100">Gejala</td>
<td width="5">: </td>
<td>
<select name="idg" >
<?php
$query = "SELECT * FROM t_gejala ORDER BY idg ASC";
$result = mysql_query($query);
while ($rows = mysql_fetch_array($result)) {
echo '<option value="'.$rows[idg].'">['.$rows[idg].'] '.$rows[gejala].'</option>';
}
?>
</select>
</td>
</tr>
<tr valign="top">
<td width="100">Penyakit</td>
<td width="5">: </td>
<td width="10">
<table width="100%">
<?php
$query = "SELECT * FROM t_penyakit ORDER BY idp ASC";
$result = mysql_query($query);
$no=1;
while ($rows = mysql_fetch_array($result)) {
echo "<tr><td width='80%'><input type='checkbox' value='".$rows['idp']."' name='mk".$no."' checked/>[".$rows['idp']."] ".$rows['nama']."</td><td><input type=text name='mb".$no."' size=1 placeholder='MB'/><input type=text name='md".$no."' size=1 placeholder='MD'><td></tr>";
$no++;
}
?>
<input type="hidden" name="jumMK" value="<?php echo $no;?>"/>
</table>
</td>
</tr>
<tr valign="top">
<td width="100">Jika YA Maka</td>
<td width="5">: </td>
<td width="10">
<select name="g_ya" size=5>
<?php
$query = "SELECT * FROM t_gejala ORDER BY idg ASC";
$result = mysql_query($query);
while ($rows = mysql_fetch_array($result)) {
echo '<option value="'.$rows[idg].'">['.$rows[idg].'] '.$rows[gejala].'</option>';
}
?>
</select>
</td>
</tr>
<tr valign="top">
<td width="100"><NAME></td>
<td width="5">: </td>
<td width="10">
<select name="g_tidak" size=5>
<?php
$query = "SELECT * FROM t_gejala ORDER BY idg ASC";
$result = mysql_query($query);
while ($rows = mysql_fetch_array($result)) {
echo '<option value="'.$rows[idg].'">['.$rows[idg].'] '.$rows[gejala].'</option>';
}
?>
</select>
</td>
<tr valign="top">
<td width="75"></td>
<td width="5"></td>
<td><?php if (isset($_GET['aksi']) && $_GET['aksi']=='edit'){
echo '<input class="button" type="submit" value="Simpan" name="update" />';
}else{
echo '<input class="button" type="submit" value="Simpan" name="simpan" />';}
?></td>
</tr>
</table>
</form><file_sep>/modul/admin/vgejala.php
<?php
require "inc/koneksi.php";
require "script.php";
if(empty($no))
$posisi=0;
$no=$posisi+1;
$que = mysql_query("SELECT * FROM t_gejala ORDER BY idg ASC ");
?>
<h3 align="center"><br>     .: DATA GEJALA PENYAKIT KANKER KANDUNGAN :.</h3><br>
<div class="demo_jui">
<br><br>
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<td align="center">No</td>
<td align="center">Kode</td>
<td align="center">Nama Gejala</td>
<td align="center">Pertanyaan</td>
</tr>
</thead>
<?php
while ($konten = mysql_fetch_array($que)){
?>
<tr class="gradeC">
<td align="center"><?php echo $no; ?></td>
<td align="justify"><?php echo $konten['idg']; ?></td>
<td align="justify"><?php echo $konten['gejala']; ?></td>
<td align="justify"><?php echo $konten['pertanyaan']; ?></td>
<?php
$no++;
?>
</td>
</tr>
<?php
}
?>
</table>
</div>
<file_sep>/modul/reg.php
<?php
require 'inc/koneksi.php';
if (isset($_POST['simpan'])){
$nama = $_POST['xnama'];
$username = $_POST['xusername'];
$umur = $_POST['xumur'];
$email = $_POST['xemail'];
$telepon = $_POST['xtelepon'];
$cek=mysql_query("select * from t_user where userid='$username'");
$bcek=mysql_num_rows($cek);
if ($bcek == 0)
{
$date = date("Y-m-d");
$inusrsql= mysql_query("insert into t_user values('','$nama','$username','$username','$umur','$email','$telepon','10','$date')") or die(mysql_error());
echo "<table width='170%' border='0'><td align='left'><br>
<tr><center>Selamat anda telah menjadi user kami, anda dapat menggunakan hak akses anda untuk melakukan login dengan data sebagai berikut: </td></tr>
<br><tr><center>Username : $username</td></tr>
<tr><center>Password : $<PASSWORD></td></tr>
</table>";
}else
{
echo " <center> Maaf username sudah ada, silahkan coba dengan username berbeda </center>";
}
}
?><file_sep>/modul/delUser.php
<?php
require "inc/koneksi.php";
$userid = $_GET['hapus'];
$query = mysql_query("DELETE FROM t_user WHERE userid='$userid'");
if($query){
echo "<script>window.location.href = 'home.php?page=user';</script>";
}else{
echo "<script language=Javascript>
alert('Silahkan login terlebih dahulu');
document.location='index.php';
</script>";
}
?><file_sep>/modul/diagpakar.php
<?php
session_start();
if($_GET['sess'])
{
unset($_SESSION['list_gejala_setelah_umum']);
unset($_SESSION['listgejala']);
unset($_SESSION['hasil_gejala_umum']);
unset($_SESSION['list_gejala_umum']);
echo "<script>window.location.href = 'home.php?page=diagpakar';</script>";
}
require 'inc/koneksi.php';
require 'modul/core.php';
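/*
 * The helper functions used below live in modul/core.php, which is not shown here,
 * so the descriptions are assumptions inferred from how they are called in this file:
 *   gejala_umum()             - ordered list of the common-symptom ids to ask first
 *   gejala_setelah_umum($idp) - follow-up symptom ids for the disease $idp
 *   CF($gejala_list)          - returns array('p' => disease ids, 'h' => their certainty values)
 *   doublemax($values)        - returns array('i' => index of the largest value, ...)
 * The rule base (t_aturan) stores an MB and MD value per (gejala, penyakit) pair;
 * a typical certainty-factor implementation uses CF = MB - MD and combines evidence
 * as CFcombine = CFold + CFnew*(1 - CFold), but whether core.php follows exactly
 * that combination rule is an assumption.
 */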
if (isset($_POST['cur'])){
if (isset($_POST['ya'])){
$next = $_POST['yaval'];
$lgu = $_SESSION['list_gejala_umum'];
$afternext = (isset($lgu[0])) ? $lgu[0] : "";
array_push($_SESSION['listgejala'], $_POST['cur']);
array_push($_SESSION['hasil_gejala_umum'], $_POST['cur']);
if ($next=="" && !isset($_SESSION['list_gejala_setelah_umum'])){
// echo "<script>window.location.href = 'home.php?page=hdiagpakar';</script>";
$hasil = CF($_SESSION['hasil_gejala_umum']);
$hasilmax = doublemax($hasil['h']);
$idpeny = $hasil['p'][$hasilmax['i']];
$gsu = gejala_setelah_umum($idpeny);
// echo "<pre>gsu"; print_r($gsu); echo "</pre>";
$next = $gsu[0];
$afternext = $gsu[1];
array_splice($_SESSION['list_gejala_umum'], 0, 2); //remove first and second
}
if ($next=="" && !isset($_SESSION['list_gejala_setelah_umum']) && count($_SESSION['hasil_gejala_umum']) == 0){
echo "<script>window.location.href = 'home.php?page=hdiagpakar';</script>";
}
}else if(isset($_POST['tidak'])){
$next = $_POST['tidakval'];
$lgu = $_SESSION['list_gejala_umum'];
$afternext = (isset($lgu[0])) ? $lgu[0] : "";
// array_splice($_SESSION['list_gejala_umum'], 0, 1); //remove first
if ($next=="" && !isset($_SESSION['list_gejala_setelah_umum']) && count($_SESSION['hasil_gejala_umum']) > 0){
// echo "<script>window.location.href = 'home.php?page=hdiagpakar';</script>";
$hasil = CF($_SESSION['hasil_gejala_umum']);
$hasilmax = doublemax($hasil['h']);
$idpeny = $hasil['p'][$hasilmax['i']];
$gsu = gejala_setelah_umum($idpeny);
$next = $gsu[0];
$afternext = $gsu[1];
array_splice($_SESSION['list_gejala_umum'], 0, 2); //remove first and second
}
if ($next=="" && !isset($_SESSION['list_gejala_setelah_umum']) && count($_SESSION['hasil_gejala_umum']) == 0){
echo "<script>window.location.href = 'home.php?page=hdiagpakar';</script>";
}
}
array_splice($_SESSION['list_gejala_umum'], 0, 1); //remove first
if ($next=="" && isset($_SESSION['list_gejala_setelah_umum'])){
echo "<script>window.location.href = 'home.php?page=hdiagpakar';</script>";
}
}else{
$gu = gejala_umum();
$lgu = $_SESSION['list_gejala_umum'];
// $mulai = "G034";
// $mulai = $gu[0];
$mulai = $lgu[0];
$next = $mulai;
$afternext = $lgu[1];
array_splice($_SESSION['list_gejala_umum'], 0, 2); //remove first and second
$_SESSION['listgejala'] = Array();
$_SESSION['hasil_gejala_umum'] = Array();
}
// echo "<pre>list"; print_r($_SESSION['list_gejala_umum']); echo "</pre>";
// echo "<pre>hasil"; print_r($_SESSION['hasil_gejala_umum']); echo "</pre>";
// $qry = mysql_query("SELECT t.idg idg,t.g_ya,t.g_tidak,g.pertanyaan FROM `t_pertanyaan` t
// LEFT JOIN t_gejala g ON t.idg = g.idg WHERE t.idg='$next'") or die(mysql_error());
$qry = mysql_query("SELECT * FROM t_gejala where idg = '$next'") or die(mysql_error());
// echo ("SELECT t.idg idg,t.g_ya,t.g_tidak,g.pertanyaan FROM `t_pertanyaan` t LEFT JOIN t_gejala g ON t.idg = g.idg WHERE t.idg='$next'");
$result = mysql_fetch_array($qry);
?>
<br><br><br>
<div class="content-box-header"><center><u><h4>DIAGNOSA AWAL KANKER KANDUNGAN</u></h4><center><p></div>
<form action="" id="form" method="post">
<input type="hidden" name="cur" value="<?php echo $result['idg'];?>"/>
<!-- <input type="hidden" name="yaval" value="<?php // echo $result['g_ya'];?>"/> -->
<!-- <input type="hidden" name="tidakval" value="<?php // echo $result['g_tidak'];?>"/> -->
<input type="hidden" name="yaval" value="<?php echo $afternext; ?>"/>
<input type="hidden" name="tidakval" value="<?php echo $afternext; ?>"/>
<table width="100%">
<tr valign="top">
<td>
<h4 align="center"><?php echo $result['idg'];?></h4><br/>
<h4 align="center"><?php echo $result['pertanyaan'];?></h4><br/>
</td>
</tr>
<tr valign="top">
<td align="center"><?php echo '<input color="red" class="button" type="submit" value="Ya" name="ya" />
<input class="button" type="submit" value="Tidak" name="tidak" />';
?></td>
</tr>
</table>
</form><file_sep>/modul/xhome.php
<p/><p/><table height="20px" width="300px" bgcolor="" align="center">
<td colspan="2"><div align="center"><font size="1px" color="red">
<!-- Page Head -->
<?php if(isset($_GET['sukses'])){?>
<div class="notification success png_bg">
<a href="#" class="close"><img src="images/icons/active.png" title="Close this notification" alt="close" /></a>
<div><?php echo $_GET['sukses'];?></div>
</div>
<?php } ?>
<?php if(isset($_GET['gagal'])){?>
<div class="notification error png_bg">
<a href="#" class="close"><img src="images/icons/cross_grey_small.png" title="Close this notification" alt="close" /></a>
<div><?php echo $_GET['gagal'];?></div>
</div>
<?php } ?>
<!--<p>
<h3>Selamat datang, <?php if ($_SESSION['level']==1){echo $_SESSION['userid'];}else{echo ucwords(strtolower($_SESSION['userid']));}?>.<h3/>
<?php
if($_SESSION['level']==10){
?>
<br/><br/>
<h3>Anda Login sebagai Member,
<br/>Segera <a href="home.php?page=gpass">ganti password</a> anda untuk menjaga keamanan data anda.</h3>
<?php
}else if($_SESSION['level']==50){
?>
<br/><br/>
<h3>Anda Login sebagai ADMIN,
<br/>Segera ganti password</a> anda untuk menjaga keamanan data anda.</h3>
<?php
}
?>
</h3><hr/>-->
</font></div></td>
</table>
<file_sep>/modul/aturan.php
<?php
require 'inc/koneksi.php';
if (isset($_POST['simpan'])){
$idg = $_POST['idg'];
$jumMK =$_POST['jumMK'];
$ya =$_POST['g_ya'];
$tidak =$_POST['g_tidak'];
$qryi=mysql_query("Insert into t_pertanyaan (id,idg,g_ya,g_tidak) values('','$idg','$ya','$tidak')");
for($i=1; $i <$jumMK; $i++)
{
$mk=$_POST['mk'.$i];
$mb=$_POST['mb'.$i];
$md=$_POST['md'.$i];
if (!empty($mk) && !empty($mb) && !empty($md))
{
$insql= mysql_query("insert into t_aturan(ida,idg,idp,mb,md) values('','$idg','$mk', '$mb', '$md')");
if ($insql && $qryi){
echo "<script>window.location.href = 'home.php?page=aturan';</script>";
}else{
}
}}} else if(isset($_POST['update'])){
$idg = $_POST['idg'];
$jumMK =$_POST['jumMK'];
$ya =$_POST['g_ya'];
$tidak =$_POST['g_tidak'];
$check_id = mysql_query("SELECT idg FROM t_pertanyaan WHERE idg = '$idg'");
$isi = '';
while ($rows = mysql_fetch_array($check_id)) {
$isi = $rows['idg'];
}
if(empty($isi)){
mysql_query("INSERT INTO t_pertanyaan (id,idg,g_ya,g_tidak) VALUES ('','$idg','$ya','$tidak')");
}else{
mysql_query("UPDATE t_pertanyaan set g_ya ='$ya', g_tidak = '$tidak' where idg = '$idg'");
}
for($i=1; $i <$jumMK; $i++){
$mk=$_POST['mk'.$i];
$mb=$_POST['mb'.$i];
$md=$_POST['md'.$i];
if (!empty($mk) && !empty($mb) && !empty($md)){
$insql= mysql_query("UPDATE t_aturan set mb ='$mb', md ='$md' WHERE idg = '$idg' AND idp = '$mk'");
if ($insql){
echo "<script>window.location.href = 'home.php?page=view/vrule';</script>";
}else{
}
}
}
}
?>
<div>
<center>INPUT NILAI PROBABILITAS MB & MD<center><p>
</div>
<form action="" id="form" method="post">
<!--<input type="hidden" name="id" value="<?php
//if (isset($_GET['aksi'])=="edit"){
//$id = $_GET['ids'];
//$qedit = mysql_query("SELECT * from t_pasien WHERE id='$ids'");
//$redit = mysql_fetch_assoc($qedit); echo $id;
//$nama = $redit['nama'];
//$username = $redit['username'];}?>">-->
<table>
<tr valign="top">
<td width="100">Gejala</td>
<td>
<select name="idg" >
<?php
if($_GET['action']=='edit'){
$query = "SELECT * FROM t_gejala WHERE idg = '$_GET[idg]'";
}else{
$query = "SELECT * FROM t_gejala ORDER BY idg ASC";
}
$result = mysql_query($query);
while ($rows = mysql_fetch_array($result)) {
echo '<option value="'.$rows[idg].'">['.$rows[idg].'] '.$rows[gejala].'</option>';
}
?>
</select>
</td>
</tr>
<tr valign="top">
<td width="100"><NAME></td>
<td width="10">
<table width="100%">
<?php
	if($_GET['action']=='edit'){
		$query = "SELECT k.idp, k.nama, r.mb, r.md FROM t_aturan r
			LEFT JOIN t_gejala g ON r.idg = g.idg
			LEFT JOIN t_penyakit k ON r.idp = k.idp
			WHERE r.idg = '$_GET[idg]'";
	}else{
		$query = "SELECT * FROM t_penyakit ORDER BY idp ASC";
	}
$result = mysql_query($query);
$no=1;
while ($rows = mysql_fetch_array($result)) {
echo "<tr>
<td width='80%'><input type='checkbox' value='".$rows['idp']."' name='mk".$no."' checked/>[".$rows['idp']."] ".$rows['nama']."</td>
<td><input type=text name='mb".$no."' size=1 placeholder='MB' value = '".$rows['mb']."'/>
<input type=text name='md".$no."' size=1 placeholder='MD' value = '".$rows['md']."'><td></tr>";
$no++;
}
?>
<input type="hidden" name="jumMK" value="<?php echo $no;?>"/>
</table>
</td>
</tr>
<tr valign="top">
<td width="100">Jika YA Maka</td>
<td width="10">
<select name="g_ya" size=5>
<?php
$query = "SELECT * FROM t_gejala ORDER BY idg ASC";
$result = mysql_query($query);
while ($rows = mysql_fetch_array($result)) {
echo '<option value="'.$rows[idg].'">['.$rows[idg].'] '.$rows[gejala].'</option>';
}
?>
</select>
</td>
</tr>
<tr valign="top">
<td width="100">Jika Tidak Maka</td>
<td width="10">
<select name="g_tidak" size=5>
<?php
$query = "SELECT * FROM t_gejala ORDER BY idg ASC";
$result = mysql_query($query);
while ($rows = mysql_fetch_array($result)) {
echo '<option value="'.$rows[idg].'">['.$rows[idg].'] '.$rows[gejala].'</option>';
}
?>
</select>
</td>
<tr valign="top">
<td width="75"></td>
<td><?php if (isset($_GET['action']) && $_GET['action']=='edit'){
echo '<input class="button" type="submit" value="Update" name="update" />';
}else{
echo '<input class="button" type="submit" value="Simpan" name="simpan" />';}
?></td>
</tr>
</table>
</form>
<file_sep>/modul/v_saran.php
<?php
require 'inc/koneksi.php';
$slc="SELECT *FROM t_saran ORDER BY waktu DESC";
$slcsql=mysql_query($slc);
?>
<table width="100%" border="0">
<center>DATA SARAN MEMBER<center>
<?php
while($row=mysql_fetch_array($slcsql)){?>
<tr>
<td width="5%">Nama </td>
<td width="70%"><?php echo $row['nama'];?></td>
</tr>
<tr>
<td width="26%">Email </td>
<td><?php echo $row['email'];?></td>
</tr>
<tr>
<td width="26%">Waktu</td>
<td><?php echo $row['waktu'];?></td>
</tr>
<tr>
<td width="26%">Komentar <br></td>
<td><?php echo $row['saran'];?><br>
<br></td><br>
<?php
echo '
<td>
<a href="home.php?page=v_saran&aksi=del&idn='.$row['idn'].'" onclick="return confirm(\'Apakah kamu yakin ingin menghapus '.$row['v_saran'].' ?\');" title="Delete"><img src="images/icons/cross.png" alt="Delete" /></a></td>';?>
<?php
if ($_GET['aksi']=="del"){
$id = $_GET['idn'];
$delsql = mysql_query("DELETE FROM t_saran WHERE idn='$id'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=v_saran';</script>";
}else{
}
}
}
?>
</table>
<file_sep>/modul/diagnosa.php
<?php
session_start();
require 'inc/koneksi.php';
if (isset($_POST['cur'])){
if (isset($_POST['ya'])){
$next = $_POST['yaval'];
array_push($_SESSION['listgejala'], $_POST['cur']);
}else if(isset($_POST['tidak'])){
$next = $_POST['tidakval'];
}
if ($next==""){
echo "<script>window.location.href = 'home.php?page=hasildiag';</script>";
}
}else{
$mulai = "G034";
$next = $mulai;
$_SESSION['listgejala'] = Array();
}
$qry = mysql_query("SELECT t.idg idg,t.g_ya,t.g_tidak,g.pertanyaan FROM `t_pertanyaan` t
LEFT JOIN t_gejala g ON t.idg = g.idg WHERE t.idg='$next'") or die(mysql_error());
$result = mysql_fetch_array($qry);
?>
<br><br><br>
<div class="content-box-header"><center><u><h4>DIAGNOSA AWAL KANKER KANDUNGAN<center><u/><h4/><p></div>
<form action="" id="form" method="post">
<input type="hidden" name="cur" value="<?php echo $result['idg'];?>"/>
<input type="hidden" name="yaval" value="<?php echo $result['g_ya'];?>"/>
<input type="hidden" name="tidakval" value="<?php echo $result['g_tidak'];?>"/>
<table width="100%">
<tr valign="top">
<td>
<h4 align="center"><?php echo $result['pertanyaan'];?></h4><br/>
</td>
</tr>
<tr valign="top">
<td align="center"><?php echo '<input color="red" class="button" type="submit" value="Ya" name="ya" />
<input class="button" type="submit" value="Tidak" name="tidak" />';
?></td>
</tr>
</table>
</form><file_sep>/modul/vrule.php
<?php
require 'inc/koneksi.php';
require 'script.php';
if(empty($no))
$posisi=0;
$no=$posisi+1;
$que = mysql_query("SELECT t_aturan . * , t_penyakit.nama a, t_penyakit.idp c, t_gejala.gejala b, t_gejala.idg d
FROM t_aturan
LEFT JOIN t_gejala ON t_gejala.idg = t_aturan.idg
LEFT JOIN t_penyakit ON t_penyakit.idp = t_aturan.idp group by t_aturan.idg ");
?>
<h3 align="center"><br>     .: VIEW PROBABILITAS NILAI MB & MD :.</h3><br>
<div class="demo_jui">
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<td width="1%" align="center">No</td>
<td width="22%" align="center">Gejala</td>
<td width="22%" align="center">Jenis Penyakit</td>
<td width="10%" align="center">MB</td>
<td width="11%" align="center">MD</td>
<td width="1%" align="center">Detail</td>
</tr>
</thead>
<?php
while ($konten = mysql_fetch_array($que)){
?>
<tr class="gradeC">
<td align="center"><?php echo $no; ?></td>
<td align="justify"><?php echo $konten['d']." - ".$konten['b']; ?></td>
<?php
$id = $konten['idg'];
$kueri = mysql_query("SELECT t_penyakit.idp id, t_penyakit.nama py, t_aturan.idp,t_aturan.mb, t_aturan.md
FROM t_penyakit, t_aturan
WHERE t_penyakit.idp = t_aturan.idp
AND t_aturan.idg = '$id' order by t_penyakit.idp ASC");
?>
<td width="">
<?php
while($q = mysql_fetch_array($kueri))
{
echo $q['id']." - ".$q['py'];
echo "<br>";
}
?>
</td>
<?php
$id = $konten['idg'];
$kueri = mysql_query("SELECT t_penyakit.idp id, t_penyakit.nama py, t_aturan.idp,t_aturan.mb, t_aturan.md
FROM t_penyakit, t_aturan
WHERE t_penyakit.idp = t_aturan.idp
AND t_aturan.idg = '$id' order by t_penyakit.idp ASC");
?>
<td align="left">
<?php
while($q = mysql_fetch_array($kueri))
{
echo $q['id']." - ".$q['mb'];
echo "<br>";
}
?>
</td>
<?php
$id = $konten['idg'];
$kueri = mysql_query("SELECT t_penyakit.idp id, t_penyakit.nama py, t_aturan.idp,t_aturan.mb, t_aturan.md
FROM t_penyakit, t_aturan
WHERE t_penyakit.idp = t_aturan.idp
AND t_aturan.idg = '$id' order by t_penyakit.idp ASC");
?>
<td align="left">
<?php
while($q = mysql_fetch_array($kueri))
{
echo $q['id']." - ".$q['md'];
echo "<br>";
}
?>
</td>
<td align="center">
<a href="home.php?page=e_prob&aksi=edit&idg=<?php echo $konten['idg'];?>" title="edit"><img src="images/icons/pencil.png" alt="edit" /></a>
<a href="home.php?page=vrule&aksi=del&idg=<?php echo $konten['idg'];?>" onclick="return confirm('Apakah kamu yakin ingin menghapus?');"> <img src="images/icons/cross.png" alt="" title="hapus" border="0" /></a>
<?php
$no++;
?>
</tr>
<?php
}
?>
</table>
</div>
<?php
require 'inc/koneksi.php';
if ($_GET['aksi']=="del"){
$idg = $_GET['idg'];
$delsql = mysql_query("DELETE FROM Alias_A, Alias_B USING t_aturan AS Alias_A INNER JOIN t_pertanyaan AS Alias_B
on Alias_A.idg=Alias_B.idg where Alias_A.idg='$idg'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=vrule';</script>";
}else{
}
}
?><file_sep>/modul/gpass.php
<?php
session_start();
mysql_connect("localhost","root","") or die("Nggak bisa koneksi");
mysql_select_db("sipadu");//sesuaikan dengan nama database anda
$user = $_SESSION['userid'];
$selek=mysql_query("SELECT * FROM t_user where userid='$user'");
while ($data=mysql_fetch_array($selek)){
$nananina=$data['password'];
}
if (isset($_POST['ganti'])){
$passlama = ($_POST['passlama']);
$passbaru = ($_POST['passbaru']);
$confpassbaru = ($_POST['passbaruconf']);
if (($passlama==$nananina) && ($passbaru==$confpassbaru)){
$qgantipass = mysql_query("UPDATE t_user SET password='$<PASSWORD>' WHERE userid='$user'");
if ($qgantipass){
echo "<script>window.location.href = 'home.php?page=xhome&sukses=ANDA BERHASIL MERUBAH PASSWORD';</script>";
}else{
echo "<script>window.location.href = 'home.php?page=xhome&gagal=Gagal Merubah password, karena password tidak sama';</script>";
}
}else{
echo "<script>window.location.href = 'home.php?page=xhome&gagal=Password lama anda salah';</script>";
}
}
?>
<html lang="en-US">
<head>
<meta charset="UTF-8">
<title>Form Register</title>
<!-- Include CSS -->
<link rel="stylesheet" href="css/register.css"/>
<link rel="stylesheet" href="css/saran.css"/>
</head>
<body>
<form class="register_form" action="" id="form" method="post">
<ul>
<li>
<center>Ganti Password <?php echo $user;?>
</li>
<li>
<label for="name">Pass lama</label><td>
<input name="passlama" type="password placeholder="" required />
<span class="form_hint">inputkan password lama</span>
</li>
<li>
<label for="username">Pass baru</label>
<input name="passbaru" type="password" placeholder="" required />
<span class="form_hint">inputkan password baru</span>
</li>
<li>
<label for="umur">Pass baru konfirm</label>
<input name="passbaruconf" type="password" placeholder="" required />
<span class="form_hint">inputkan password baru confirm</span>
</li>
<li>
<button type="submit" value="Ganti Password" name="ganti">Ganti password</button>
</li>
</ul>
</form>
</body>
</html>
<file_sep>/modul/f_penyakit.php
<?php
require "inc/koneksi.php";
require "script.php";
require 'inc/buatid.php';
if(empty($no))
$posisi=0;
$no=$posisi+1;
$que = mysql_query("SELECT * FROM t_penyakit ORDER BY idp ASC ");
?>
<h3 align="center"><br>     .: DATA JENIS PENYAKIT KANKER KANDUNGAN :.</h3><br>
<?php
if (!empty($_GET['message']) && $_GET['message'] == 'success') {
echo '<center>Berhasil Menambah Data Baru </center>';
}
?>
<div class="demo_jui">
<br><br>
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<td align="center">No</td>
<td align="center">Kode</td>
<td align="center">Jenis Penyakit</td>
<td align="center">Deskripsi</td>
<td align="center">Detail</td>
</tr>
</thead>
<?php
while ($konten = mysql_fetch_array($que)){
?>
<tr class="gradeC">
<td width="2%" align="center"><?php echo $no; ?></td>
<td width="2%" align="justify"><?php echo $konten['idp']; ?></td>
<td width="10%" align="justify"><?php echo $konten['nama']; ?></td>
<td width="54%" align="justify"><?php echo $konten['desk']; ?></td>
<td width="2%" align="center">
<a href="home.php?page=f_penyakit&aksi=edit&idp=<?php echo $konten['idp'];?>"> <img src="images/icons/pencil.png" alt="Edit" /></a>
<a href="home.php?page=f_penyakit&aksi=del&idp=<?php echo $konten['idp'];?>" onclick="return confirm('Anda yakin ingin menghapus <?php echo $konten['idp'];?>?');"> <img src="images/icons/cross.png" alt="Delete" /></a>
<?php
$no++;
?>
</td>
</tr>
<?php
}
?>
</table>
</div>
<div class="content-box-header"> <!-- Add the class "closed" to the Content box header to have it closed by default -->
<p><h3>INPUT GEJALA</h3></p>
</div> <!-- End .content-box-header -->
<?php
require 'inc/koneksi.php';
if (isset($_POST['simpan'])){
$idp = buatID("t_penyakit","P");
$nama = $_POST['nama'];
$desk = $_POST['desk'];
$penanganan = $_POST['penanganan'];
$insql= mysql_query("insert into t_penyakit(idp,nama,desk,penanganan) values('$idp','$nama','$desk', '$penanganan')");
if ($insql){
echo "<script>window.location.href = 'home.php?page=f_penyakit&message=success';</script>";
}else{
}
}
if (isset($_POST['update'])){
$idp = $_POST['idp'];
$nama = $_POST['nama'];
$desk = $_POST['desk'];
$penanganan = $_POST['penanganan'];
$edsql= mysql_query
("UPDATE `sipadu`.`t_penyakit` SET idp='$idp', nama='$nama', desk='$desk', penanganan='$penanganan' WHERE `t_penyakit`.`idp` = '$idp';") or die(mysql_error());
if ($edsql){
echo "<script>window.location.href = 'home.php?page=f_penyakit';</script>";
}else{
}
}
if ($_GET['aksi']=="del"){
$idp = $_GET['idp'];
$delsql = mysql_query("DELETE FROM t_penyakit WHERE idp='$idp'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=f_penyakit';</script>";
}else{
}
}
?>
<form action="" id="form" method="post">
<input type="hidden" name="idp" value="<?php
	if (isset($_GET['aksi']) && $_GET['aksi']=="edit"){
$idp = $_GET['idp'];
$qedit = mysql_query("SELECT * from t_penyakit WHERE idp='$idp'");
$redit = mysql_fetch_assoc($qedit); echo $idp;
$idp = $redit['idp'];
$nama = $redit['nama'];
$desk = $redit['desk'];
$penanganan = $redit['penanganan'];}?>">
<table center>
<tr valign="top">
<td width="100">Kode Penyakit</td>
<td width="5">: </td>
<td><input name="idp" type="text" value="<? echo buatID ("t_penyakit","P"); ?>" maxlength="4" disabled>
</td>
</tr>
<tr valign="top">
<td width="100">Jenis Penyakit</td>
<td width="5">: </td>
<td><textarea name="nama" cols="25" id="alamatsurat" required ><?php echo $nama;?></textarea></td>
</tr>
<tr valign="top">
<td width="100">Deskripsi</td>
<td width="5">: </td>
<td><textarea name="desk" style="height: 100px; width:480px;" id="alamatsurat" required><?php echo $desk;?></textarea></td>
</tr>
<tr valign="top">
<td width="100">penanganan</td>
<td width="5">: </td>
<td><textarea name="penanganan" style="height: 100px; width:480px;" id="alamatsurat" required><?php echo $penanganan;?></textarea></td>
</tr>
<tr valign="top">
<td width="75"></td>
<td width="5"></td>
    <td><?php if (isset($_GET['aksi']) && $_GET['aksi']=='edit'){
echo '<input class="button" type="submit" value="Simpan" name="update" />';
}else{
echo '<input class="button" type="submit" value="Tambahkan" name="simpan" />';}
?></td>
</tr>
</table>
</form>
<file_sep>/modul/pakar.php
<?php
require 'inc/koneksi.php';
require 'inc/buatid.php';
?>
<?php
require "inc/koneksi.php";
require "script.php";
if(empty($no))
$posisi=0;
$no=$posisi+1;
$que = mysql_query("SELECT * FROM t_user where level='30'");
?>
<h3 align="center">    .: Managemen Data Pakar :.</h3>
<?php
if (!empty($_GET['message']) && $_GET['message'] == 'success') {
echo '<center>Berhasil Menambah Data Baru</center>';
}
?>
<div class="demo_jui">
<br><br>
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<td align="center">No</td>
<td align="center">Nama Pakar</td>
<td align="center">Username</td>
<td align="center">Email</td>
<td align="center">Telepon</td>
<td align="center">Level</td>
<td align="center">Create Date</td>
<td align="center">Detail</td>
</tr>
</thead>
<?php
while ($konten = mysql_fetch_array($que)){
?>
<tr class="gradeC">
<td align="center"><?php echo $no; ?></td>
<td align="justify"><?php echo $konten['nama_lengkap']; ?></td>
<td align="justify"><?php echo $konten['userid']; ?></td>
<td align="justify"><?php echo $konten['email']; ?></td>
<td align="justify"><?php echo $konten['telepon']; ?></td>
<td align="justify"><?php echo $konten['level']; ?></td>
<td align="justify"><?php echo $konten['create_date']; ?></td>
<td align="center">
<a href="home.php?page=pakar&aksi=edite&idu=<?php echo $konten['idu'];?>"> <img src="images/icons/pencil.png" alt="" title="edite" border="0" /></a>
<a href="home.php?page=pakar&aksi=del&username=<?php echo $konten['userid'];?>" onclick="return confirm('Anda yakin ingin menghapus?');"> <img src="images/icons/cross.png" alt="" title="Delete" border="0" /></a>
<?php
$no++;
?>
</td>
</tr>
<?php
}
?>
</table>
</div>
<div class="content-box-header"> <!-- Add the class "closed" to the Content box header to have it closed by default -->
<p><h3>INPUT DATA PAKAR</h3></p>
</div> <!-- End .content-box-header -->
<?php
require 'inc/koneksi.php';
if (isset($_POST['simpan'])){
$nama = $_POST['nama'];
$username = $_POST['username'];
$email = $_POST['email'];
$telepon = $_POST['telepon'];
$cek=mysql_query("select * from t_user where userid='$username'");
$bcek=mysql_num_rows($cek);
if ($bcek == 0)
{
$date = date("Y-m-d");
$inusrsql= mysql_query("insert into t_user values('','$nama','$username','$username','','$email','$telepon','30','$date')") or die(mysql_error());
	if ($inusrsql){
		echo "<script>window.location.href ='home.php?page=pakar&message=success';</script>";
}else{
}
}else{
echo "<script language=Javascript>
alert('User yang Anda masukkan sudah terdaftar');
window.location.href = 'home.php?page=pakar';
</script>";
}
}
if (isset($_POST['update'])){
$idu = $_POST['idu'];
$nama = $_POST['nama'];
$username = $_POST['username'];
$email = $_POST['email'];
$telepon = $_POST['telepon'];
$edsql= mysql_query
("UPDATE `sipadu`.`t_user` SET nama_lengkap='$nama', userid='$username', email='$email', telepon='$telepon' WHERE `t_user`.`idu` = '$idu';") or die(mysql_error());
if ($edsql){
echo "<script>window.location.href = 'home.php?page=pakar';</script>";
}else{
}
}
if ($_GET['aksi']=="del"){
$user= $_GET['username'];
$delsql2 = mysql_query("DELETE FROM t_user WHERE userid='$user'");
	if ($delsql2){
		echo "<script>window.location.href = 'home.php?page=pakar';</script>";
	}else{
		echo "Data gagal dihapus";
}
}
?>
<form action="" id="form" method="post">
<input type="hidden" name="idu" value="<?php
	if (isset($_GET['aksi']) && $_GET['aksi']=="edit"){
$idu = $_GET['idu'];
$qedit = mysql_query("SELECT * from t_user WHERE idu='$idu'");
$redit = mysql_fetch_assoc($qedit); echo $idu;
$idu = $redit['idu'];
$nama = $redit['nama_lengkap'];
$username = $redit['userid'];
$email = $redit['email'];
$telepon = $redit['telepon'];
}?>">
<table center>
<tr valign="top">
<td width="100">Nama Pakar</td>
<td width="5">: </td>
<td><input name="nama" type="text" value="<?php echo $nama;?>">
</td>
</tr>
<tr valign="top">
<td width="100">Username</td>
<td width="5">: </td>
<td><input name="username" type="text" value="<?php echo $username;?>" required/>
</td>
</tr>
<tr valign="top">
<td width="100">Email</td>
<td width="5">: </td>
<td><textarea name="email" cols="25" id="email" required><?php echo $email;?></textarea></td>
</tr>
<tr valign="top">
<td width="100">Telepon</td>
<td width="5">: </td>
<td><input name="telepon" id="telepon" value="<?php echo $telepon;?>" required/></td>
</tr>
<tr valign="top">
<td width="75"></td>
<td width="5"></td>
    <td><?php if (isset($_GET['aksi']) && $_GET['aksi']=='edit'){
echo '<input class="button" type="submit" value="Simpan" name="update" />';
}else{
echo '<input class="button" type="submit" value="Tambahkan" name="simpan" />';}
?></td>
</tr>
</table>
</form>
<file_sep>/modul/member.php
<?php
require "inc/koneksi.php";
require "script.php";
if(empty($no))
$posisi=0;
$no=$posisi+1;
$que = mysql_query("SELECT * FROM t_user where level='10' ");
?>
<h3 align="center"><br>     .: DATA MEMBER TERDAFTAR :.</h3><br>
<div class="demo_jui">
<!--<td><a href='?page=register'><img id='icon' src='images/plus.jpg' alt='tambah' title='tambah'></a></td>
-->
<br><br>
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<td align="center">No</td>
<td align="center">Nama</td>
<td align="center">Umur</td>
<td align="center">Alamat Email</td>
<td align="center">Telepon</td>
<!--<td align="center">Level</td>-->
<td align="center">Create Date</td>
</tr>
</thead>
<?php
while ($konten = mysql_fetch_array($que)){
$idu = $konten['idu'];
?>
<tr class="gradeC">
<td align="center"><?php echo $no; ?></td>
<td align="justify"><?php echo $konten['nama_lengkap']; ?></td>
<td align="justify"><?php echo $konten['umur']; ?></td>
<td align="justify"><?php echo $konten['email']; ?></td>
<td align="justify"><?php echo $konten['telepon']; ?></td>
<!--<td align="justify"><?php echo $konten['level']; ?></td>-->
<td align="justify"><?php echo $konten['create_date']; ?></td>
<?php
$no++;
?>
</td>
</tr>
<?php
}
?>
</table>
</div>
<?php
require 'inc/koneksi.php';
if ($_GET['aksi']=="del"){
$ids = $_GET['ids'];
$delsql = mysql_query("DELETE FROM t_pasien WHERE ids='$ids'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=member';</script>";
}else{
}
}
?>
<file_sep>/log.php
<?php
session_start();
mysql_connect("localhost","root","") or die("Nggak bisa koneksi");
mysql_select_db("sipadu");//sesuaikan dengan nama database anda
//capture the data sent from the login form
$userid = $_POST['userid'];
$psw = $_POST['psw'];
$op = $_GET['op'];
if($op=="in"){
	//check whether the submitted data is empty or not
	if (empty($userid) && empty($psw)) {
		//username and password are both empty
		header('location:index.php?error=1');
		exit;
	} else if (empty($userid)) {
		//only the username is empty
		header('location:index.php?error=2');
		exit;
	} else if (empty($psw)) {
		//only the password is empty
		header('location:index.php?error=3');
		exit;
	}
	$cek = mysql_query("SELECT * FROM t_user WHERE userid='$userid' AND password='$psw'");
	if(mysql_num_rows($cek)==1){//a successful login returns exactly 1 row
$c = mysql_fetch_array($cek);
$_SESSION['userid'] = $c['userid'];
$_SESSION['level'] = $c['level'];
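		// level codes used throughout the app: 10 = member, 30 = pakar (expert), 50 = admin
		// (these drive the menupas/menupakar/menuadm includes and the level filters in pakar.php and member.php)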
if($c['level']=="10"){
header("location:home.php");
}
if($c['level']=="30"){
header("location:home.php");
}
else if($c['level']=="50"){
header("location:home.php");
}
}else{
header('location:index.php?error=4');
}
}else if($op=="out"){
unset($_SESSION['userid']);
unset($_SESSION['level']);
header("location:index.php");
}
?><file_sep>/modul/vmember.php
<title>Sistem Pakar</title>
<div>
<center>DATA MEMBER<center>
</div> <!-- End .content-box-header -->
<p><table id="tablemn" >
<thead >
<th width="5%" >No.</th>
<th width="20%">Nama</th>
<th width="5%">Umur</th>
<th width="10%">Pekerjaan</th>
<th width="15%">Status</th>
<th width="15%">Email</th>
<th width="10%">Telepon</th>
<th width="4%">Aksi</th>
</thead>
<tbody>
<?php
require 'inc/koneksi.php';
require 'inc/buatid.php';
$batas=10;
$halaman = $_GET['halaman'];
if(empty($halaman))
{
$posisi=0;
$halaman=1;
}
else
{
$posisi = ($halaman-1) * $batas;
}
$no=$posisi+1;
$sql= mysql_query("SELECT * FROM t_pasien ORDER BY ids ASC limit $posisi,$batas") or die (mysql_query());
if (mysql_num_rows($sql) == 0){
echo '<tr><td colspan=4><center><br/><h4>Tidak ada data</h4></center></td></tr>';
}else{
while ($row = mysql_fetch_array($sql)) {
echo '<tr>
<td>'.$no.'</td>
<td>'.$row['nama'].'</td>
<td>'.$row['umur'].'</td>
<td>'.$row['pek'].'</td>
<td>'.$row['status'].'</td>
<td>'.$row['email'].'</td>
<td>0'.$row['telepon'].'</td>
<td>
<a href="home.php?page=e_member&aksi=del&ids='.$row['ids'].'" onclick="return confirm(\'Apakah kamu yakin ingin menghapus '.$row['t_pasien'].' ?\');" title="Delete"><img src="images/icons/cross.png" alt="Delete" /></a></td></tr>';
$no++;
}
}
?>
</tbody>
</table><br/>
<?php
$file="?page=v_pasien";
$tampil2="SELECT * FROM t_pasien ORDER BY ids ASC";
$hasil2=mysql_query($tampil2);
$jmldata=mysql_num_rows($hasil2);
$jmlhalaman=ceil($jmldata/$batas);
//link to the previous page
if($halaman > 1)
{
$previous=$halaman-1;
echo "<A HREF=$file&&halaman=1><< First</A> |
<A HREF=$file&&halaman=$previous>< Previous</A> | ";
}
else
{
echo "<< First | < Previous | ";
}
$angka=($halaman > 3 ? " ... " : " ");
for($i=$halaman-2;$i<$halaman;$i++)
{
if ($i < 1)
continue;
$angka .= "<a href=$file&&halaman=$i>$i</A> ";
}
$angka .= " <b>$halaman</b> ";
for($i=$halaman+1;$i<($halaman+3);$i++)
{
if ($i > $jmlhalaman)
break;
$angka .= "<a href=$file&&halaman=$i>$i</A> ";
}
$angka .= ($halaman+2<$jmlhalaman ? " ...
<a href=$file&&halaman=$jmlhalaman>$jmlhalaman</A> " : " ");
echo "$angka";
//link to the next page (Next)
if($halaman < $jmlhalaman)
{
$next=$halaman+1;
echo " | <A HREF=$file&&halaman=$next>Next ></A> |
<A HREF=$file&&halaman=$jmlhalaman>Last >></A> ";
}
else
{
echo " | Next > | Last >>";
}
echo "<p><font color=red>Total Member : <b>$jmldata</b> Member</font></p>";
echo "$nama";
?>
<file_sep>/modul/e_member.php
<?php
require 'inc/koneksi.php';
if (isset($_POST['simpan'])){
$nama = $_POST['xnama'];
$username = $_POST['xusername'];
$umur = $_POST['xumur'];
$pek = $_POST['xpek'];
$status = $_POST['xstatus'];
$email = $_POST['xemail'];
$telepon = $_POST['xtelepon'];
$insql= mysql_query("insert into t_pasien(nama,username,umur,pek,status,email,telepon) values('$nama','$username','$umur','$pek','$status','$email','$telepon')");
$date = date("Y-m-d");
$inusrsql= mysql_query("insert into t_user values('$username','$username','10','$date','$date')") or die(mysql_error());
if ($insql){
echo "<script>window.location.href = 'index.php?page=gview&sukses=Guru baru telah ditambahkan';</script>";
}else{
}
}
if (isset($_POST['update'])){
	$ids = $_POST['ids'];
	$nama = $_POST['xnama'];
$username = $_POST['xusername'];
$umur = $_POST['xumur'];
$pek = $_POST['xpek'];
$status = $_POST['xstatus'];
$email = $_POST['xemail'];
$telepon = $_POST['xtelepon'];
$edsql= mysql_query
("UPDATE `sipadu`.`t_pasien` SET ids='$ids', nama='$nama', username='$username', umur='$umur', pek='$pek', status='$status', email='$email', telepon='$telepon' WHERE `t_pasien`.`ids` = '$ids';") or die(mysql_error());
if ($edsql){
echo "<script>window.location.href = 'home.php?page=vmember';</script>";
}else{
}
}
if ($_GET['aksi']=="del"){
$ids = $_GET['ids'];
$delsql = mysql_query("DELETE FROM t_pasien WHERE ids='$ids'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=vmember';</script>";
}else{
}
}
?>
<!DOCTYPE HTML>
<html lang="en-US">
<head>
<meta charset="UTF-8">
<title>Form Register</title>
<!-- Include CSS -->
<link rel="stylesheet" href="css/register.css"/>
<link rel="stylesheet" href="css/contact.css"/>
</head>
<body>
<form class="registri_form" action="" id="form" method="post">
<input type="hidden" name="ids" value="<?php
if (isset($_GET['aksi'])=="edit"){
$ids = $_GET['ids'];
$qedit = mysql_query("SELECT * from t_pasien WHERE ids='$ids'");
$redit = mysql_fetch_assoc($qedit); echo $ids;
$ids = $redit['ids'];
$nama = $redit['xnama'];
$username = $redit['xusername'];
$umur = $redit['xumur'];
$pek = $redit['xpek'];
$status = $redit['xstatus'];
$email = $redit['xemail'];
$telepon = $redit['xtelepon']; }?>">
<ul>
<li>
<center>EDIT MEMBER</center>
</li>
<li>
<label for="name">Nama :</label>
<input type="text" name="xnama" placeholder="" required <?php echo $nama;?> />
<span class="form_hint">inputkan nama lengkap</span>
</li>
<li>
<label for="username">Username :</label>
<input type="text" name="xusername" placeholder="" required <?php echo $username;?> />
<span class="form_hint">format <EMAIL></span>
</li>
<li>
<label for="umur">Umur :</label>
<input type="text" name="xumur" placeholder="" required <?php echo $umur;?> />
<span class="form_hint">format <EMAIL></span>
</li>
<li>
<label for="pekerjaan">Pekerjaan :</label>
<input type="text" name="xpek" placeholder="" required <?php echo $pek;?>/>
<span class="form_hint">format <EMAIL></span>
</li>
<li>
<label for="status">Status :</label>
<select name="xstatus">
<option value="Kawin">Kawin</option>
<option value="belum kawin">Belum Kawin</option>
</select>
<span class="form_hint">format <EMAIL></span>
</li>
<li>
<label for="email">E-mail :</label>
<input type="email" name="xemail" placeholder="" required <?php echo $email;?>/>
<span class="form_hint">format <EMAIL></span>
</li>
<li>
<label for="email">Telepon :</label>
<input type="text" name="xtelepon" placeholder="" required <?php echo $telepon; ?> />
<span class="form_hint">085729001164</span>
</li>
<li>
<button type="submit" name="simpan">Submit</button>
</li>
</ul>
</form>
</body>
</html><file_sep>/js/js/kalender.js
<!--
var hariH = new Array(7);
hariH[1] = "Minggu";
hariH[2] = "Senin";
hariH[3] = "Selasa";
hariH[4] = "Rabu";
hariH[5] = "Kamis";
hariH[6] = "Jum'at";
hariH[7] = "Sabtu";
var bulanB = new Array(12);
bulanB[1] = "Januari";
bulanB[2] = "Februari";
bulanB[3] = "Maret";
bulanB[4] = "April";
bulanB[5] = "Mei";
bulanB[6] = "Juni";
bulanB[7] = "Juli";
bulanB[8] = "Agustus";
bulanB[9] = "September";
bulanB[10] = "Oktober";
bulanB[11] = "November";
bulanB[12] = "Desember";
var hariini = new Date();
var hari = hariH[hariini.getDay() + 1];
var bulan = bulanB[hariini.getMonth() + 1];
var tanggal = hariini.getDate();
var tahun = (hariini.getYear()+1900);
var detik = hariini.getSeconds()
var menit = hariini.getMinutes();
var jam = hariini.getHours();
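// Note on the helpers below: showtip()/hidetip() depend on the legacy, IE-only
// document.all collection and the global event object, so the scrolling marquee
// tooltip is effectively limited to old Internet Explorer; other browsers fall
// through the document.all/readyState guard and do nothing.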
function showtip(model, delay, text)
{
if (window.document.all&&window.document.readyState=="complete")
{
window.document.all.tooltip.innerHTML='<marquee class=marque behavior='+model+' scrollamount='+delay+' scrolldelay=25>'+text+'</marquee>'
if(event.clientX > screen.width-200)
window.document.all.tooltip.style.pixelLeft=event.clientX+document.body.scrollLeft-(window.document.all.tooltip.style.pixelWidth+10)
else
window.document.all.tooltip.style.pixelLeft=event.clientX+document.body.scrollLeft+10
window.document.all.tooltip.style.pixelTop=event.clientY+document.body.scrollTop+10
window.document.all.tooltip.style.visibility="visible"
}
}
function hidetip()
{
window.document.all.tooltip.style.visibility="hidden"
}
document.write ("<div id=tooltip style='position:absolute; visibility:hidden; clip:rect(0 150 50 0); width:150px;'></div>")
//--><file_sep>/modul/vuser.php
<title>Sistem Pakar</title>
<div>
<center>DATA USER AKSES</center>
</div> <!-- End .content-box-header -->
<p><table id="tablemn" >
<thead >
<th width="10%" >No.</th>
<th width="15%">Userid</th>
<th width="10%">Password</th>
<th width="10%">Level</th>
<th width="20%">Create Date</th>
<th width="20%">Last Login</th>
<th width="10%">Aksi</th>
</thead>
<tbody>
<?php
require 'inc/koneksi.php';
require 'inc/buatid.php';
$batas=10;
$halaman = isset($_GET['halaman']) ? $_GET['halaman'] : '';
if(empty($halaman))
{
$posisi=0;
$halaman=1;
}
else
{
$posisi = ($halaman-1) * $batas;
}
$no=$posisi+1;
$sql= mysql_query("SELECT * FROM t_user ORDER BY userid ASC limit $posisi,$batas") or die (mysql_query());
if (mysql_num_rows($sql) == 0){
echo '<tr><td colspan=4><center><br/><h4>Tidak ada data</h4></center></td></tr>';
}else{
while ($row = mysql_fetch_assoc($sql)) {
echo '<tr>
<td>'.$no.'</td>
<td>'.$row['userid'].'</td>
<td>'.$row['password'].'</td>
<td>'.$row['level'].'</td>
<td>'.$row['create_date'].'</td>
<td>'.$row['last_login'].'</td>
<td>
<a href="home.php?page=vuser&aksi=del&userid='.$row['userid'].'" onclick="return confirm(\'Apakah kamu yakin ingin menghapus '.$row['userid'].' ?\');" title="Delete"><img src="images/icons/cross.png" alt="Delete" /></a></td></tr>';
$no++;
}
}
?>
</tbody>
</table><br/>
<?php
$file="?page=vuser";
$tampil2="SELECT * FROM t_user ORDER BY userid ASC";
$hasil2=mysql_query($tampil2);
$jmldata=mysql_num_rows($hasil2);
$jmlhalaman=ceil($jmldata/$batas);
//link ke halaman sebelumnya (previous)
if($halaman > 1)
{
$previous=$halaman-1;
echo "<A HREF=$file&&halaman=1><< First</A> |
<A HREF=$file&&halaman=$previous>< Previous</A> | ";
}
else
{
echo "<< First | < Previous | ";
}
$angka=($halaman > 3 ? " ... " : " ");
for($i=$halaman-2;$i<$halaman;$i++)
{
if ($i < 1)
continue;
$angka .= "<a href=$file&&halaman=$i>$i</A> ";
}
$angka .= " <b>$halaman</b> ";
for($i=$halaman+1;$i<($halaman+3);$i++)
{
if ($i > $jmlhalaman)
break;
$angka .= "<a href=$file&&halaman=$i>$i</A> ";
}
$angka .= ($halaman+2<$jmlhalaman ? " ...
<a href=$file&&halaman=$jmlhalaman>$jmlhalaman</A> " : " ");
echo "$angka";
//link kehalaman berikutnya (Next)
if($halaman < $jmlhalaman)
{
$next=$halaman+1;
echo " | <A HREF=$file&&halaman=$next>Next ></A> |
<A HREF=$file&&halaman=$jmlhalaman>Last >></A> ";
}
else
{
echo " | Next > | Last >>";
}
echo "<p><font color=red>Total User : <b>$jmldata</b> User</font></p>";
?>
<?php
require 'inc/koneksi.php';
if ($_GET['aksi']=="del"){
$userid = $_GET['userid'];
$delsql = mysql_query("DELETE FROM t_user WHERE userid='$userid'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=vuser';</script>";
}else{
}
}
?>
<file_sep>/modul/saran.php
<?php
require 'inc/koneksi.php';
$user = $_SESSION['userid'];
if (isset($_POST['simpan'])){
$nama = $_POST['xnama'];
$email = $_POST['xemail'];
$saran = $_POST['xsaran'];
$insql= mysql_query("insert into t_saran(nama,email,saran, waktu) values('$nama','$email','$saran', NOW())") or die(mysql_error());
if ($insql){
echo "<script>window.location.href = 'home.php?page=saran&message=success';</script>";
}else{
}
}
?>
<!DOCTYPE HTML>
<html lang="en-US">
<head>
<meta charset="UTF-8">
<title>Form Contact</title>
<?php
if (!empty($_GET['message']) && $_GET['message'] == 'success') {
		echo '<center>Terima Kasih Atas Sarannya <strong>'.$user.'</strong></center>';
}
?>
<!-- Include CSS -->
<link rel="stylesheet" href="css/saran.css"/>
</head>
<body>
<form class="contact_form" action="" id="form" method="post" >
<ul>
<li>
<h4>Hubungi Kami</h4>
<span class="required_notification">*Field harus diisi</span>
</li>
<li>
<label for="name">Nama :</label>
<input type="text" name="xnama" placeholder="" required />
<span class="form_hint">inputkan nama lengkap</span>
</li>
<li>
<label for="email">E-mail :</label>
<input type="email" name="xemail" placeholder="" required />
<span class="form_hint">format <EMAIL></span>
</li>
<li>
<label for="message">Pesan / Saran :</label>
<textarea name="xsaran" cols="45" rows="5" required id="message"></textarea>
</li>
<li>
<button type="submit" name="simpan">Submit</button>
</li>
</ul>
</form>
</body>
</html>
<file_sep>/modul/backup/backup/Thu14Feb2013_ri32_backup_data_1360782925.sql
DROP TABLE t_artikel;
CREATE TABLE `t_artikel` (
`idArtikel` int(11) NOT NULL AUTO_INCREMENT,
`title` varchar(30) DEFAULT NULL,
`content` text,
`author` varchar(30) DEFAULT NULL,
`datePub` datetime DEFAULT NULL,
`views` int(11) DEFAULT NULL,
PRIMARY KEY (`idArtikel`)
) ENGINE=MyISAM AUTO_INCREMENT=39 DEFAULT CHARSET=latin1;
INSERT INTO t_artikel VALUES("32","Kanker Serviks","<p><span style=\"background-color: #ffffff;\"><span style=\"font-size: small; font-family: \'arial black\', \'avant garde\';\">Apa yang dimaksud dengan kanker servik ?</span></span></p>
\n<p style=\"text-align: justify;\"><span> <span style=\"font-size: small; font-family: \'times new roman\', times;\">Kanker serviks adalah tumor ganas yang terletak pada saluran rahim vagina dan serviks. Tingginya kasus kanker serviks umumnya berusia sekitar 50 tahun, dan pasien dengan kanker serviks pada pernikahan dini, hamil dini, perempuan dari infeksi HPV produktif. Dalam beberapa tahun terakhir, kejadian kanker serviks secara bertahap usia dekat dengan wanita muda.</span></span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'times new roman\', times;\"> Kasus kanker serviks menduduki peringkat kedua di seluruh kanker perempuan. Setiap tahunnya sekitar 53.000 kasus kanker serviks terjadi, dimana 85% kasus kanker serviks berasal dari negara berkembang.</span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'times new roman\', times;\"> Setiap tahunnya sekitar 7,6 juta orang di seluruh dunia meninggal karena kanker, akuntansi untuk 13 persen kematian kanker secara global, termasuk korban kanker serviks dari sekitar 27.000, dan 88% angka kematian berasal dari negara berkembang.</span></p>
\n<p style=\"text-align: justify;\"><span style=\"background-color: #ffffff;\"><span style=\"font-size: small; font-family: \'arial black\', \'avant garde\'; color: #800000;\">Apa penyebab kanker servik ?</span></span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'times new roman\', times;\"> 70% kanker serviks disebabkan oleh human papillomavirus (HPV), diikuti oleh kanker serviks yang disebabkan oleh merokok dan human immunodeficiency virus (HIV), dan faktor risiko lainnya saling terkait yang juga dapat menyebabkan kanker serviks.</span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'times new roman\', times;\"> Faktor risiko kanker serviks lainnya antara lainnya meliputi : klamidia, kebiasaan makan yang buruk, menggunakan obat-obatan yang mengandung hormon, riwayat keluarga yang terkena kanker serviks, terlalu sering mengkonsumsi pil kontrasepsi, berhubungan seksual di usia muda, hamil dini, melahirkan banyak anak dan faktor lain yang cenderung memicu kanker serviks.</span></p>","","2013-02-12 03:35:38","0");
INSERT INTO t_artikel VALUES("36","KANKER ENDOMETRIUM ","<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'times new roman\', times;\"><strong>Kanker endometrium adalah jaringan atau selaput lender rahim yang tumbuh di luar rahim.</strong><span style=\"font-size: small; font-family: \'times new roman\', times; text-align: justify;\"> Padahal, seharusnya jaringan endometrium melapisi dinding rahim.</span></span></p>
\n<p class=\"MsoNormal\" style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'times new roman\', times;\">Kanker endometrium tumbuh pada ovarium, tuba falopii, dan saluran menuju vagina. Kanker ini bukan merupakan penyakit akibat hubungan seksual. Wanita muda maupun yang sudah tua dapat terkena penyakit ini. Walaupun pada umumnya yang terserang wanita yang sudah tua.</span></p>
\n<p class=\"MsoNormal\" style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'times new roman\', times;\"><span style=\"font-size: small; font-family: \'times new roman\', times;\">Tumbuhnya jaringan endometrium di luar rahim kemungkinan disebabkan oleh darah menstruasi masuk kembali ke tuba falopii dengan membawa jaringan dari lapisan dinding rahim sehingga jaringan tersebut menetap dan tumbuh di luar rahim. Kemungkinan lain adalah jaringan endometrium terbawa ke luar rahim melalui pembuluh darah atau kelenjar getah bening.</span></span></p>
\n<p class=\"MsoNormal\" style=\"text-align: justify;\"><span style=\"font-family: \'arial black\', \'avant garde\';\"><span style=\"background-color: #ffffff; font-size: small; color: #800000;\">Faktor pemicu kanker endometrium</span><span style=\"font-size: small;\"> </span></span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'times new roman\', times;\">Penyebab utama kanker tersebut masih belum jelas, tetapi pada beberapa penelitian ditemukan adanya mutasi (perubahan secara genetik yang abnormal) dari gen p35. Beberapa hal yang diduga sebagai pemicu (faktor resiko) kanker dinding rahim adalah:</span></p>
\n<p style=\"text-align: justify;\"> </p>
\n<ul class=\"gkCircle2\" style=\"text-align: justify;\">
\n<li><span style=\"font-family: \'times new roman\', times; font-size: small; font-weight: bold;\">Obesitas </span><span style=\"font-family: \'times new roman\', times; font-size: small;\">Pada wanita pascamenopause, kebanyakan estrogen berasal dari perubahan androstenedion menjadi estron pada jaringan lemak. Kecepatan perubahan ini 15-20 kali lebih besar pada wanita gemuk. Oleh karena itu, estrogen dalam darah wanita gemuk diketahui lebih tinggi. Kadar estrogen yang tinggi diyakini dapat meningkatkan resiko kanker dinding rahim.</span></li>
\n<li><span style=\"font-size: small; font-family: \'times new roman\', times;\"><strong>Sindroma ovarium polikistik</strong><br />Sindroma ovarium polikistik menyebabkan kadar estrogen dalam darah yang sangat tinggi sehingga meningkatkan resiko kanker.</span></li>
\n<li><span style=\"font-size: small; font-family: \'times new roman\', times;\"><strong>Menstruasi terlalu dini (Menarke dini)</strong><br />Wanita yang mulai menstruasi pada usia di bawah usia 12 tahun memiliki resiko yang lebih tinggi karena adanya peningkatan waktu paparan dinding rahim terhadap estrogen.</span></li>
\n<li><span style=\"font-size: small; font-family: \'times new roman\', times;\"><strong>Menopause lambat</strong><br />Hal ini sama kaitannya dengan paparan estrogen yang bertambah lama pada dinding rahim.</span></li>
\n<li><span style=\"font-size: small; font-family: \'times new roman\', times;\"><strong>Riwayat diabetes, hipertensi, dan penyakit kandung empedu</strong><br />Kondisi di atas umumnya ditentukan pada pasien kegemukan, yang juga dapat meningkatkan resiko kejadian kanker. Diabetes, terutama diabetes yang tidak tergantung insulin, dikaitkan dengan keadaan hiperinsulinemia (kadar hormon insulin berlebih dalam darah). Hiperinsulinemia dikaitkan dengan keadaan hiperestrogen termasuk peningkatan produksi steroid, stimulasi perubahan testosteron menjadi estradiol (cikal bakal estrogen) dan penekanan konsentrasi protein (globulin) yang berkaitan dengan hormon seks dalam sirkulasi.</span></li>
\n<li><span style=\"font-size: small; font-family: \'times new roman\', times;\"><strong>Penggunaan jangka panjang kombinasi kontrasepsi oral dosis tinggi</strong><br />Penggunaan terapi pengganti estrogen konjugasi untuk jangka waktu lama meningkatkan resiko kanker antara 2-15 kali, tetapi menurun dengan pemberhentian penggunaan terapi pengganti estrogen. Kontrasepsi oral sekuensial juga memberikan efek estrogenik netto, yang turut meningkatkan resiko kanker dinding rahim.</span></li>
\n</ul>
\n<p style=\"text-align: justify;\"><span><span style=\"font-size: small; font-family: \'times new roman\', times;\">Informasi lengkapnya di: <a href=\"http://www.deherba.com/apa-itu-kanker-endometrium.html#ixzz2Kd0h4ae8\">http://www.deherba.com/apa-itu-kanker-endometrium.html#ixzz2Kd0h4ae8</a></span></span></p>","","2013-02-12 04:49:17","0");
INSERT INTO t_artikel VALUES("30","Kanker Ovarium","<p><span style=\"font-size: small; background-color: #ffffff; font-family: \'arial black\', \'avant garde\'; color: #800000;\">Apa itu kanker ovarium ?</span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'book antiqua\', palatino;\"><span style=\"font-size: small;\"> </span><span style=\"font-size: small;\">Kanker ovarium adalah sebuah penyakit sel tumor ganas didalam ovarium wanita. Merupakan salah satu tumor yang paling sering ditemukan pada organ reproduksi wanita. Dikarenakan jaringan di dalam ovarium dan kompleksitas fungsi endokrin, sulit mendeteksi apakah tumor tersebut jinak atau ganas. Saat diagnosis, mayoritas sel kanker sudah menyebar ke organ disekitarnya.</span></span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'book antiqua\', palatino;\"> Tingkat kematian tumor ganas ovarium menduduki urutan pertama pada onkologi ginekologi. Sudah menjadi ancaman serius buat kehidupan dan kesehatan para kaum wanita. Setelah ditemukan adanya kanker ovarium, sekitar 2/3 diantaranya sudah memasuki stadium lanjut. Oleh karena itu tingkat kelangsungan hidup dalam waktu lima tahun hanya tinggal 20%-30%. Setelah timbul penyakit, sedikit yang bisa hidup sampai 3 tahun. Semakin tua usianya, tingkat kematian kanker ovarium juga semakin tinggi.</span></p>
\n<p style=\"text-align: justify;\"><span><span style=\"font-size: small; font-family: \'book antiqua\', palatino;\"> Namun masih ada harapan seperti, semakin cepat terdeteksi kanker ovarium dan menggunakan teknik pengobatan yang maju akan mendapatkan hasil pengobatan yang efektif, memperpanjang kehidupan wanita dan meningkatkan kualitas hidup.</span></span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; background-color: #ffffff; font-family: \'arial black\', \'avant garde\'; color: #800000;\">Apa yang menjadi penyebab kanker ovarium ?</span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'book antiqua\', palatino;\"><strong><span style=\"font-size: small;\">1. Faktor lingkungan :</span></strong><span style=\"font-size: small;\"> </span></span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'book antiqua\', palatino;\">tingkat kejadian kanker ovarium lebih tinggi di negara industri yang berkembang dan wanita dilapisan masyarakat atas, kemungkinan berhubungan dengan pola makan yang tinggi kolesterol. Selain itu, radiasi komputer, asbes dan talek dapat meningkatkan resiko terkena kanker ovarium. Merokok dan kurangnya vitamin A,C,E juga ada kaitannya.<!--more--></span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'book antiqua\', palatino;\"><strong>2. Faktor endokrin :</strong> </span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'book antiqua\', palatino;\">kanker ovarium lebih banyak terjadi pada nulipara atau wanita steril. Kehamilan tampaknya memiliki efek melawan kanker ovarium. Menurut kedokteran, ovulasi yang setiap hari menyebabkan epitel ovarium rusak berulang kali, ada hubungannya dengan kanker ovarium. Selain itu, <a title=\"kanker payudara\" href=\"http://www.asiancancer.com/indonesian/cancer-topics/breast-cancer/\">kanker payudara</a>, kanker endometrium dan kanker ovarium mudah terjadi bersamaan. Ketiga penyakit ini mempunyai sifat ketergantungan terhadap endokrin.</span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'book antiqua\', palatino;\"><strong>3. Faktor genetik dan keluarga : </strong></span></p>
\n<p style=\"text-align: justify;\"><span style=\"font-size: small; font-family: \'book antiqua\', palatino;\"><strong></strong>sekitar 20%-25% pasien kanker ovarium anggota keluarganya mengidap kanker.</span></p>
\n<p style=\"text-align: justify;\"> </p>","","2013-02-11 15:57:08","12");
DROP TABLE t_aturan;
CREATE TABLE `t_aturan` (
`ida` int(3) NOT NULL AUTO_INCREMENT,
`idg` varchar(4) NOT NULL,
`idp` varchar(4) NOT NULL,
`mb` float NOT NULL,
`md` float NOT NULL,
PRIMARY KEY (`ida`)
) ENGINE=MyISAM AUTO_INCREMENT=258 DEFAULT CHARSET=latin1;
INSERT INTO t_aturan VALUES("140","G004","P002","0.75","0.02");
INSERT INTO t_aturan VALUES("139","G004","P001","0.8","0.01");
INSERT INTO t_aturan VALUES("138","G013","P008","0.75","0.02");
INSERT INTO t_aturan VALUES("137","G013","P007","0.02","0.75");
INSERT INTO t_aturan VALUES("136","G013","P006","0.02","0.75");
INSERT INTO t_aturan VALUES("135","G013","P005","0.02","0.75");
INSERT INTO t_aturan VALUES("134","G013","P003","0.02","0.75");
INSERT INTO t_aturan VALUES("133","G013","P002","0.02","0.75");
INSERT INTO t_aturan VALUES("132","G013","P001","0.02","0.75");
INSERT INTO t_aturan VALUES("131","G012","P008","0.8","0.02");
INSERT INTO t_aturan VALUES("130","G012","P007","0.02","0.8");
INSERT INTO t_aturan VALUES("129","G012","P006","0.02","0.8");
INSERT INTO t_aturan VALUES("128","G012","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("127","G012","P003","0.02","0.8");
INSERT INTO t_aturan VALUES("126","G012","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("125","G012","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("124","G003","P008","0.8","0.02");
INSERT INTO t_aturan VALUES("123","G003","P007","0.8","0.02");
INSERT INTO t_aturan VALUES("122","G003","P006","0.8","0.02");
INSERT INTO t_aturan VALUES("121","G003","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("120","G003","P003","0.02","0.8");
INSERT INTO t_aturan VALUES("119","G003","P002","0.8","0.02");
INSERT INTO t_aturan VALUES("118","G003","P001","0.8","0.02");
INSERT INTO t_aturan VALUES("117","G002","P008","0.02","0.8");
INSERT INTO t_aturan VALUES("116","G002","P007","0.02","0.8");
INSERT INTO t_aturan VALUES("115","G002","P006","0.02","0.75");
INSERT INTO t_aturan VALUES("114","G002","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("113","G002","P003","0.02","0.75");
INSERT INTO t_aturan VALUES("112","G002","P002","0.8","0.02");
INSERT INTO t_aturan VALUES("111","G002","P001","0.8","0.02");
INSERT INTO t_aturan VALUES("110","G001","P008","0.75","0.02");
INSERT INTO t_aturan VALUES("109","G001","P007","0.02","0.8");
INSERT INTO t_aturan VALUES("108","G001","P006","0.02","0.8");
INSERT INTO t_aturan VALUES("107","G001","P005","0.02","0.75");
INSERT INTO t_aturan VALUES("106","G001","P003","0.02","0.8");
INSERT INTO t_aturan VALUES("105","G001","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("104","G001","P001","0.8","0.02");
INSERT INTO t_aturan VALUES("141","G004","P003","0.08","0.02");
INSERT INTO t_aturan VALUES("142","G004","P005","0.7","0.02");
INSERT INTO t_aturan VALUES("143","G004","P006","0.02","0.75");
INSERT INTO t_aturan VALUES("144","G004","P007","0.6","0.02");
INSERT INTO t_aturan VALUES("145","G004","P008","0.02","0.75");
INSERT INTO t_aturan VALUES("146","G005","P001","0.8","0.02");
INSERT INTO t_aturan VALUES("147","G005","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("148","G005","P003","0.02","0.8");
INSERT INTO t_aturan VALUES("149","G005","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("150","G005","P006","0.01","0.8");
INSERT INTO t_aturan VALUES("151","G005","P007","0.02","0.75");
INSERT INTO t_aturan VALUES("152","G005","P008","0.02","0.8");
INSERT INTO t_aturan VALUES("153","G006","P001","0.8","0.02");
INSERT INTO t_aturan VALUES("154","G006","P002","0.02","0.75");
INSERT INTO t_aturan VALUES("155","G006","P003","0.02","0.8");
INSERT INTO t_aturan VALUES("156","G006","P005","0.01","0.75");
INSERT INTO t_aturan VALUES("157","G006","P006","0.02","0.7");
INSERT INTO t_aturan VALUES("158","G006","P007","0.02","0.8");
INSERT INTO t_aturan VALUES("159","G006","P008","0.02","0.75");
INSERT INTO t_aturan VALUES("160","G007","P001","0.8","0.02");
INSERT INTO t_aturan VALUES("161","G007","P002","0.02","0.75");
INSERT INTO t_aturan VALUES("162","G007","P003","0.75","0.02");
INSERT INTO t_aturan VALUES("163","G007","P005","0.7","0.01");
INSERT INTO t_aturan VALUES("164","G007","P006","0.02","0.75");
INSERT INTO t_aturan VALUES("165","G007","P007","0.02","0.8");
INSERT INTO t_aturan VALUES("166","G007","P008","0.8","0.01");
INSERT INTO t_aturan VALUES("167","G008","P001","0.02","0.75");
INSERT INTO t_aturan VALUES("168","G008","P002","0.8","0.02");
INSERT INTO t_aturan VALUES("169","G008","P003","0.02","0.8");
INSERT INTO t_aturan VALUES("170","G008","P005","0.02","0.75");
INSERT INTO t_aturan VALUES("171","G008","P006","0.02","0.8");
INSERT INTO t_aturan VALUES("172","G008","P007","0.02","0.75");
INSERT INTO t_aturan VALUES("173","G008","P008","0.01","0.8");
INSERT INTO t_aturan VALUES("174","G009","P001","0.02","0.75");
INSERT INTO t_aturan VALUES("175","G009","P002","0.8","0.02");
INSERT INTO t_aturan VALUES("176","G009","P003","0.01","0.8");
INSERT INTO t_aturan VALUES("177","G009","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("178","G009","P006","0.02","0.8");
INSERT INTO t_aturan VALUES("179","G009","P007","0.02","0.75");
INSERT INTO t_aturan VALUES("180","G009","P008","0.02","0.8");
INSERT INTO t_aturan VALUES("181","G010","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("182","G010","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("183","G010","P003","0.8","0.02");
INSERT INTO t_aturan VALUES("184","G010","P005","0.02","0.75");
INSERT INTO t_aturan VALUES("185","G010","P006","0.01","0.8");
INSERT INTO t_aturan VALUES("186","G010","P007","0.02","0.8");
INSERT INTO t_aturan VALUES("187","G010","P008","0.02","0.8");
INSERT INTO t_aturan VALUES("188","G011","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("189","G011","P002","0.01","0.8");
INSERT INTO t_aturan VALUES("190","G011","P003","0.8","0.02");
INSERT INTO t_aturan VALUES("191","G011","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("192","G011","P006","0.02","0.75");
INSERT INTO t_aturan VALUES("193","G011","P007","0.02","0.8");
INSERT INTO t_aturan VALUES("194","G011","P008","0.02","0.8");
INSERT INTO t_aturan VALUES("195","G014","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("196","G014","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("197","G014","P003","0.02","0.8");
INSERT INTO t_aturan VALUES("198","G014","P005","0.02","0.75");
INSERT INTO t_aturan VALUES("199","G014","P006","0.02","0.75");
INSERT INTO t_aturan VALUES("200","G014","P007","0.01","0.8");
INSERT INTO t_aturan VALUES("201","G014","P008","0.8","0.02");
INSERT INTO t_aturan VALUES("202","G015","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("203","G015","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("204","G015","P003","0.02","0.8");
INSERT INTO t_aturan VALUES("205","G015","P005","0.02","0.75");
INSERT INTO t_aturan VALUES("206","G015","P006","0.02","0.8");
INSERT INTO t_aturan VALUES("207","G015","P007","0.02","0.75");
INSERT INTO t_aturan VALUES("208","G015","P008","0.8","0.02");
INSERT INTO t_aturan VALUES("209","G018","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("210","G018","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("211","G018","P003","0.02","0.75");
INSERT INTO t_aturan VALUES("212","G018","P005","0.8","0.02");
INSERT INTO t_aturan VALUES("213","G018","P006","0.02","0.8");
INSERT INTO t_aturan VALUES("214","G018","P007","0.02","0.75");
INSERT INTO t_aturan VALUES("215","G018","P008","0.02","0.8");
INSERT INTO t_aturan VALUES("216","G019","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("217","G019","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("218","G019","P003","0.02","0.75");
INSERT INTO t_aturan VALUES("219","G019","P005","0.8","0.02");
INSERT INTO t_aturan VALUES("220","G019","P006","0.02","0.75");
INSERT INTO t_aturan VALUES("221","G019","P007","0.02","0.8");
INSERT INTO t_aturan VALUES("222","G019","P008","0.02","0.8");
INSERT INTO t_aturan VALUES("223","G020","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("224","G020","P002","0.02","0.75");
INSERT INTO t_aturan VALUES("225","G020","P003","0.02","0.8");
INSERT INTO t_aturan VALUES("226","G020","P005","0.02","0.75");
INSERT INTO t_aturan VALUES("227","G020","P006","0.02","0.8");
INSERT INTO t_aturan VALUES("228","G020","P007","0.8","0.02");
INSERT INTO t_aturan VALUES("229","G020","P008","0.02","0.8");
INSERT INTO t_aturan VALUES("230","G021","P001","0.02","0.75");
INSERT INTO t_aturan VALUES("231","G021","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("232","G021","P003","0.02","0.75");
INSERT INTO t_aturan VALUES("233","G021","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("234","G021","P006","0.02","0.8");
INSERT INTO t_aturan VALUES("235","G021","P007","0.8","0.02");
INSERT INTO t_aturan VALUES("236","G021","P008","0.01","0.8");
INSERT INTO t_aturan VALUES("237","G022","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("238","G022","P002","0.02","0.08");
INSERT INTO t_aturan VALUES("239","G022","P003","0.02","0.75");
INSERT INTO t_aturan VALUES("240","G022","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("241","G022","P006","0.02","0.8");
INSERT INTO t_aturan VALUES("242","G022","P007","0.8","0.02");
INSERT INTO t_aturan VALUES("243","G022","P008","0.02","0.75");
INSERT INTO t_aturan VALUES("244","G023","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("245","G023","P002","0.02","0.75");
INSERT INTO t_aturan VALUES("246","G023","P003","0.8","0.02");
INSERT INTO t_aturan VALUES("247","G023","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("248","G023","P006","0.02","0.75");
INSERT INTO t_aturan VALUES("249","G023","P007","0.02","0.8");
INSERT INTO t_aturan VALUES("250","G023","P008","0.02","0.8");
INSERT INTO t_aturan VALUES("251","G024","P001","0.02","0.8");
INSERT INTO t_aturan VALUES("252","G024","P002","0.02","0.8");
INSERT INTO t_aturan VALUES("253","G024","P003","0.02","0.75");
INSERT INTO t_aturan VALUES("254","G024","P005","0.02","0.8");
INSERT INTO t_aturan VALUES("255","G024","P006","0.8","0.02");
INSERT INTO t_aturan VALUES("256","G024","P007","0.02","0.75");
INSERT INTO t_aturan VALUES("257","G024","P008","0.02","0.8");
DROP TABLE t_gejala;
CREATE TABLE `t_gejala` (
`idg` varchar(4) NOT NULL,
`gejala` text NOT NULL,
`pertanyaan` text NOT NULL,
PRIMARY KEY (`idg`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
INSERT INTO t_gejala VALUES("G019","Mengeluarkan cairan encer bercampur darah","Apakah anda mengeluarkan cairan encer bercampur darah ?");
INSERT INTO t_gejala VALUES("G018","Benjolan pada vagina","Apakah anda merasa ada benjolan pada vagina ?");
INSERT INTO t_gejala VALUES("G017","Nyeri atau kesulitan dalam BAB","Apakah anda mengalami nyeri atau kesulitan dalam BAB ?");
INSERT INTO t_gejala VALUES("G016","Keputihan yang berbau","Apakah anda mengalami keputihan yang berbau ?");
INSERT INTO t_gejala VALUES("G015","Munculnya rasa terbakar dan panas serta rasa gatal pada daerah vulva","Apakah anda mengalami rasa terbakar dan panas serta rasa gatal pada daerah vulva ?");
INSERT INTO t_gejala VALUES("G014","Permukaan vulva menjadi lebih kasar","Apakah permukaan vulva menjadi lebih kasar ?");
INSERT INTO t_gejala VALUES("G013","Iritasi vulva atau prutitus (gatal-gatal)","Apakah anda mengalami iritasi vulva atau prutitus (gatal-gatal) ?");
INSERT INTO t_gejala VALUES("G012","Timbul benjolan di vulva","Apakah timbul benjolan di vulva ?");
INSERT INTO t_gejala VALUES("G011","Perut membuncit disertai sesak napas","Apakah perut anda membincit disertai sesak napas ?");
INSERT INTO t_gejala VALUES("G009","Perasaan lelah terus menerus","Apakah anda merasa terus menerus ?");
INSERT INTO t_gejala VALUES("G010","Perut membesar terasa ada benjolan","Apakah perut anda membesar terasa ada benjolan ?");
INSERT INTO t_gejala VALUES("G008","Pendarahan berat","Apakah anda mengalami pendarahan berat ?");
INSERT INTO t_gejala VALUES("G007","Buang air kecil dan sakit","Apakah anda mengalami buang air kecil dan sakit ?");
INSERT INTO t_gejala VALUES("G006","Sering berkemih","Apakah anda sering mengalami berkemih ?");
INSERT INTO t_gejala VALUES("G005","Nyeri pinggang dan panggul","Apakah anda mengalami nyeri pinggang dan panggul ?");
INSERT INTO t_gejala VALUES("G004","Nyeri panggul / bawah perut","Apakah anda mengalami nyeri panggul / bawah perut ?");
INSERT INTO t_gejala VALUES("G003","Pendarahan di luar haid / pendarahan monopause","Apakah anda mengalami pendarahan di luar haid / pendarahan monopause ?");
INSERT INTO t_gejala VALUES("G001","Keputihan","Apakah anda keputihan ?");
INSERT INTO t_gejala VALUES("G002","Sering pendarahan dan nyeri saat berhubungan","Apakah anda sering pendarahan dan nyeri saat berhubungan ?");
INSERT INTO t_gejala VALUES("G020","Keputihan bercampur darah","Apakah anda mengalami keputihan bercampur darah ?");
INSERT INTO t_gejala VALUES("G021","Keputihan yang berbau dan bercampur darah","Apakah anda mengalami keputihan yang berbau dan bercampur darah ?");
INSERT INTO t_gejala VALUES("G022","Keluhan sesak di abdomen bagian bawah","Apakah anda mengalami keluhan sesak di abdomen bagian bawah ?");
INSERT INTO t_gejala VALUES("G023","Perut terasa pegah, kembung dan tidak nyaman","Apakah perut anda terasa pegah, kembung dan tidak merasa nyaman ?");
INSERT INTO t_gejala VALUES("G024","Perasaan nyeri yang menjalar ke pangkal paha, punggung dan berdarah","Apakah anda merasa nyeri yang menjalar ke pangkal paha, punggung dan berdarah ?");
DROP TABLE t_konsultasi;
CREATE TABLE `t_konsultasi` (
`idk` int(3) NOT NULL,
`idn` int(3) NOT NULL,
`idp` int(3) NOT NULL,
`cf` float NOT NULL,
PRIMARY KEY (`idk`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
DROP TABLE t_pakar;
CREATE TABLE `t_pakar` (
`idr` int(2) NOT NULL AUTO_INCREMENT,
`nama` varchar(50) NOT NULL,
`password` varchar(15) NOT NULL,
PRIMARY KEY (`idr`)
) ENGINE=MyISAM AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
INSERT INTO t_pakar VALUES("1","maher","");
DROP TABLE t_pasien;
CREATE TABLE `t_pasien` (
`ids` int(3) NOT NULL AUTO_INCREMENT,
`nama` varchar(50) NOT NULL,
`username` varchar(25) NOT NULL,
`umur` varchar(2) NOT NULL,
`pek` varchar(25) NOT NULL,
`status` varchar(10) NOT NULL,
`email` varchar(25) NOT NULL,
`telepon` int(12) NOT NULL,
`postdate` datetime NOT NULL,
PRIMARY KEY (`ids`)
) ENGINE=MyISAM AUTO_INCREMENT=35 DEFAULT CHARSET=latin1;
INSERT INTO t_pasien VALUES("22","bismillah","bismillah","10","beroda","single","bismillah","2147483647","0000-00-00 00:00:00");
INSERT INTO t_pasien VALUES("28","umi sholiihah","umi","20","pelajar","a","<EMAIL>","86553434","0000-00-00 00:00:00");
INSERT INTO t_pasien VALUES("29","ind<NAME>","dewi","23","mahasiswa","belum kawi","<EMAIL>","2147483647","0000-00-00 00:00:00");
INSERT INTO t_pasien VALUES("31","vila yola","vila","23","swasta","Kawin","<EMAIL>","2147483647","0000-00-00 00:00:00");
INSERT INTO t_pasien VALUES("32","vila yola","gina","23","swasta","Kawin","<EMAIL>","2147483647","0000-00-00 00:00:00");
INSERT INTO t_pasien VALUES("34","meika wulandari ","meika","17","pelajar","belum kawi","<EMAIL>","2147483647","0000-00-00 00:00:00");
DROP TABLE t_penyakit;
CREATE TABLE `t_penyakit` (
`idp` varchar(4) NOT NULL,
`nama` varchar(50) NOT NULL,
`desk` text NOT NULL,
`penanganan` text NOT NULL,
PRIMARY KEY (`idp`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
INSERT INTO t_penyakit VALUES("P001","<NAME> (Cervical Cancer)","Kanker serviks adalah keganasan yang terjadi pada leher rahim. Kanker serviks disebut juga kanker leher rahim atau kanker mulut rahim dimulai pada lapisan serviks. ","1. IVA - Inspeksi Visual dengan Asam asetat. Merupakan deteksi dini yang dapat Anda lakukan di klinik. Caranya dengan mengoleskan larutan asam asetat 3%-5% ke leher rahim, kemudian mengamati apakah ada perubahan warna, misalnya muncul bercak putih. Jika ada, berarti kemungkinan terdapat infeksi pada serviks dan harus dilakukan pemeriksaaan lanjutan.");
INSERT INTO t_penyakit VALUES("P002","<NAME> (Endometrium Cancer)","Penyakit ini sering disebut juga kanker endometrium dan paling sering di alami oleh perempuan berusia 50-60 tahun. ","1. Melakukan pemeriksaan aspirasi kuretase kavum uteri, yaitu untuk pemeriksaan sitologik (patologi anatomi).
\n2. Mengobati obesitas.
\n3. Memilih pil KB kombinasi dan pengawasan pemberian pengobatan hormon estrogen.
\n4. Melakukan aspirasi kuretase pada wanita pasca-menopause yang gemuk dengan riwayat keluarga kanker endometrium dan payudara, wanita menopause pada usia lebih dari 52 tahun dan pada wanita pre-menopause dengan siklus anovulatoar yang lama.
\n");
INSERT INTO t_penyakit VALUES("P003","Kanker Indung Telur (Ovarium Cancer) ","Kanker ovarium merupakan sebuah penyakit di mana ovarium yang dimiliki wanita memiliki perkembangan sel-sel abnormal. Secara umum, kanker ovarium merupakan suatu bentuk kanker yang menyerang ovarium. Kanker ini bisa berkembang sangat cepat, bahkan, dari stadium awal hingga stadium lanjut bisa terjadi hanya dalam satu tahun saja. ","1. Pembedahan, ada dua tujuan yakni pengobatan dan penentuan stadium surgikal.
\n2. Kemoterapi , pasien dengan Stadium 1 A derajat 1 dan 2 jenis epitel mempunyai harapan hidup 5 tahun 95% dengan atau pemberian kemoterapi. ");
INSERT INTO t_penyakit VALUES("P008","<NAME>","<NAME> adalah kanker yang terjadi di bagian luar permukaan alat kelamin wanita. Kebanyakan kanker vulva adalah jenis dari squamous cell carcinoma, jenis kanker kulit yang berkembang secara lambat dalam hitungan tahun.Kanker Vulva adalah kanker yang terjadi di bagian luar permukaan alat kelamin wanita. Kebanyakan kanker vulva adalah jenis dari squamous cell carcinoma, jenis kanker kulit yang berkembang secara lambat dalam hitungan tahun.","Lakukan 1. Pembedahan dan radio terapi pasca bedah bila termasuk kelompok prognosis buruk. Bila massa tumor besar untuk pembedahan dan batas sayatan bebas tumor, maka perlu diberikan kemoradiasi prabedah dan dilanjutkan dengan pembedahan untuk mengangkat residu tumor");
INSERT INTO t_penyakit VALUES("P005","Kanker Vagina","Vagina adalah saluran yang menghubungkan mulut dan leher rahim dengan bagian luar tubuh. Kanker yang menyerang vagina biasanya ditemukan disaluran vagina atau pada dinding dalam vagina.","Lakukan anamnesis kemudian dilanjutkan pemeriksaan fisik lengkap, pemeriksaan foto paru-paru untuk menyingkirkan metastasis jauh, sistoskopi, dan prostoktopi untuk menyingkirkan metastasis kandung kemih atau rectum.");
INSERT INTO t_penyakit VALUES("P006","Kanker Tuba Fallopii",". Kanker ini merupakan 0,1% sampai 1,8% dari kanker ginekologik.Lebih dari 60% kanker tuba di jumpai pada usia pascamonopause. ","Pelaksanaan pengobatan pada dasarnya sama dengan pada kanker ovarium. Pada terapi pembedahan dilakukan histerektomi total dan salpingo-ooforektomi bilateral serta dilakukan penetapan stadium surgical, termasuk pemeriksaan cairan asites/ bilasan peritoneum dan pengambilan sampel kelenjar getah bening merupakan tindakan pembedahan yang optimal.");
INSERT INTO t_penyakit VALUES("P007","Kanker Korpus Uteri","Kankar korpus uteri terjadi pada usia lanjut, sekitar 40-80 tahun setelah melewati mati haid (menopause). Kejadiannya makin meningkat sejalan dengan banyaknya wanita mencapai usia lanjut. ","1. Di lakukan pemeriksaan papsmear
\n2. Melakukan pemeriksaan dalam
\n3. Pemberian obat klinis, dan terutama dengan kemoterapi/sitostatika (obat pembunuh sel kanker). Pada saat ini sudah ditemukan sebuah obat sitostatika yang sangat ampuh untuk keberhasilan pengobatan penyakit. ");
DROP TABLE t_pertanyaan;
CREATE TABLE `t_pertanyaan` (
`id` int(3) NOT NULL AUTO_INCREMENT,
`idg` varchar(4) NOT NULL,
`g_ya` varchar(5) NOT NULL,
`g_tidak` varchar(5) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=61 DEFAULT CHARSET=latin1;
INSERT INTO t_pertanyaan VALUES("57","G021","G022","G022");
INSERT INTO t_pertanyaan VALUES("56","G020","G021","G021");
INSERT INTO t_pertanyaan VALUES("55","G019","","");
INSERT INTO t_pertanyaan VALUES("54","G018","G019","G019");
INSERT INTO t_pertanyaan VALUES("53","G015","","");
INSERT INTO t_pertanyaan VALUES("52","G014","G015","G015");
INSERT INTO t_pertanyaan VALUES("51","G011","G023","G023");
INSERT INTO t_pertanyaan VALUES("50","G010","G011","G018");
INSERT INTO t_pertanyaan VALUES("49","G009","","");
INSERT INTO t_pertanyaan VALUES("48","G008","G009","G006");
INSERT INTO t_pertanyaan VALUES("47","G007","G010","G018");
INSERT INTO t_pertanyaan VALUES("46","G006","G005","G020");
INSERT INTO t_pertanyaan VALUES("45","G005","","");
INSERT INTO t_pertanyaan VALUES("44","G004","G007","G003");
INSERT INTO t_pertanyaan VALUES("43","G013","G014","G014");
INSERT INTO t_pertanyaan VALUES("42","G012","G013","G024");
INSERT INTO t_pertanyaan VALUES("41","G003","G001","G024");
INSERT INTO t_pertanyaan VALUES("40","G002","G008","");
INSERT INTO t_pertanyaan VALUES("39","G001","G012","G024");
INSERT INTO t_pertanyaan VALUES("58","G022","","");
INSERT INTO t_pertanyaan VALUES("59","G023","","");
INSERT INTO t_pertanyaan VALUES("60","G024","","");
DROP TABLE t_saran;
CREATE TABLE `t_saran` (
`idn` int(3) NOT NULL AUTO_INCREMENT,
`nama` varchar(50) NOT NULL,
`email` varchar(25) NOT NULL,
`saran` text NOT NULL,
`waktu` datetime NOT NULL,
PRIMARY KEY (`idn`)
) ENGINE=MyISAM AUTO_INCREMENT=7 DEFAULT CHARSET=latin1;
INSERT INTO t_saran VALUES("1","<NAME>","<EMAIL>","cepat diselesaikan supaya nadng
\ncepat diselesaikan supaya nadng wisuda
\ncepat diselesaikan supaya nadng wisuda
\ncepat diselesaikan supaya nadng wisuda
\ncepat diselesaikan supaya nadng wisuda","0000-00-00 00:00:00");
INSERT INTO t_saran VALUES("5","maherni","<EMAIL>","hari ini henny ada bimbingan ma bu uyun,,jangan smapai telat..buruan mandi yach hhhe:)","2013-02-01 08:12:41");
INSERT INTO t_saran VALUES("6","meika","<EMAIL>","ndari sedang sakit...semoga cepat sembuh...amin :)","2013-02-06 18:13:29");
DROP TABLE t_user;
CREATE TABLE `t_user` (
`userid` varchar(25) NOT NULL,
`password` varchar(25) NOT NULL,
`level` varchar(5) NOT NULL,
`create_date` date NOT NULL,
`last_login` date NOT NULL,
PRIMARY KEY (`userid`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
INSERT INTO t_user VALUES("bismillah","e172dd95f4feb21412a692e73","10","2013-01-20","2013-01-20");
INSERT INTO t_user VALUES("maher","henny","50","2013-01-08","2013-01-10");
INSERT INTO t_user VALUES("henny","hennymaher","10","2013-01-20","2013-01-20");
INSERT INTO t_user VALUES("mei","mei","10","2013-01-20","2013-01-20");
INSERT INTO t_user VALUES("umi","umi","10","2013-01-28","2013-01-28");
INSERT INTO t_user VALUES("dewi","dewi","10","2013-01-28","2013-01-28");
INSERT INTO t_user VALUES("vila","vila","10","2013-01-28","2013-01-28");
INSERT INTO t_user VALUES("gina","gina","10","2013-01-28","2013-01-28");
INSERT INTO t_user VALUES("mimi","mimi","10","2013-01-29","2013-01-29");
INSERT INTO t_user VALUES("meika","meika","10","2013-02-06","2013-02-06");
<file_sep>/modul/gejala.php
<?php
require "inc/koneksi.php";
require "script.php";
require 'inc/buatid.php';
if(empty($no))
$posisi=0;
$no=$posisi+1;
$que = mysql_query("SELECT * FROM t_gejala ORDER BY idg ASC ");
?>
<h3 align="center"><br>     .: DATA GEJALA PENYAKIT KANKER KANDUNGAN :.</h3><br>
<?php
if (!empty($_GET['message']) && $_GET['message'] == 'success') {
echo '<center>Berhasil Menambah Data Baru </center>';
}
?>
<div class="demo_jui">
<br><br>
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<td width="1%" align="center">No</td>
<td width="2%" align="center">Kode</td>
<td width="25%" align="center">Nama Gejala</td>
<td width="25%" align="center">Pertanyaan</td>
<td width="2%" align="center">Detail</td>
</tr>
</thead>
<?php
while ($konten = mysql_fetch_array($que)){
?>
<tr class="gradeC">
<td align="center"><?php echo $no; ?></td>
<td align="justify"><?php echo $konten['idg']; ?></td>
<td align="justify"><?php echo $konten['gejala']; ?></td>
<td align="justify"><?php echo $konten['pertanyaan']; ?></td>
<td align="center">
<a href="home.php?page=gejala&aksi=edit&idg=<?php echo $konten['idg'];?>"> <img src="images/icons/pencil.png" alt="" title="edite" border="0" /></a>
<a href="home.php?page=gejala&aksi=del&idg=<?php echo $konten['idg'];?>" onclick="return confirm('Anda yakin ingin menghapus?');"> <img src="images/icons/cross.png" alt="" title="Delete" border="0" /></a>
<?php
$no++;
?>
</td>
</tr>
<?php
}
?>
</table>
</div>
<div>
<p><h3>INPUT GEJALA</h3></p>
</div> <!-- End .content-box-header -->
<?php
require 'inc/koneksi.php';
if (isset($_POST['simpan'])){
$idg = buatID("t_gejala","G");
$gejala = $_POST['gejala'];
$pertanyaan = $_POST['pertanyaan'];
$insql= mysql_query("insert into t_gejala(idg,gejala,pertanyaan) values('$idg','$gejala','$pertanyaan')");
if ($insql){
echo "<script>window.location.href = 'home.php?page=gejala&message=success';</script>";
}else{
}
}
if (isset($_POST['update'])){
$idg = $_POST['idg'];
$gejala = $_POST['gejala'];
$pertanyaan = $_POST['pertanyaan'];
$edsql= mysql_query("UPDATE `sipadu`.`t_gejala` SET idg='$idg', gejala='$gejala', pertanyaan='$pertanyaan' WHERE `t_gejala`.`idg` = '$idg';") or die(mysql_error());
if ($edsql){
echo "<script>window.location.href = 'home.php?page=gejala';</script>";
}else{
}
}
if ($_GET['aksi']=="del"){
$idg = $_GET['idg'];
$delsql = mysql_query("DELETE FROM t_gejala WHERE idg='$idg'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=gejala';</script>";
}else{
}
}
?>
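<?php
/* A minimal sketch, assuming inc/buatid.php (not included in this dump) builds
   the next zero-padded code for a table from its current maximum id, e.g. G001,
   G002, ... for t_gejala. The column-name mapping below ('id' + lower-case
   prefix, i.e. idg/idp) is also an assumption inferred from the schema; the
   function_exists() guard keeps this sketch from clashing with the real helper
   loaded by the require above. */
if (!function_exists('buatID')) {
    function buatID($tabel, $prefix) {
        $kolom = 'id' . strtolower($prefix); // assumed: idg for "G", idp for "P"
        $q = mysql_query("SELECT MAX($kolom) AS maks FROM $tabel");
        $r = mysql_fetch_assoc($q);
        $urut = $r['maks'] ? ((int) substr($r['maks'], 1)) + 1 : 1; // strip prefix, increment
        return $prefix . sprintf('%03d', $urut); // e.g. G014 after G013
    }
}
?>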
<form action="" id="form" method="post">
<input type="hidden" name="idg" value="<?php
if (isset($_GET['aksi'])=="edit"){
$idg = $_GET['idg'];
$qedit = mysql_query("SELECT * from t_gejala WHERE idg='$idg'");
$redit = mysql_fetch_assoc($qedit); echo $idg;
$idg = $redit['idg'];
$gejala = $redit['gejala'];
$pertanyaan = $redit['pertanyaan'];}?>">
<table>
<tr valign="top">
<td width="100">Kode gejala</td>
<td width="5">: </td>
<td><input name="idg" type="text" value="<? echo buatID ("t_gejala","G"); ?>" maxlength="4" disabled>
</td>
</tr>
<tr valign="top">
<td width="100">Gejala</td>
<td width="5">: </td>
<td><textarea name="gejala" cols="45" required ><?php echo $gejala;?></textarea></td>
</tr>
<tr valign="top">
<td width="100">Pertanyaan</td>
<td width="5">: </td>
<td><textarea name="pertanyaan" cols="45" required ><?php echo $pertanyaan;?></textarea></td>
</tr>
<tr valign="top">
<td width="75"></td>
<td width="5"></td>
<td><?php if (isset($_GET['aksi'])=='edit'){
echo '<input class="button" type="submit" value="Simpan" name="update" />';
}else{
echo '<input class="button" type="submit" value="Tambahkan" name="simpan" />';}
?></td>
</tr>
</table>
</form>
<file_sep>/modul/user.php
<?php
require "inc/koneksi.php";
require "script.php";
if(empty($no))
$posisi=0;
$no=$posisi+1;
$que = mysql_query("SELECT * FROM t_user ");
?>
<h3 align="center"><br>     .: DATA USER KESELURUHAN:.</h3><br>
<div class="demo_jui">
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<td align="center">No</td>
<td align="center">Username</td>
<td align="center">Password</td>
<td align="center">Level</td>
<td align="center">Create Date</td>
<td align="center">Last Login</td>
<td align="center">Detail</td>
</tr>
</thead>
<?php
while ($konten = mysql_fetch_array($que)){
?>
<tr class="gradeC">
<td align="center"><?php echo $no; ?></td>
<td align="justify"><?php echo $konten['userid']; ?></td>
<td align="justify"><?php echo $konten['password']; ?></td>
<td align="justify"><?php echo $konten['level']; ?></td>
<td align="justify"><?php echo $konten['create_date']; ?></td>
<td align="justify"><?php echo $konten['last_login']; ?></td>
<td align="center">
<a href="home.php?page=user&aksi=del&userid=<?php echo $konten['userid'];?>" onclick="return confirm('Anda yakin ingin menghapus?');"> <img src="images/icons/cross.png" alt="" title="Delete" border="0" /></a>
<?php
$no++;
?>
</td>
</tr>
<?php
}
?>
</table>
</div>
<?php
require 'inc/koneksi.php';
if ($_GET['aksi']=="del"){
$userid = $_GET['userid'];
$delsql = mysql_query("DELETE FROM t_user WHERE userid='$userid'");
if ($delsql){
echo "<script>window.location.href = 'home.php?page=user';</script>";
}else{
}
}
?>
<file_sep>package io.naonedmakers.imvui;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.support.design.widget.Snackbar;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import io.naonedmakers.imvui.hotword.snowboy.AppResCopy;
import io.naonedmakers.imvui.hotword.snowboy.HotWordThread;
import io.naonedmakers.imvui.recognition.android.SpeechRecognizer;
/**
* Created by dbatiot on 19/09/17.
*/
public class HotWordActivity extends UiBaseActivity {
private static final String TAG = HotWordActivity.class.getSimpleName();
//private MediaPlayer player;
private HotWordThread hotWordThread;
private SpeechRecognizer speechRecognizer;
private String sensitivity;
private float audioGain;
private String activeModel;
private boolean hotWordActivated=false;
private String recoMode;
@Override
protected void onDestroy() {
//player.release();
//player=null;
super.onDestroy();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//default value
sensitivity = getString(R.string.pref_default_sensitivity);
audioGain = Float.parseFloat(getString(R.string.pref_default_audio_gain));
activeModel = getString(R.string.pref_default_snowboy_model);
recoMode = getString(R.string.pref_default_reco_mode);
AppResCopy.copyResFromAssetsToSD(this);
/**
player = new MediaPlayer();
try {
player.setDataSource(getResources().openRawResourceFd(R.raw.startlistening));
player.prepare();
} catch (IOException e) {
Log.e(TAG, "Playing ding sound error", e);
}*/
}
protected void startHotWordDetection() {
Log.v(TAG, "startHotWordDetection "+hotWordActivated);
if(hotWordActivated){
hotWordThread.startDetecting();
updateLog(" Waiting HotWord", TextType.WAIT);
}else{
updateLog(" Waiting icon touch", TextType.WAIT);
}
fab.setImageDrawable(ContextCompat.getDrawable(this, android.R.drawable.ic_btn_speak_now));
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
switchToListenning();
}
});
}
protected void stopHotWordDetection() {
Log.v(TAG, "stopHotWordDetection");
if (hotWordThread != null) {
hotWordThread.stopDetecting();
}
}
protected void switchToListenning() {
Log.v(TAG, "switchToListenning");
updateLog(" Please Speak",TextType.SPEAK);
stopHotWordDetection();
speechRecognizer = new SpeechRecognizer(handle, recoMode.equals("offline_android"));//preferOffline true or false
speechRecognizer.startRecognizing(this);
onListenningStart();
}
/**
     * Updates the UI once speech listening has started: swaps the FAB icon and
     * lets a tap cancel back to hot-word detection.
*/
protected void onListenningStart() {
Snackbar.make(fab, "Listenning to speech", Snackbar.LENGTH_SHORT);
fab.setImageDrawable(ContextCompat.getDrawable(this, android.R.drawable.ic_menu_close_clear_cancel));
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startHotWordDetection();
}
});
}
protected void switchToHotWordDetection() {
Log.v(TAG, "switchToHotWordDetection");
stopListenning();
startHotWordDetection();
}
protected void stopListenning() {
Log.v(TAG, "stopListenning");
if (speechRecognizer != null) {
speechRecognizer.stopRecognizing();
speechRecognizer = null;
}
}
/**
* Dispatch onPause() to fragments.
*/
@Override
protected void onPause() {
stopListenning();
stopHotWordDetection();
if (hotWordThread != null) {
hotWordThread.cleanDetecting();
hotWordThread=null;
}
updateLog(" ----> Pause",TextType.PAUSE);
super.onPause();
}
/**
* Dispatch onResume() to fragments. Note that for better inter-operation
* with older versions of the platform, at the point of this call the
* fragments attached to the activity are <em>not</em> resumed. This means
* that in some cases the previous state may still be saved, not allowing
* fragment transactions that modify the state. To correctly interact
* with fragments in their proper state, you should instead override
* {@link #onResumeFragments()}.
*/
@Override
protected void onResume() {
super.onResume();
try {
SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
sensitivity = sharedPref.getString(SettingsActivity.HotWordPreferenceFragment.HOT_WORD_SENSITIVITY, sensitivity);
audioGain = Float.parseFloat(sharedPref.getString(SettingsActivity.HotWordPreferenceFragment.HOT_WORD_AUDIO_GAIN, "" + audioGain));
activeModel = sharedPref.getString(SettingsActivity.HotWordPreferenceFragment.HOT_WORD_MODEL, activeModel);
hotWordActivated = sharedPref.getBoolean(SettingsActivity.HotWordPreferenceFragment.HOT_WORD_ACTIVATED, false);
recoMode = sharedPref.getString(SettingsActivity.RecognitionPreferenceFragment.RECO_MODE, recoMode);
} catch (Exception e) {
Toast.makeText(this, "SharedPreferences Exception" + e.getMessage(), Toast.LENGTH_LONG).show();
}
if (checkAudioRecordPermission()) {
String commonRes = this.getFilesDir().getAbsolutePath() + "/common.res";
hotWordThread = new HotWordThread(handle, this.getFilesDir().getAbsolutePath() + "/" + activeModel, commonRes, sensitivity, audioGain);
startHotWordDetection();
}
}
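    /**
     * Bridge between the background workers and the UI thread: HotWordThread
     * posts MSG_HOT_* messages (hot word detected, sound level, error) and
     * SpeechRecognizer posts MSG_STT_* messages (recognized text, error, sound
     * level). A detected hot word switches the activity into listening mode;
     * recognition errors fall back to hot-word detection via the callbacks below.
     */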
public Handler handle = new Handler() {
@Override
public void handleMessage(Message msg) {
MsgEnum message = MsgEnum.getMsgEnum(msg.what);
switch (message) {
//######################
// HOT WORD
//######################
case MSG_HOT_DETECTED:
switchToListenning();
break;
case MSG_HOT_LEVEL:
updateSoundLevel((int) msg.obj);
break;
case MSG_HOT_ERROR:
updateLog(" ----> " + msg.toString(),TextType.ERROR);
stopHotWordDetection();
break;
//######################
// SST
//######################
case MSG_STT_TEXT:
onListeningFinished((String) msg.obj);
break;
case MSG_STT_ERROR:
onListeningError((String) msg.obj);
break;
case MSG_STT_LEVEL:
//updateLog(" ----> " + message, "black");
updateSoundLevel((int) msg.obj);
break;
default:
super.handleMessage(msg);
break;
}
}
};
/**
* Event fires when recognition engine finish listening
*/
public void onListeningFinished(String queryText) {
updateLog(queryText,TextType.REQ);
}
/**
* Event fires when recognition engine error
*/
public void onListeningError(String errorText) {
updateLog(errorText,TextType.ERROR);
switchToHotWordDetection();
}
}<file_sep>package io.naonedmakers.imvui.synthesis.android;
import android.content.Context;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.speech.tts.TextToSpeech;
import android.speech.tts.UtteranceProgressListener;
import android.util.Log;
import java.util.Locale;
import io.naonedmakers.imvui.MsgEnum;
import io.naonedmakers.imvui.R;
public class SpeechSynthetizer extends UtteranceProgressListener implements TextToSpeech.OnInitListener {
private static final String TAG = "SpeechSynthetizer";
private TextToSpeech textToSpeech;
private Handler synthHandler;
private boolean isReady = false;
public SpeechSynthetizer(final Context context, Handler pSynthHandler) {
this.isReady = false;
textToSpeech = new TextToSpeech(context.getApplicationContext(), this);
        //params.put(TextToSpeech.Engine.KEY_PARAM_STREAM, String.valueOf(AudioManager.STREAM_ALARM));
textToSpeech.setOnUtteranceProgressListener(this);
synthHandler = pSynthHandler;
AudioManager audioManager = (AudioManager) context.getApplicationContext().getSystemService(context.AUDIO_SERVICE);
audioManager.adjustStreamVolume(AudioManager.STREAM_ALARM ,AudioManager.ADJUST_MUTE, 0);
audioManager.adjustStreamVolume(AudioManager.STREAM_SYSTEM ,AudioManager.ADJUST_MUTE, 0);
audioManager.adjustStreamVolume(AudioManager.STREAM_MUSIC ,AudioManager.ADJUST_RAISE, 0);
audioManager.adjustStreamVolume(AudioManager.STREAM_NOTIFICATION ,AudioManager.ADJUST_MUTE, 0);
audioManager.adjustStreamVolume(AudioManager.STREAM_VOICE_CALL ,AudioManager.ADJUST_MUTE, 0);
audioManager.adjustStreamVolume(AudioManager.STREAM_RING ,AudioManager.ADJUST_MUTE, 0);
amStreamVoiceCallMaxVol = audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
if (audioManager.isBluetoothScoAvailableOffCall()) {
audioManager.startBluetoothSco();
}
if (!audioManager.isSpeakerphoneOn()) {
audioManager.setSpeakerphoneOn(true);
}
audioManager.requestAudioFocus(null, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN);
}
int amStreamVoiceCallMaxVol = 10;
/**
* OnInitListener
*
* @param status
*/
@Override
public void onInit(int status) {
this.isReady = (status != TextToSpeech.ERROR);
Log.d(TAG, "onInit " + isReady + " " + !textToSpeech.isSpeaking());
if (this.isReady) {
//textToSpeech.setLanguage(Locale.getDefault());
//textToSpeech.setPitch(1.3f);
//textToSpeech.setSpeechRate(1f);
//textToSpeech.setLanguage(Locale.FRENCH);
//Log.e("XgetDefaultEngine",""+textToSpeech.getDefaultEngine());
//Log.e("XgetDefaultVoice",""+textToSpeech.getDefaultVoice());
//Log.e("XgetAvailableLanguages",""+textToSpeech.getAvailableLanguages());
//Log.e("XgetEngines",""+textToSpeech.getEngines());
//Log.e("XgetVoice",""+textToSpeech.getVoice());
//Log.e("XgetVoices",""+textToSpeech.getVoices());
//Log.e("XgetAvailableLanguages",""+textToSpeech.getAvailableLanguages());
//textToSpeech.addEarcon("bonjour", "io.naonedmakers.imvui", R.raw.goodmorn1);
} else {
sendMessage(MsgEnum.MSG_TTS_ERROR, null);
Log.e("XgetDefaultEngine", "FAILED_TO_INITILIZE_TTS_ENGINE");
}
}
/**
* Called when an utterance "starts" as perceived by the caller.
*
* @param utteranceId
*/
@Override
public void onStart(String utteranceId) {
Log.d(TAG, "onStart " + utteranceId);
sendMessage(MsgEnum.MSG_TTS_START, null);
}
/**
* Called when an utterance has successfully completed processing.
*
* @param utteranceId
*/
@Override
public void onDone(String utteranceId) {
Log.d(TAG, "onDone " + utteranceId);
if (utteranceId.startsWith("FINAL")) {
sendMessage(MsgEnum.MSG_TTS_FINAL_DONE, null);
} else {
sendMessage(MsgEnum.MSG_TTS_PARTIAL_DONE, null);
}
}
/**
* @param s
* @deprecated
*/
@Override
public void onError(String s) {
Log.d(TAG, "onError " + s);
sendMessage(MsgEnum.MSG_TTS_ERROR, null);
}
public void destroy() {
Log.d(TAG, "onDestroy ");
this.isReady = false;
if (this.textToSpeech != null) {
this.textToSpeech.stop();
this.textToSpeech.shutdown();
this.textToSpeech = null;
}
}
public void speak(final String message, boolean isPartial) {
Log.d(TAG, "speak " + message + " " + isReady);
if (isReady && !textToSpeech.isSpeaking() && message != null) {
String utteranceId = isPartial ? "PARTIAL" : ("FINAL" + message.hashCode());
//Bundle params = null;
Bundle params = new Bundle();
params.putFloat(TextToSpeech.Engine.KEY_PARAM_VOLUME, (float) amStreamVoiceCallMaxVol);
params.putInt(TextToSpeech.Engine.KEY_PARAM_STREAM, AudioManager.STREAM_VOICE_CALL);
textToSpeech.speak(message, TextToSpeech.QUEUE_FLUSH, params, utteranceId);
} else {
sendMessage(MsgEnum.MSG_TTS_ERROR, null);
}
}
public void playEarcon(final String earcon) {
Log.d(TAG, "speak " + earcon + " " + isReady);
if (isReady && !textToSpeech.isSpeaking() && earcon != null) {
String utteranceId = earcon.hashCode() + "";
Bundle params = new Bundle();
params.putFloat(TextToSpeech.Engine.KEY_PARAM_VOLUME, (float) amStreamVoiceCallMaxVol);
params.putInt(TextToSpeech.Engine.KEY_PARAM_STREAM, AudioManager.STREAM_VOICE_CALL);
textToSpeech.playEarcon(earcon, TextToSpeech.QUEUE_FLUSH, params, utteranceId);
}
}
public void playSilence(long milis) {
Log.d(TAG, "playSilence " + isReady + "" + !textToSpeech.isSpeaking());
if (isReady && !textToSpeech.isSpeaking()) {
textToSpeech.playSilentUtterance(milis, TextToSpeech.QUEUE_FLUSH, "silent");
}
}
private void sendMessage(MsgEnum what, Object obj) {
if (null != synthHandler) {
Message msg = synthHandler.obtainMessage(what.ordinal(), obj);
synthHandler.sendMessage(msg);
}
}
}
<file_sep># im Voice User Interface
An Android Studio project for building the Android voice and touch interface
**What's inside**
* Voice activation (thx snowboy) [HotWordActivity.java](app/src/main/java/io/naonedmakers/imvui/HotWordActivity.java)
* Speech Recognition & Transcription (android stt engine) [ConversationActivity.java](app/src/main/java/io/naonedmakers/imvui/ConversationActivity.java)
* Intent & Meaning (api.ai or local regexp) [io.naonedmakers.imvui.meaning.*](app/src/main/java/io/naonedmakers/imvui/meaning)
* Action (mqtt publish) [MqttActivity.java](app/src/main/java/io/naonedmakers/imvui/MqttActivity.java)
* Speech Response (android tts engine) [io.naonedmakers.imvui.synthesis.android.*](app/src/main/java/io/naonedmakers/imvui/synthesis/android)
* Fullscreen WebView that displays the web touch interface of im-broker [WebTouchActivity.java](app/src/main/java/io/naonedmakers/imvui/WebTouchActivity.java)
<file_sep>package io.naonedmakers.imvui.meaning.local;
import com.google.gson.JsonElement;
import com.google.gson.JsonPrimitive;
import java.util.HashMap;
import java.util.Random;
import ai.api.AIServiceException;
import io.naonedmakers.imvui.meaning.MeanResponse;
import io.naonedmakers.imvui.meaning.MeanService;
/**
* Created by dbatiot on 29/09/17.
*/
public class LocalAiService implements MeanService {
private static final String TAG = LocalAiService.class.getSimpleName();
/**
* Find the intent behind the stringRequest
*
* @param stringRequest request object to the service. Cannot be <code>null</code>
* @return response object from service. Never <code>null</code>
*/
@Override
public MeanResponse request(String stringRequest) throws AIServiceException {
MeanResponse meanResponse = new MeanResponse();
meanResponse.source = "im-vui";
meanResponse.statusCode = 0;
meanResponse.actionIncomplete = false;
meanResponse.resolvedQuery = stringRequest;
stringRequest = stringRequest.toLowerCase();
if (stringRequest.contains("tête") | stringRequest.contains("êtes")) {
meanResponse.action = "head/move";
meanResponse.speech = "ok je bouge la tête";
meanResponse.intentName = "headmove";
} else if (stringRequest.contains("casque") | stringRequest.contains("visière")) {
meanResponse.action = "helmet/move";
meanResponse.speech = "ok je bouge mon casque";
meanResponse.intentName = "helmetmove";
}
else if (stringRequest.contains("bras") | stringRequest.contains("quoi") | stringRequest.contains("lebrun")) {
Random random = new Random();
if (random.nextBoolean()) {
meanResponse.action = "leftarm/move";
} else {
meanResponse.action = "rightarm/move";
}
meanResponse.speech = "ok je bouge les bras";
meanResponse.intentName = "sidepartmove";
} else if (stringRequest.contains("main") | stringRequest.contains("ama") | stringRequest.contains("gamin")) {
Random random = new Random();
if (random.nextBoolean()) {
meanResponse.action = "lefthand/move";
} else {
meanResponse.action = "righthand/move";
}
meanResponse.speech = "ok je bouge les mains";
meanResponse.intentName = "sidepartmove";
} else if (stringRequest.contains("bonjour")||stringRequest.contains("salut")) {
meanResponse.action = null;
meanResponse.speech = "Bonjour Tony";
meanResponse.intentName = "greetings";
} else if (stringRequest.contains("ieu") | stringRequest.contains("mk2")) {
meanResponse.action = "eyes";
meanResponse.speech = "ok j'active mes yeux";
meanResponse.intentName = "eyes";
} else if (stringRequest.contains("bleu")
| stringRequest.contains("rouge")
| stringRequest.contains("vert")
| stringRequest.contains("jaune")
| stringRequest.contains("orange")
| stringRequest.contains("rose")
| stringRequest.contains("violet")) {
meanResponse.action = "im/color";
meanResponse.speech = "J'aime bien cette couleur";
meanResponse.intentName = "color";
HashMap<String, JsonElement> params = new HashMap<String, JsonElement>();
if(stringRequest.contains("bleu")) {
params.put("rgba", new JsonPrimitive("0000FFFF"));
}else if(stringRequest.contains("rouge")) {
params.put("rgba", new JsonPrimitive("FF0000FF"));
}else if(stringRequest.contains("vert")) {
params.put("rgba", new JsonPrimitive("00FF00FF"));
}else if(stringRequest.contains("jaune")) {
params.put("rgba", new JsonPrimitive("FFFF00FF"));
}else if(stringRequest.contains("orange")) {
params.put("rgba", new JsonPrimitive("FFA500FF"));
}else if(stringRequest.contains("rose")) {
params.put("rgba", new JsonPrimitive("FFC0CBFF"));
}else if(stringRequest.contains("violet")) {
params.put("rgba", new JsonPrimitive("800080FF"));
}
meanResponse.parameters = params;
} else {
meanResponse.action = null;
meanResponse.speech = "Je ne comprends pas";
meanResponse.intentName = null;
//meanResponse.intentName = "none";
//HashMap<String, JsonElement> params = new HashMap<String, JsonElement>();
//params.put("wipparam", new JsonPrimitive("wipvalue"));
//meanResponse.parameters = params;
}
return meanResponse;
}
}
<file_sep>package io.naonedmakers.imvui;
import android.Manifest;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothHeadset;
import android.bluetooth.BluetoothProfile;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.graphics.PorterDuff;
import android.media.AudioManager;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.design.widget.FloatingActionButton;
import android.support.transition.TransitionManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v4.media.session.MediaSessionCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.Html;
import android.text.Spanned;
import android.transition.Fade;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ProgressBar;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public class UiBaseActivity extends MqttActivity {
private static final String TAG = UiBaseActivity.class.getSimpleName();
private TextView log;
protected FloatingActionButton fab;
private ScrollView logView;
//TODO change with https://github.com/zagum/SpeechRecognitionView
private ProgressBar soundLevel;
private Menu menu;
private boolean headSetStatus = false;
private static final int REQUEST_AUDIO_PERMISSIONS_ID = 33;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
fab = (FloatingActionButton) findViewById(R.id.fab);
log = (TextView) findViewById(R.id.log);
logView = (ScrollView) findViewById(R.id.logView);
soundLevel = (ProgressBar) findViewById(R.id.progressbar_level);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_home, menu);
this.menu = menu;
displayBTHeadSetStatus();
displayMqtttStatus();
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
Intent intent = new Intent(this, SettingsActivity.class);
startActivity(intent);
return true;
} else if (id == R.id.action_admin) {
SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(this);
String lastBrokerIp = sharedPref.getString(SettingsActivity.MeanPreferenceFragment.BROKER_IP, null);
if (lastBrokerIp != null) {
Toast.makeText(this, "Opening Web admin", Toast.LENGTH_LONG).show();
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("http://" + lastBrokerIp + ":8081/"));
startActivity(browserIntent);
} else {
Toast.makeText(this, "No Server yet found", Toast.LENGTH_LONG).show();
}
return true;
} else if (id == R.id.action_web_touch) {
Intent intent = new Intent(this, WebTouchActivity.class);
startActivity(intent);
return true;
} else if (id == R.id.action_touch) {
Intent intent = new Intent(this, TouchActivity.class);
startActivity(intent);
return true;
} else if (id == R.id.action_mqtt) {
//findAndConnectToLanMqttBroker() {
return true;
} else if (id == R.id.action_headset) {
return true;
}
return super.onOptionsItemSelected(item);
}
//##############################################################
static int MAX_LOG_LINE_NUM = 14;
static ArrayList<String> textList = new ArrayList<String>();
static ArrayList<TextType> typeList = new ArrayList<TextType>();
public static enum TextType {
REQ, RES, WAIT, PAUSE, SPEAK, ERROR
}
public void updateLog(final String text, final TextType textType) {
log.post(new Runnable() {
@Override
public void run() {
Iterator<String> texti = textList.iterator();
Iterator<TextType> typei = typeList.iterator();
while (texti.hasNext()) {
String text = texti.next(); // must be called before you can call i.remove()
TextType type = typei.next();
if(TextType.WAIT.equals(type) || TextType.SPEAK.equals(type)||TextType.PAUSE.equals(type)){
texti.remove();
typei.remove();
}
}
typeList.add(textType);
textList.add(text);
if(textList.size()>MAX_LOG_LINE_NUM){
typeList.remove(0);
textList.remove(0);
}
String strLog ="";
for(int i=0;i<textList.size();i++){
String color = "black"; //typeList.get(i);
if(TextType.WAIT.equals(typeList.get(i))) {
color = "blue";
}else if(TextType.SPEAK.equals(typeList.get(i))) {
color = "#006400";
}else if(TextType.REQ.equals(typeList.get(i))) {
color = "black";
}else if(TextType.RES.equals(typeList.get(i))) {
color = "#FF69B4";
}else if(TextType.PAUSE.equals(typeList.get(i))) {
color = "black";
}else if(TextType.ERROR.equals(typeList.get(i))) {
color = "red";
}
String str = "<font color='" + color + "'>" + textList.get(i) + "</font>" + "<br>";
strLog = (strLog == null || strLog.length() == 0) ? str : strLog + str;
}
//Fade mFade = new Fade(Fade.IN);
// Start recording changes to the view hierarchy
//TransitionManager.beginDelayedTransition(log, mFade);
log.setText(Html.fromHtml(strLog));
}
});
logView.post(new Runnable() {
@Override
public void run() {
logView.fullScroll(ScrollView.FOCUS_DOWN);
}
});
}
public void updateSoundLevel(final int level) {
soundLevel.post(new Runnable() {
@Override
public void run() {
soundLevel.setProgress(level);
}
});
}
@SuppressWarnings("deprecation")
public static Spanned fromHtml(String html) {
Spanned result;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N) {
result = Html.fromHtml(html, Html.FROM_HTML_MODE_LEGACY);
} else {
result = Html.fromHtml(html);
}
return result;
}
protected boolean checkAudioRecordPermission() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
// No explanation needed, we can request the permission.
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO}, REQUEST_AUDIO_PERMISSIONS_ID);
return false;
} else {
return true;
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case REQUEST_AUDIO_PERMISSIONS_ID: {
// If request is cancelled, the result arrays are empty.
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// permission was granted, yay! Do the
// contacts-related task you need to do.
} else {
this.finish();
// permission denied, boo! Disable the
// functionality that depends on this permission.
}
return;
}
}
}
@Override
protected void onResume() {
super.onResume();
initBT();
//initMediaSession();
//successfullyRetrievedAudioFocus();
textList.clear();
typeList.clear();
}
public void setMqttStatus(String connected) {
super.setMqttStatus(connected);
displayMqtttStatus();
}
public void displayMqtttStatus() {
if (menu != null) {
int colorId = 0;
if (getMqttStatusValue().equals(""+true)){
colorId = getResources().getColor(R.color.colorConnected);
} else if (getMqttStatusValue().equals(""+false)){
colorId = getResources().getColor(R.color.colorAccent);
} else if (getMqttStatusValue().equals("")){
colorId = getResources().getColor(R.color.colorWorking);
}
menu.findItem(R.id.action_mqtt).getIcon().mutate();
menu.findItem(R.id.action_mqtt).getIcon().setColorFilter(colorId, PorterDuff.Mode.SRC_ATOP);
menu.findItem(R.id.action_admin).getIcon().mutate();
menu.findItem(R.id.action_admin).getIcon().setColorFilter(colorId, PorterDuff.Mode.SRC_ATOP);
}
}
/*******************************************************************************************
* ******************************************************************************************
* BLUETOOTH
* ******************************************************************************************
*******************************************************************************************/
public void setBTHeadSetStatus(boolean connected) {
headSetStatus = connected;
displayBTHeadSetStatus();
}
public void displayBTHeadSetStatus() {
//at startup the menu is not present
if (menu != null) {
int colorId = 0;
if (headSetStatus) {
colorId = getResources().getColor(R.color.colorConnected);
} else {
colorId = getResources().getColor(R.color.colorAccent);
}
menu.findItem(R.id.action_headset).getIcon().mutate();
menu.findItem(R.id.action_headset).getIcon().setColorFilter(colorId, PorterDuff.Mode.SRC_ATOP);
}
}
MediaSessionCompat mediaSession;
private void initMediaSession() {
/**
BroadcastReceiver remoteReceiver = new MediaButtonReceiver(){
@Override public void onReceive(Context context, Intent intent) {
if (Intent.ACTION_MEDIA_BUTTON.equals(intent.getAction())) {
final KeyEvent event = intent.getParcelableExtra(Intent.EXTRA_KEY_EVENT);
Log.i(TAG, "onMediaButtonRecevier "+ event.getAction());
}else if (Intent.ACTION_VOICE_COMMAND.equals(intent.getAction())) {
Log.i(TAG, "onMediaButtonRecevier ACTION_VOICE_COMMAND");
}else{
Log.i(TAG, "onMediaButtonRecevier"+intent.getAction());
};
abortBroadcast();
}
};
IntentFilter filter = new IntentFilter();
filter.addAction(Intent.ACTION_MEDIA_BUTTON);
filter.addAction(Intent.ACTION_VOICE_COMMAND);
filter.setPriority(999);
if(mediaSession!=null) {
this.unregisterReceiver(remoteReceiver);
}
this.registerReceiver(remoteReceiver,filter);
*/
//ComponentName mediaButtonReceiver = new ComponentName(getApplicationContext(), MediaButtonReceiver.class);
mediaSession = new MediaSessionCompat(this.getApplicationContext(), TAG);
mediaSession.setMediaButtonReceiver(null);
mediaSession.setCallback(new MediaSessionCompat.Callback() {
@Override
public boolean onMediaButtonEvent(Intent mediaButtonEvent) {
Log.i(TAG, "onMediaButtonEvent ");
return super.onMediaButtonEvent(mediaButtonEvent);
}
});
mediaSession.setFlags(MediaSessionCompat.FLAG_HANDLES_MEDIA_BUTTONS | MediaSessionCompat.FLAG_HANDLES_TRANSPORT_CONTROLS);
mediaSession.setActive(true);
}
BluetoothHeadset mBluetoothHeadset;
private void initBT() {
BluetoothAdapter mBluetoothAdapter;
// Get the default adapter
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if (!mBluetoothAdapter.isEnabled()) {
Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableBtIntent, 0);
}
BluetoothProfile.ServiceListener mProfileListener = new BluetoothProfile.ServiceListener() {
public void onServiceConnected(int profile, BluetoothProfile proxy) {
if (profile == BluetoothProfile.HEADSET) {
Log.d(TAG, "Connecting HeadsetService...");
mBluetoothHeadset = (BluetoothHeadset) proxy;
List<BluetoothDevice> devices = mBluetoothHeadset.getConnectedDevices();
Log.d(TAG, "HeadsetService..." + devices.size());
setBTHeadSetStatus((devices.size() > 0));
}
}
public void onServiceDisconnected(int profile) {
if (profile == BluetoothProfile.HEADSET) {
Log.d(TAG, "Unexpected Disconnect of HeadsetService...");
mBluetoothHeadset = null;
setBTHeadSetStatus(false);
}
}
};
// Establish connection to the proxy.
mBluetoothAdapter.getProfileProxy(this, mProfileListener, BluetoothProfile.HEADSET);
//Monitor profile events
IntentFilter filter = new IntentFilter();
//filter.addAction(BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED);
filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
if (mProfileReceiver != null) {
try {
unregisterReceiver(mProfileReceiver);
} catch (Exception e) {//DO NOTHING
}
try {
registerReceiver(mProfileReceiver, filter);
} catch (Exception e) {//DO NOTHING
}
}
}
private boolean successfullyRetrievedAudioFocus() {
AudioManager audioManager = (AudioManager) this.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
int result = audioManager.requestAudioFocus(new AudioManager.OnAudioFocusChangeListener() {
@Override
public void onAudioFocusChange(int i) {
Log.i(TAG, "onAudioFocusChange " + (AudioManager.AUDIOFOCUS_GAIN == i));
}
}, AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN);
return result == AudioManager.AUDIOFOCUS_GAIN;
}
private BroadcastReceiver mProfileReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
final String action = intent.getAction();
mBluetoothHeadset = null;
//if (BluetoothHeadset.ACTION_AUDIO_STATE_CHANGED.equals(action)) {
// notifyAudioState(intent);
//}
if (BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED.equals(action)) {
notifyConnectState(intent);
}
}
};
/*
private void notifyAudioState(Intent intent) {
final int state = intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, -1);
String message;
switch (state) {
case BluetoothHeadset.STATE_AUDIO_CONNECTED:
message = "Audio Connected";
this.setBTHeadSetStatus(true);
break;
case BluetoothHeadset.STATE_AUDIO_CONNECTING:
message = "Audio Connecting";
break;
case BluetoothHeadset.STATE_AUDIO_DISCONNECTED:
message = "Audio Disconnected";
this.setBTHeadSetStatus(false);
break;
default:
message = "Audio Unknown";
break;
}
Log.d(TAG, " HeadsetnotifyAudioState..."+message);
//Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}*/
private void notifyConnectState(Intent intent) {
final int state = intent.getIntExtra(BluetoothHeadset.EXTRA_STATE, -1);
String message;
switch (state) {
case BluetoothHeadset.STATE_CONNECTED:
message = "Connected";
this.setBTHeadSetStatus(true);
break;
case BluetoothHeadset.STATE_CONNECTING:
message = "Connecting";
break;
case BluetoothHeadset.STATE_DISCONNECTING:
message = "Disconnecting";
break;
case BluetoothHeadset.STATE_DISCONNECTED:
message = "Disconnected";
this.setBTHeadSetStatus(false);
break;
default:
message = "Connect Unknown";
break;
}
Log.d(TAG, " HeadsetnotifyConnectState..." + message);
//Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}
}
<file_sep>package io.naonedmakers.imvui.meaning;
import com.google.gson.JsonElement;
import com.google.gson.annotations.SerializedName;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import ai.api.model.AIContext;
import ai.api.model.AIEvent;
import ai.api.model.AIOriginalRequest;
import ai.api.model.AIOutputContext;
import ai.api.model.Fulfillment;
import ai.api.model.Metadata;
import ai.api.model.ResponseMessage;
import ai.api.model.Result;
import ai.api.model.Status;
/**
* Created by dbatiot on 30/09/17.
*/
public class MeanResponse {
private static final long serialVersionUID = 1L;
/**
* Unique identifier of the result.
*/
//private String id;
//private Date timestamp;
//private String lang;
//private String sessionId;
//private float score;
//private List<AIOutputContext> contexts;
public Integer statusCode;
public String action;
public String source;
public HashMap<String, JsonElement> parameters;
public String resolvedQuery;
public boolean actionIncomplete;
public String intentName;
public String speech;
public List<ResponseMessage> messages;
public String displayText;
//private Map<String, JsonElement> data;
@Override
public String toString() {
return "MeanResponse{" +
"statusCode=" + statusCode +
", action='" + action + '\'' +
", source='" + source + '\'' +
", parameters=" + parameters +
", resolvedQuery='" + resolvedQuery + '\'' +
", actionIncomplete=" + actionIncomplete +
", intentName='" + intentName + '\'' +
", speech='" + speech + '\'' +
", messages=" + messages +
", displayText='" + displayText + '\'' +
'}';
}
}
|
448230ddc0eb44a96d306834d9bc57273976b0de
|
[
"Markdown",
"Java"
] | 6
|
Java
|
naoned-makers/im-vui
|
d914a3d6af626003f724f7f0eec8b78d6e07f233
|
331bcb04b2bc39345000c272ea8d2247206b5eb0
|
refs/heads/main
|
<file_sep>#!/bin/bash
#SBATCH --job-name=v10032bench
#SBATCH --array=1-7
#SBATCH --mem=64G
#SBATCH --gres=gpu:V100:1
#SBATCH --time=00:30:00
#SBATCH --output=./v100_32GB_benchmark_results/slurm%j.out
#SBATCH --error=slurm%j.err
#SBATCH --partition=m3g
#SBATCH --constraint="V100-32G"
############################ EDIT YOUR FILE PATH HERE ###################################
# go to benchmarking folder
# cd path/to/ARDC-ML/ai-benchmark
# module load CUDA and CUDNN
module load cuda/11.0
module load cudnn/8.0.5-cuda11
# activate your virtual environment
source benchmark_venv/bin/activate
# install TensorFlow and ai-benchmark
pip install tensorflow-gpu
pip install ai-benchmark
# make folder to store results
mkdir -p v100_32GB_benchmark_results
echo "now processing task id:: " ${SLURM_ARRAY_TASK_ID}
python benchmark.py &> v100_32GB_benchmark_results/v100_32GB_benchmark_${SLURM_ARRAY_TASK_ID}.txt
<file_sep># Instructions to run this example job
This contains an example Python file to test the job monitoring script with. The Python file is taken from the Data Fluency Introduction to Deep Learning and Tensorflow course located here: https://github.com/MonashDataFluency/intro-to-tensorflow. It is a simple Tensorflow script which trains a convolutional neural network (CNN) to classify the MNIST dataset.
This example assumes you have a conda environment with tensorflow-gpu installed, and that the normal commands to run the job would be:
```
cd path/to/your/Intro_to_CNNs.py
source path/to/conda/bin/activate
conda activate CNN_venv
python Intro_to_CNNs.py
```
Edit the job monitoring script per the instructions on the other page to reflect this, or to reflect the method you would usually use to run a Python script.
Included in this repo is an example filtered log file `nvidia-filtered.log` from monitoring this job so you can see how it looks in the Jupyter notebook.
You'll still need to edit the notebook filepath to this logfile, and update where the plots are saved.
<file_sep># ai-benchmark for GPUs on M3
As part of the ongoing [ARDC Environments to Accelerate Machine Learning-Based Discovery project](https://ardc.edu.au/project/environments-to-accelerate-machine-learning-based-discovery/), we are working to better understand the hardware available to researchers who use M3. This work has been used to create documentation about the [GPUs on MASSIVE M3](https://docs.massive.org.au/M3/GPUs-on-M3.html) which will inform researchers how to select the right hardware for their needs. We have used [ai-benchmark](https://pypi.org/project/ai-benchmark/) to gather data about how the [GPUs available on M3](https://docs.massive.org.au/M3/m3users.html) perform across machine learning related activities.
This folder contains the scripts used to run ai-benchmark across our GPUs, including:
- Tesla V100-PCIE-16GB
- Tesla V100-PCIE-32GB
- Tesla P100-PCIE-16GB
- Tesla K80
- Tesla P4
ai-benchmark is an open source Python library which evaluates hardware across a range of AI activities and provides a score for each individual activity, as well as a final:
- Device Inference Score;
- Device Training Score, and;
- Device AI score.
You can compare these results to the ai-benchmark ranking here: http://ai-benchmark.com/ranking_deeplearning.html.
## Information about the scripts
Each script provided here runs the benchmark 7 times per GPU so any variance can be observed. These scripts are written as [batch jobs](https://docs.massive.org.au/M3/slurm/simple-batch-jobs.html#running-simple-batch-jobs) that once submitted, will be queued and execute when there are sufficient resources available.
The P4 and K80 GPUs differ, as they are used exclusively for desktops. The best way to benchmark these is interactively through the [CvL desktop](https://www.cvl.org.au/cvl-desktop/getting-started-with-the-cvl). There are instruction files on how to run these benchmarks too, called ```p4_instructions.txt``` and ```k80_instructions.txt```.
Each benchmark should take approximately 20 minutes to complete.
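For reference, each batch script in this folder ends by running the two-line ```benchmark.py``` that sits alongside it; it simply imports ai-benchmark and runs the full suite:
```
# benchmark.py (included in this folder): runs the complete ai-benchmark suite
from ai_benchmark import AIBenchmark

results = AIBenchmark().run()
```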
## Instructions to run the benchmarks
1. Clone the git repository and then navigate to ai-benchmark
```
$ git clone repository
$ git pull
$ cd path/to/ARDC-ML/ai-benchmark
```
2. Set up a Python virtual environment so you will be able to install ai-benchmark
```
$ /usr/local/python/3.8.7-static/bin/python3 -m venv benchmark_venv
```
3. Edit the relevant GPU file with your preferred text editor and ensure the path is set to your benchmark directory. The section to edit is clearly marked in the files. For example:
```nano gpu_benchmark.sh```
```
############################ EDIT YOUR FILE PATH HERE ###################################
# go to benchmarking folder
# cd path/to/ARDC-ML/ai-benchmark
```
4. Submit the benchmark using sbatch
```
sbatch gpu_benchmark.sh
```
This will then run the benchmarks and save the outputs in ```path/to/ARDC-ML/ai-benchmark/gpu_benchmark_results```.
<file_sep>#!/bin/bash
# A script to monitor GPU and CPU usage for arbitrary commands (Python flavour)
# Define BASH functions for monitoring
outputcputime () {
while true
do
getcputime | ts %.s >> cputime-$LOGDATE.log
sleep 1
done
}
getcputime () {
local proc="python"
local clk_tck=$(getconf CLK_TCK)
local usercputime=0
local syscputime=0
local pids=$(pgrep $proc)
for pid in $pids;
do
local stats=$(cat "/proc/$pid/stat")
local statarr=($stats)
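# Fields 14 and 15 of /proc/<pid>/stat (array indices 13 and 14 below) are utime and stime, in clock ticks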
local utime=${statarr[13]}
local stime=${statarr[14]}
usercputime=$(bc <<< "scale=3; $usercputime + $utime / $clk_tck")
syscputime=$(bc <<< "scale=3; $syscputime + $stime / $clk_tck")
done
echo $usercputime $syscputime
}
# Setup variables
LOGDATE=`date +%s`
# Set up your environment here
# ###############################################################
cd path/to/job-monitoring/CNN_example
source path/to/conda/bin/activate
conda activate CNN
# ################################################################
# Export environment to file for determining running parameters
env > environment-$LOGDATE.log
# Insert the command you want to monitor here, and add an “&” to the end of it
# ###############################################################
python path/to/job-monitoring/CNN_example/Intro_to_CNNs.py &
# ###############################################################
# Monitoring the job
PID1=$!
echo $PID1
echo Running program PID: $PID1;
PYTHONPID=$(pgrep -d',' python)
printf "PythonPIDs=%s\n" "$PYTHONPID"
# Record gpu usage for node
nvidia-smi dmon -s umtp -i 0 -d 1 | ts %.s > nvidia-$LOGDATE.log &
# Loop to record user and sys cpu times from proc
outputcputime &
echo Job Monitoring Complete
<file_sep>from ai_benchmark import AIBenchmark
results = AIBenchmark().run()
<file_sep>#!/bin/bash
#SBATCH --job-name=p100benchmark
#SBATCH --array=1-7
#SBATCH --mem=64G
#SBATCH --gres=gpu:1
#SBATCH --time=00:30:00
#SBATCH --output=./p100_benchmark_results/slurm%j.out
#SBATCH --error=slurm%j.err
#SBATCH --partition=m3h
############################ EDIT YOUR FILE PATH HERE ###################################
# go to benchmarking folder
# cd path/to/ARDC-ML/ai-benchmark
# module load CUDA and CUDNN
module load cuda/11.0
module load cudnn/8.0.5-cuda11
# activate your virtual environment
source benchmark_venv/bin/activate
# install TensorFlow and ai-benchmark
pip install tensorflow-gpu
pip install ai-benchmark
# make folder to store results
mkdir -p p100_benchmark_results
echo "now processing task id:: " ${SLURM_ARRAY_TASK_ID}
python benchmark.py &> p100_benchmark_results/p100_benchmark_${SLURM_ARRAY_TASK_ID}.txt
<file_sep># Job Monitoring on M3.
Note: This documentation is under active development, meaning that it can change over time as we refine it. Please email <EMAIL> if you require assistance, or have suggestions to improve this documentation.
This folder includes templates required to monitor a Python job on MASSIVE, and a Jupyter notebook to analyse the outputs.
It is important to note the job monitoring template provided here is set up specifically for Python jobs - if you require assistance editing it to be appropriate for other jobs please email us at <EMAIL>.
This also assumes you are using a desktop or smux session while monitoring.
## Instructions to monitor your own job
Having visibility over how your job runs can help you understand if your job is spending a lot of energy moving files around, or if it’s using the GPUs to their fullest capacity. This job monitoring script will allow you to gather metrics about your job, and then examine the outputs in a Jupyter notebook.
There are three steps to this process.
1. Editing the job monitoring file.
2. Running the job monitoring file, and filtering the output.
3. Editing the Jupyter notebook to visualise the results.
## Step One: Monitoring the Job
In order to monitor your job, you'll need to make two edits to `job_monitoring_template.bash` using your favourite text editor.
The first section to edit is where you are setting up the environment to run your job. For example, this might include performing module loads, or activating virtual environments. Anything that needs to happen before your job runs should be put here.
```
# Set up your environment
# ###############################################################
# Insert your environment set up commands here
# module load software
# cd to/your/directory
# ################################################################
```
The second section to edit is where you actually insert the command you would usually use to run your job. You must ensure you add an "&" to the end of your command.
```
# Insert the command you want to monitor here, and add an “&” to the end of it
# ###############################################################
python your/python/job &
# ###############################################################
```
Once you have made these edits, save the job monitoring file.
## Step Two: Running the job monitoring script, and filtering the output
Once you have your job monitoring script, you'll want to run it.
```
./job_monitoring_template.bash
```
It will output a logfile called `nvidia-$LOGDATE.log`.
In order to make this logfile readable to pandas in the Jupyter notebook, you'll need to filter the data with the following command:
```
export DATE=$LOGDATE; cat nvidia-$DATE.log | grep -v 'gpu\|Idx' > nvidia-$DATE-filtered.log
```
This creates a file called nvidia-$DATE-filtered.log which you will use in the Jupyter notebook.
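If you want a quick sanity check of the filtered log before opening the notebook, a minimal pandas sketch like the one below will load it. The file path is a placeholder and the column layout depends on your `nvidia-smi dmon` version, so check the commented `# gpu ...` header lines in the unfiltered log and adjust accordingly:
```
# Minimal sketch, not part of the notebook: load the filtered dmon log with pandas.
import pandas as pd

# Placeholder path: replace DATE with the $LOGDATE value from your run.
logfile = 'nvidia-DATE-filtered.log'

# The filtered log is whitespace-separated with no header row
# (the '# gpu' / '# Idx' lines were stripped by the grep command above).
df = pd.read_csv(logfile, sep=r'\s+', header=None)

# Column 0 is the Unix timestamp added by `ts %.s`; the dmon fields follow.
df['elapsed_s'] = df[0] - df[0].iloc[0]
print(df.head())
```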
## Step Three: Visualising results in the Jupyter Notebook
Once you open `gpu-usage.ipynb`, there are a few things you will need to edit before running the cells.
Firstly, you need to edit
```
logfile = 'your/logfile/nvidia-$DATE-filtered.log'
```
so the notebook accesses the filtered logfile.
Secondly, in all of the plots there is a line which will save them. Edit this to reflect the directory you would like your plots saved to.
```
# Edit with your filepath
matplotlib.pyplot.savefig('your/place/to/save/plots' + filename, dpi=300, facecolor=fig.get_facecolor())
```
Once you've made these edits, you should be able to run the cells and see some plots.
More information about what these plots represent is to be added to this documentation shortly.
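As a rough preview of the kind of figure the notebook produces, here is a minimal matplotlib sketch built on the dataframe loaded in the Step Two snippet. The utilisation column index (5 here) is an assumption and should be checked against the header of your unfiltered log; the save path is a placeholder, as in the notebook:
```
import matplotlib.pyplot as plt

# Assumption: column 5 of the filtered log holds the 'sm' (GPU) utilisation percentage.
fig, ax = plt.subplots()
ax.plot(df['elapsed_s'], df[5])
ax.set_xlabel('Elapsed time (s)')
ax.set_ylabel('GPU utilisation (%)')
ax.set_title('GPU utilisation during the monitored job')

# Edit with your filepath, as in the notebook
plt.savefig('your/place/to/save/plots/gpu_utilisation.png', dpi=300, facecolor=fig.get_facecolor())
```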
<file_sep># ARDC-ML
This repository is a place to store files related to the [ARDC Environments to Accelerate Machine Learning-Based Discovery project](https://ardc.edu.au/project/environments-to-accelerate-machine-learning-based-discovery/), a collaboration between Monash University, University of Queensland, QCIF, NCI, and the Pawsey Supercomputing Center, motivated by increasing research needs for resources to perform machine learning related activities.
<file_sep>"""This Python script is taken from Introduction to Deep Learning and Tensorflow, part 2. This is training a basic Convolutional Neural Network for MNIST classification."""
# Load all libraries and helper functions
import tensorflow as tf
import numpy as np
import sys
import pandas as pd
import os
import urllib
print("TensorFlow version %s is loaded." % tf.__version__)
print("Num GPUs Available: ", len(tf.config.experimental.list_physical_devices('GPU')))
# Load the mnist dataset built into TensorFlow
mnist = tf.keras.datasets.mnist
(inputs_train, labels_train), (inputs_test, labels_test) = mnist.load_data()
# Squash the training data to values that range between 0 and 1.
inputs_train, inputs_test = inputs_train / 255.0, inputs_test / 255.0
# TensorFlow networks take inputs with shape (number of samples, height, width, channels)
# The current shape for the inputs are (number of samples, height, width)
# We therefore need to add an extra dimension at the end
inputs_train = np.expand_dims(inputs_train, axis=-1)
inputs_test = np.expand_dims(inputs_test, axis=-1)
# Print shapes of all our data as a sanity check
print('inputs_train shape: ',inputs_train.shape)
print('labels_train shape: ',labels_train.shape)
print('inputs_test shape: ',inputs_test.shape)
print('labels_test shape: ',labels_test.shape)
# Use tf.data to batch and shuffle the dataset:
train_ds = tf.data.Dataset.from_tensor_slices((inputs_train, labels_train)).shuffle(10000).batch(32)
test_ds = tf.data.Dataset.from_tensor_slices((inputs_test, labels_test)).batch(32)
# Example of a very simple Conv Net
class MyConvNet(tf.keras.Model):
def __init__(self):
# This function runs whenever you create an instance of your model.
# Since it only runs once, you should initialise all your trainable variables here. Basically any layer that contains "weights" in your model.
super(MyConvNet, self).__init__()
self.conv1 = tf.keras.layers.Conv2D(32, 3, activation='relu')
self.flatten = tf.keras.layers.Flatten()
self.dense1 = tf.keras.layers.Dense(64, activation='relu')
self.dense2 = tf.keras.layers.Dense(10)
def call(self, x):
# This function runs every time you call the model: output = model(input).
# Since it runs frequently, you should NOT initialise any trainable variables here.
# If you do so, those weights will be re-initialised at every call, hence defeating the purpose of the training process.
x = self.conv1(x)
x = self.flatten(x)
x = self.dense1(x)
return self.dense2(x)
# Create an instance of the model
model = MyConvNet()
# We use SparseCategoricalCrossentropy as the loss function and Adam as the gradient descent function.
loss_function = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
optimizer = tf.keras.optimizers.Adam(learning_rate=1e-7)
# We create the x_loss and x_accuracy objects to help keep track of model performance during training.
# The loss and accuracy values at each step of the training process are aggregated in the objects and can be printed out at the end of each training epoch.
train_loss = tf.keras.metrics.Mean(name='train_loss')
train_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='train_accuracy')
test_loss = tf.keras.metrics.Mean(name='test_loss')
test_accuracy = tf.keras.metrics.SparseCategoricalAccuracy(name='test_accuracy')
# The function train_step executes the forward and backward propagation of a single batch of images during the training process.
# As the TensorFlow documentation puts it, when you annotate a function with tf.function, you can still call it like any other function.
# But it will be compiled into a graph, which means you get the benefits of faster execution, running on GPU or TPU, or exporting to SavedModel.
@tf.function
def train_step(images, labels):
with tf.GradientTape() as tape:
# training=True is only needed if there are layers with different behavior during training versus inference (e.g. Dropout).
# It is best to include it if you are ever unsure. True during training, False during validation / testing / inference.
predictions = model(images, training=True)
train_step_loss = loss_function(labels, predictions)
# Determine the gradients for each trainable variable (weights) based on the loss function
gradients = tape.gradient(train_step_loss, model.trainable_variables)
# Apply the optimiser to the gradients to perform gradient descent
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
train_loss(train_step_loss)
train_accuracy(labels, predictions)
# The function test_step executes the inference (forward propagation) of a single batch of testing image.
def test_step(images, labels, confusion_matrix):
# training=True is only needed if there are layers with different behavior during training versus inference (e.g. Dropout).
# It is best to include it if you are ever unsure. True during training, False during validation / testing / inference.
predictions = model(images, training=False)
test_step_loss = loss_function(labels, predictions)
test_loss(test_step_loss)
test_accuracy(labels, predictions)
pred_class = np.argmax(predictions.numpy(), axis=-1)
for i in range(labels.shape[0]):
confusion_matrix[labels[i],pred_class[i]] += 1
max_epochs = 20
for epoch in range(max_epochs):
# Reset the metrics at the start of the next epoch
train_loss.reset_states()
train_accuracy.reset_states()
test_loss.reset_states()
test_accuracy.reset_states()
confusion_matrix = np.zeros((10,10))
# Perform training across the entire train set
for inputs, labels in train_ds:
train_step(inputs, labels)
# Perform testing across the entire test set
for test_inputs, test_labels in test_ds:
test_step(test_inputs, test_labels, confusion_matrix)
template = 'Epoch {}, Loss: {}, Accuracy: {}, Test Loss: {}, Test Accuracy: {}'
print(template.format(epoch+1,
train_loss.result(),
train_accuracy.result()*100,
test_loss.result(),
test_accuracy.result()*100))
print('Confusion matrix: rows represent labels, columns represent predictions')
print(np.asarray(confusion_matrix,np.int32))
|
7de7bc4f1bfa8d3d7200c19957e46ca35c378bf4
|
[
"Markdown",
"Python",
"Shell"
] | 9
|
Shell
|
kiowa-scott-hurley/ARDC-ML
|
af3549f02648ff616455af8f84d0e4370998e0d2
|
08a68f83be7e602274cb4770fab8dc436beb4dab
|
refs/heads/master
|
<file_sep>"use strict";
exports.__esModule = true;
exports.vaLittle = vaLittle;
function vaLittle() {
var _this = this;
// rules
this.required = function(value) {
return !value.trim();
};
this.email = function(value) {
return value === "" ||
value.match(
/^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
)
? false
: true;
};
this.phone = function(value) {
return value === "" || value.match(/^(?:\(?\?)?(?:[-\.\(\)\s]*(\d)){9}\)?$/)
? false
: true;
};
this.postCode = function(value) {
return value === "" || value.match(/^([0-9]{2}\-[0-9]{3})$/) ? false : true;
};
this.requireGroupState = {};
this.requireGroup = function(value, group) {
if (
value.trim() &&
value !== false &&
!_this.requireGroupState.hasOwnProperty(group)
) {
_this.requireGroupState[group] = false;
}
return false;
};
this.equalGroupState = {};
this.equalGroup = function(value, group) {
if (
value.trim() &&
value !== false &&
!_this.equalGroupState.hasOwnProperty(group)
) {
_this.equalGroupState[group] = value;
} else if (
value.trim() &&
value !== false &&
value !== _this.equalGroupState[group]
) {
delete _this.equalGroupState[group];
}
return false;
};
this.min = function(value, val) {
return value === "" || value.length >= val ? false : true;
};
this.max = function(value, val) {
return value === "" || value.length <= val ? false : true;
};
this.minVal = function(value, val) {
return value === "" || value >= val ? false : true;
};
this.maxVal = function(value, val) {
return value === "" || value <= val ? false : true;
};
this.number = function(value) {
return value === "" || value.match(/^([0-9 -]+)$/) ? false : true;
};
this.text = function(value) {
return value === "" || value.match(/^([a-zA-Z _-]+)$/) ? false : true;
};
this.regex = function(value, val) {
return value === "" || value.match(new RegExp(val)) ? false : true;
}; // check
this.rules = {};
this.messages = {};
this.prepareResults = function() {
for (var r in _this.results) {
if (!Object.keys(_this.results[r]).indexOf("requireGroup")) {
var g = _this.requireGroupState[_this.rules[r].requireGroup];
_this.results[r].requireGroup =
g === true || g === undefined ? true : false;
}
if (
Object.keys(_this.results[r]).indexOf("equalGroup") > -1 &&
!_this.equalGroupState[_this.rules[r].equalGroup]
) {
var _g = _this.equalGroupState[_this.rules[r].equalGroup];
_this.results[r].equalGroup = _g === undefined ? true : false;
}
var _e = Object.values(_this.results[r]).indexOf(true);
_this.results[r].errors = _e == -1 ? false : true;
_this.results[r].message =
_e == -1 ? false : _this.messages[r][Object.keys(_this.rules[r])[_e]];
}
var e = Object.keys(_this.results)
.map(function(e) {
return _this.results[e].errors;
})
.indexOf(true);
_this.results.errors = e !== -1 ? true : false;
};
this.check = function(data) {
var t = _this;
_this.results = {};
var _loop = function _loop(r) {
_this.results[r] = {};
error = false;
Object.keys(t.rules[r]).forEach(function(rule) {
var v = t.rules[r][rule];
if (v === true) {
var error = _this[rule](data[r]);
} else if (v !== false) {
var error = _this[rule](data[r], v);
}
_this.results[r][rule] = error;
});
};
for (var r in t.rules) {
var error;
_loop(r);
}
_this.prepareResults();
_this.requireGroupState = {};
return _this.results;
};
}
<file_sep>export function vaLittle(){
// rules
this.required = value => {
return !value.trim();
}
this.email = value => {
return value === '' || value.match(/^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/) ? false : true;
}
this.phone = value => {
return value === '' || value.match(/^(?:\(?\?)?(?:[-\.\(\)\s]*(\d)){9}\)?$/) ? false : true;
}
this.postCode = value => {
return value === '' || value.match(/^([0-9]{2}\-[0-9]{3})$/) ? false : true;
}
this.requireGroupState = {};
this.requireGroup = (value,group) => {
if (value.trim() && value !== false && !this.requireGroupState.hasOwnProperty(group)) {
this.requireGroupState[group] = false;
}
return false;
}
this.equalGroupState = {};
this.equalGroup = (value,group) => {
if (value.trim() && value !== false && !this.equalGroupState.hasOwnProperty(group)) {
this.equalGroupState[group] = value;
}else if (value.trim() && value !== false && value !== this.equalGroupState[group]) {
delete this.equalGroupState[group];
}
return false;
}
this.min = (value,val) => {
return value === '' || value.length >= val ? false : true;
}
this.max = (value,val) => {
return value === '' || value.length <= val ? false : true;
}
this.minVal = (value,val) => {
return value === '' || value >= val ? false : true;
}
this.maxVal = (value,val) => {
return value === '' || value <= val ? false : true;
}
this.number = value => {
return value === '' || value.match(/^([0-9 -]+)$/) ? false : true;
}
this.text = value => {
return value === '' || value.match(/^([a-zA-Z _-]+)$/) ? false : true;
}
this.regex = (value,val) => {
return value === '' || value.match(new RegExp(val)) ? false : true;
}
// check
this.rules = {};
this.messages = {};
this.prepareResults = () => {
for(let r in this.results){
if (Object.keys(this.results[r]).indexOf('requireGroup') > -1) {
let g = this.requireGroupState[this.rules[r].requireGroup];
this.results[r].requireGroup = g === true || g === undefined ? true : false;
}
if (Object.keys(this.results[r]).indexOf('equalGroup') > -1 && !this.equalGroupState[this.rules[r].equalGroup]) {
let g = this.equalGroupState[this.rules[r].equalGroup];
this.results[r].equalGroup = g === undefined ? true : false;
}
let e = Object.values(this.results[r]).indexOf(true);
this.results[r].errors = e == -1 ? false : true;
this.results[r].message = e == -1 ? false : this.messages[r][Object.keys(this.rules[r])[e]];
}
let e = Object.keys(this.results).map(e => { return this.results[e].errors; }).indexOf(true);
this.results.errors = e !== -1 ? true : false;
}
this.check = data => {
var t = this;
this.results = {}
for (let r in t.rules) {
this.results[r] = {};
Object.keys(t.rules[r]).forEach((rule) => {
let v = t.rules[r][rule];
let error = false;
if (v === true) {
error = this[rule](data[r]);
}else if (v !== false) {
error = this[rule](data[r],v);
}
this.results[r][rule] = error;
})
}
}
this.prepareResults();
this.requireGroupState = {};
return this.results;
}
}
<file_sep># vaLittle
Lightweight validation plugin in pure js
## Example of ussage
```
import { vaLittle } from "./scripts/vaLittle";
(function() {
// Create new validation
const form = new vaLittle;
// Set rules
form.rules = {
name:{
required:true,
text:true
}
};
// Set messages
form.messages = {
name:{
required:"Pole wymagane",
text:"Dozwolone jedynie małe i wielkie litery"
}
}
// Example form data - field_name:'value'
const ContactFormData = {
name:'test123'
};
// Check data return object
console.log(form.check(ContactFormData));
})();
```
## Return example
```
field_name:{
email: false, // is error in that rule?
requireGroup: true, // is error in that rule?
message: "Podaj e-mail lub telefon", // error message
errors: true // if error
},
errors:true // has any field error?
```
## Rules
### Custom Regex
Pass a custom regular expression as the parameter
```
regex:'/^([a-zA-Z _-]+)$/'
```
### Text only
Accepts only upper- and lower-case letters
```
text:true
```
### Numbers Only
Accepts only numbers
```
number:true
```
### Max value
Set max number value
```
maxVal:5
```
### Min value
Set min number value
```
minVal:5
```
### Min length
Set min field length
```
min:5
```
### Max length
Set max field length
```
max:5
```
### Require
### Required
Requires a value
require:true
```
### Require from group
Requires value from one or more inputs in group
```
requireGroup:'group_name'
```
### Post Code
Requires a post code format XX-XXX
```
postCode:true
```
### Phone
Requires a valid phone number
```
phone:true
```
### E-mail
Requires a valid e-mail
```
email:true
```
|
616aeb3680356fc30ab473d510b6c4a787618f51
|
[
"JavaScript",
"Markdown"
] | 3
|
JavaScript
|
jpolskicom/vaLittle
|
bb5285f56094a714da526dbcda41669435864235
|
6cc8b5a1d8c85680ead3d2b6a38d9406f4fee18e
|
refs/heads/master
|
<repo_name>kasu207/tth-techdegree-project6<file_sep>/README.md
# tth-techdegree-project6
Extended:
1. Package.json
   - added express
   - added pug
2. Error Page
   - added error.pug with individual style
3. CSS
   - changed portfolio-me: background color
   - changed portfolio-e a:hover to a brighter color
   - changed heading font to 'Roboto'
const express = require('express');
const app = express();
app.use('/static', express.static('public'));
app.set('view engine', 'pug');
//routes
const routes = require('./routes/index');
app.use('/', routes);
// Error handlers
// 404 error handler
app.use((req, res, next) => {
const err = new Error('ERROR: 404 - I am sorry, the page you\'ve requested, could not be found');
err.status = 404;
next(err);
});
/* Global error handler */
app.use((err, req, res, next) => {
if (err.status === 404) {
res.status(404);
res.locals.error = err;
console.log(err.message);
return res.render('page-not-found', { err });
} else {
err.status = err.status || 500;
err.message = err.message || "Looks like there was a problem on the server";
res.status(err.status);
res.locals.error = err;
console.log(err.message);
res.render('error', { err });
}
});
//localhost Path
app.listen(3000, () => {
console.log('The application is running on localhost:3000!')
});
|
edacd96cf14f27c17c6fa355914e65d186d21c05
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
kasu207/tth-techdegree-project6
|
8f71d33dbf228904506668115b621c77f312b281
|
6a2fbfdc3f2249ab0712a757b81931bfc5d03f0e
|