| python_code (string, 0–4.04M chars) | repo_name (string, 7–58 chars) | file_path (string, 5–147 chars) |
|---|---|---|
# -*- coding: utf-8 -*-
# file: d3mds.py
# lab: MIT Lincoln Lab
# author(s): sw26425
# description: a rudimentary API for interacting with D3MDataSupply, which mainly consists of a Dataset and a Problem
import os, json
import pandas as pd
import numpy as np
import warnings
DATASET_SCHEMA_VERSION = '3.0'
PROBLEM_SCHEM... | d3m-model-search-master | test_data/test_cases_only/534_cps_85_wages/534_cps_85_wages_solution/modules/d3mds.py |
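The d3mds.py preview above is truncated by the viewer. As a hedged illustration of how such an API is typically consumed (the method names `get_train_data` and `get_train_targets` are assumptions based on common D3M baseline conventions, not visible in the snippet):

```python
# Hedged sketch: consuming the d3mds API described above.
# Method names are assumptions, not confirmed by the truncated preview.
from d3mds import D3MDataset, D3MProblem, D3MDS

dsHome = 'path/to/dataset'   # hypothetical paths
prHome = 'path/to/problem'

d3mds = D3MDS(dsHome, prHome)        # wraps a D3MDataset and a D3MProblem
X_train = d3mds.get_train_data()     # features as a pandas DataFrame
y_train = d3mds.get_train_targets()  # targets as a numpy array
```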
import os, sys, json
import pandas as pd
from sklearn.pipeline import Pipeline
from sklearn.linear_model import SGDClassifier
from sklearn.metrics import f1_score, mean_squared_error
here = os.path.dirname(os.path.abspath(__file__))
from d3mds import D3MDataset, D3MProblem, D3MDS
from feature_extraction import *
from... | d3m-model-search-master | test_data/test_cases_only/534_cps_85_wages/534_cps_85_wages_solution/modules/pipeline.py |
from abc import ABC, abstractmethod
from collections import OrderedDict
import numpy as np
from numpy import ndarray
from scipy.sparse import csr_matrix
from pandas import DataFrame
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.feature_selection.base import SelectorMixin
# https://stackoverflow... | d3m-model-search-master | test_data/test_cases_only/534_cps_85_wages/534_cps_85_wages_solution/modules/base.py |
d3m-model-search-master | Stanford-D3M-Full/__init__.py | |
import sys
import logging
import time
import random
import os
from concurrent import futures
import multiprocessing
import os
import json
import copy
import random
import grpc
import time
import json
from multiprocessing import pool, context
from google.protobuf import json_format
import ta3ta2.api.core_pb2 as core_pb... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/server.py |
d3m-model-search-master | Stanford-D3M-Full/ta3ta2/__init__.py | |
import time
import sys
import json
import os
from multiprocessing import pool, context
from google.protobuf import json_format
import grpc
import ta3ta2.api.core_pb2_grpc as core_pb2_grpc
import ta3ta2.api.core_pb2 as core_pb2
import ta3ta2.test_server_messages as msg
import ta3ta2.api.utils as ta3ta2utils
from d3m.m... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/test_server.py |
"""
Assembles all the protobuf messages needed to
fulfill API contracts.
"""
import ta3ta2.api.core_pb2 as core_pb2
import ta3ta2.api.value_pb2 as value_pb2
import ta3ta2.api.problem_pb2 as problem_pb2
def GetHello():
msg = core_pb2.HelloRequest()
return msg
def GetProblemDescription():
msg = problem_pb2... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/test_server_messages.py |
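test_server_messages.py exposes small builder functions such as GetHello(). A hedged usage sketch pairing it with the CoreStub defined in core_pb2_grpc.py (the server address and the existence of a Hello RPC on the stub are assumptions, not confirmed by the snippets):

```python
# Hedged sketch: sending a builder-produced message through the gRPC stub.
# The port and the Hello RPC name are assumptions about this codebase.
import grpc
import ta3ta2.api.core_pb2_grpc as core_pb2_grpc
import ta3ta2.test_server_messages as msg

channel = grpc.insecure_channel('localhost:45042')  # hypothetical address
stub = core_pb2_grpc.CoreStub(channel)
reply = stub.Hello(msg.GetHello())  # HelloRequest assembled by the helper
```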
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
| d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/pipeline_pb2_grpc.py |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
| d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/problem_pb2_grpc.py |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pipeline.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/pipeline_pb2.py |
d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/__init__.py | |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import ta3ta2.api.core_pb2 as core__pb2
class CoreStub(object):
"""See each message's comments for information about each particular call.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Cha... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/core_pb2_grpc.py |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: primitive.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _ref... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/primitive_pb2.py |
"""
We represent values in this module at three levels:
* A GRPC ``Value`` message.
* An *intermediate level* with a dict with ``type`` and ``value`` fields
where ``type`` can be one of the ``object``, ``dataset_uri``, ``csv_uri``,
``pickle_uri``, ``plasma_id``, ``error``. All values except for ``object``
type a... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/utils.py |
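Per the docstring, the intermediate level is a plain dict with `type` and `value` fields. A minimal sketch of what such values look like (field contents are assumptions inferred from the listed type names):

```python
# Hedged examples of the intermediate-level representation described above.
csv_value = {'type': 'csv_uri', 'value': 'file:///tmp/predictions.csv'}
dataset_value = {'type': 'dataset_uri', 'value': 'file:///d3m/datasetDoc.json'}
error_value = {'type': 'error', 'value': 'unsupported value type'}

# 'object' is the only type whose value is an in-memory Python object;
# the *_uri / plasma_id types reference data stored elsewhere.
obj_value = {'type': 'object', 'value': [1, 2, 3]}
```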
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
| d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/primitive_pb2_grpc.py |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: core.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _mess... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/core_pb2.py |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: problem.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _m... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/problem_pb2.py |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
| d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/value_pb2_grpc.py |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: value.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _mes... | d3m-model-search-master | Stanford-D3M-Full/ta3ta2/api/value_pb2.py |
# Description
# -----------------
# This file trains/tests an LSTM that predicts the performance of a pipeline.
#
# Training:
# One can train a new LSTM by supplying a dataset of pipelines and their respective performance scores.
# The data can be collected by running apps/basic_run.py with 'save_all_scored_pipeli... | d3m-model-search-master | Stanford-D3M-Full/experimental/lstm_predictor/LSTMMetaLearnerApp.py |
from keras.models import Sequential, load_model
from keras.layers.embeddings import Embedding
from keras.layers import Dense, LSTM, BatchNormalization, Activation
from keras import backend
from keras import regularizers
from keras import optimizers
from keras.preprocessing import sequence
import numpy as np
import ti... | d3m-model-search-master | Stanford-D3M-Full/experimental/lstm_predictor/LSTMMetaLearner.py |
import numpy as np
import pickle
import json
from experimental.lstm_predictor.LSTMConstants import MAX_NUM_CONV_LAYERS
from experimental.lstm_predictor.LSTMUtils import get_data_from_json, get_model_path
'''
This file loads an LSTM model and filters a given list of data-loading pipelines
to predict which pipel... | d3m-model-search-master | Stanford-D3M-Full/experimental/lstm_predictor/LSTMPredictWorker.py |
import json
from sklearn.model_selection import train_test_split
def get_data_from_json(pipeline_jsons):
pipeline_data = []
pipeline_rank = []
for pipeline_json in pipeline_jsons:
pipeline = json.loads(pipeline_json)
stepIds = [ step["primitive"]["python_path"] for step in pipeline["st... | d3m-model-search-master | Stanford-D3M-Full/experimental/lstm_predictor/LSTMUtils.py |
MAX_NUM_CONV_LAYERS=15
EMBEDDING_SIZE=40
LSTM_DIM = 100
DENSE_LAYER_DIMS = [100, 10, 1]
BN_AXIS = 1
BATCH_SIZE = 32
| d3m-model-search-master | Stanford-D3M-Full/experimental/lstm_predictor/LSTMConstants.py |
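Combining the imports in LSTMMetaLearner.py with the constants above, a hedged sketch of the performance-predictor architecture they imply (the vocabulary size and exact layer ordering are assumptions):

```python
# Hedged sketch: an LSTM mapping a sequence of primitive ids to a predicted
# pipeline score, using the constants above. VOCAB_SIZE and layer ordering
# are assumptions, not confirmed by the truncated previews.
from keras.models import Sequential
from keras.layers.embeddings import Embedding
from keras.layers import Dense, LSTM, BatchNormalization, Activation
from experimental.lstm_predictor.LSTMConstants import (
    EMBEDDING_SIZE, LSTM_DIM, DENSE_LAYER_DIMS, BN_AXIS)

VOCAB_SIZE = 1000  # hypothetical number of distinct primitives

model = Sequential()
model.add(Embedding(VOCAB_SIZE, EMBEDDING_SIZE))  # 40-dim primitive embeddings
model.add(LSTM(LSTM_DIM))                         # 100 units
for dim in DENSE_LAYER_DIMS[:-1]:                 # [100, 10]
    model.add(Dense(dim))
    model.add(BatchNormalization(axis=BN_AXIS))
    model.add(Activation('relu'))
model.add(Dense(DENSE_LAYER_DIMS[-1]))            # scalar score output
model.compile(loss='mean_squared_error', optimizer='adam')
```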
"""Run all tests with filenames beginning with "test*" inside module"""
import sys
import unittest
if __name__ == '__main__':
print("run_tests.py running all tests...")
test_suite = unittest.defaultTestLoader.discover('.', 'test*py')
test_runner = unittest.TextTestRunner(resultclass=unittest.TextTestResul... | d3m-model-search-master | Stanford-D3M-Full/tests/run_tests.py |
d3m-model-search-master | Stanford-D3M-Full/tests/__init__.py | |
d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/__init__.py | |
d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/ta3ta2/__init__.py | |
import unittest
import os
import utils.utils
import utils.primitive_pipeline_utils
import utils.train_utils
import shutil
import pdb
import numpy as np
class UtilsTest(unittest.TestCase):
def test_get_global_score(self):
# Load baseball
data_uri = utils.utils.get_git_root(os.path.dirname(os.p... | d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/utils/test_utils.py |
import unittest
import utils.train_utils
import utils.utils
import utils.primitive_pipeline_utils
import random
import numpy as np
import os
import pdb
class TrainUtilsTest(unittest.TestCase):
def test_extract_labels(self):
random.seed(0)
np.random.seed(0)
# Load sample data
... | d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/utils/test_train_utils.py |
d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/utils/__init__.py | |
import unittest
import os
import random
import utils.primitive_pipeline_utils
import utils.utils
import utils.train_utils
import numpy as np
from d3m.metadata import problem
class PrimitivePipelineUtilsTest(unittest.TestCase):
def test_simple_sklearn_pipeliner(self):
np.random.seed(0)
random.see... | d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/utils/test_primitive_pipeline_utils.py |
import os
import unittest
import utils.utils
import utils.train_utils
import executors.HyperbandExecutor
import executors.Executor
from multiprocessing import Process, Queue, JoinableQueue
from queue import Empty
def get_data_loading_pipelines_override():
return [
[
{
"stage_nam... | d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/executor/test_HyperbandExecutor.py |
import os
import unittest
import utils.utils
import utils.train_utils
import executors.SklearnStackedLSTMExecutor
import executors.Executor
from multiprocessing import Process, Queue, JoinableQueue
from queue import Empty
def get_data_loading_pipelines_override():
return [
[
{
"... | d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/executor/test_SklearnStackedLSTMExecutor.py |
d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/executor/__init__.py | |
import os
import unittest
import utils.utils
import utils.train_utils
import executors.SimpleRandomSklearnExecutor
import executors.Executor
from multiprocessing import Process, Queue, JoinableQueue
from queue import Empty
def get_data_loading_pipelines_override():
return [
[
{
... | d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/executor/test_SimpleRandomSklearnExecutor.py |
import os
import unittest
import shutil
import utils.utils
import utils.train_utils
import executors.NistSaverExecutor
import executors.Executor
from multiprocessing import Process, Queue, JoinableQueue
from queue import Empty
class SimpleRandomSklearnExecutorTest(unittest.TestCase):
def test_saver_executor(self... | d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/executor/test_NistSaverExecutor.py |
d3m-model-search-master | Stanford-D3M-Full/tests/unit_tests/apps/__init__.py | |
d3m-model-search-master | Stanford-D3M-Full/tests/integration_tests/__init__.py | |
d3m-model-search-master | Stanford-D3M-Full/tests/integration_tests/ta3ta2/__init__.py | |
d3m-model-search-master | Stanford-D3M-Full/tests/integration_tests/utils/__init__.py | |
d3m-model-search-master | Stanford-D3M-Full/tests/integration_tests/executor/__init__.py | |
d3m-model-search-master | Stanford-D3M-Full/tests/integration_tests/apps/__init__.py | |
"""
train_utils.py
------------------------------------
Contains utilities for:
- K-fold cross validation
- Scoring
- Extracting labels
"""
import sys
import glob
import pdb
import shutil
import json
import pandas
import numpy as np
import os
import copy
import traceback
import time
from d3m import metadata
from d3m.... | d3m-model-search-master | Stanford-D3M-Full/utils/train_utils.py |
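train_utils.py bundles k-fold cross validation and scoring. As a hedged, framework-agnostic sketch of that pattern (sklearn stand-ins, not the module's actual helpers):

```python
# Hedged sketch of k-fold scoring, using sklearn stand-ins rather than
# the actual helpers in train_utils.py. X, y are numpy arrays.
import numpy as np
from sklearn.model_selection import KFold
from sklearn.linear_model import SGDClassifier
from sklearn.metrics import f1_score

def kfold_score(X, y, n_splits=5, seed=0):
    """Mean macro-f1 of a freshly fit model over n_splits folds."""
    scores = []
    for train_idx, test_idx in KFold(n_splits, shuffle=True,
                                     random_state=seed).split(X):
        model = SGDClassifier(random_state=seed).fit(X[train_idx], y[train_idx])
        scores.append(f1_score(y[test_idx], model.predict(X[test_idx]),
                               average='macro'))
    return float(np.mean(scores))
```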
d3m-model-search-master | Stanford-D3M-Full/utils/__init__.py | |
"""
pipeline_utils.py
------------------------------------
Contains utilities for:
- Saving and loading pipelines
- Constructing pipelines
- Running pipelines
- Loading primitives
"""
import sys
import glob
import pdb
import shutil
import uuid
import json
import random
import pandas
import numpy as np
import os
impor... | d3m-model-search-master | Stanford-D3M-Full/utils/primitive_pipeline_utils.py |
"""
utils.py
------------------------------------------------
Contains general utilities for:
- Loading d3m dataset, writing d3m predictions
- Dealing with problem docs, protos
- Running functions with timelimit
"""
import sys
import glob
import pdb
import shutil
import json
import pandas
import numpy as np
import os
... | d3m-model-search-master | Stanford-D3M-Full/utils/utils.py |
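One of the advertised utilities is "running functions with timelimit". A hedged sketch of the standard multiprocessing pattern for that (not necessarily what utils.py actually does):

```python
# Hedged sketch: run fn(*args) in a child process and give up after
# `timelimit` seconds. Standard multiprocessing pattern, not necessarily
# the module's implementation.
from multiprocessing import Process, Queue

def _call(q, fn, args):
    q.put(fn(*args))

def run_with_timelimit(fn, args, timelimit):
    q = Queue()
    p = Process(target=_call, args=(q, fn, args))
    p.start()
    p.join(timelimit)
    if p.is_alive():          # still running: kill it and report a timeout
        p.terminate()
        p.join()
        raise TimeoutError(f"{fn.__name__} exceeded {timelimit}s")
    return q.get()
```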
import utils.utils
import utils.primitive_pipeline_utils
import utils.train_utils
import executors.Executor
import executors.SimpleRandomSklearnExecutor
class ExtendedSklearnExecutor(executors.SimpleRandomSklearnExecutor.SimpleRandomSklearnExecutor):
"""
Executor that processes tuples of (problem_doc, dataset... | d3m-model-search-master | Stanford-D3M-Full/executors/ExtendedSklearnExecutor.py |
import utils.utils
import utils.primitive_pipeline_utils
import utils.train_utils
import executors.Executor
import executors.NistSaverExecutor
from d3m.metadata.pipeline import Pipeline, PrimitiveStep
from d3m.metadata.base import ArgumentType, Context
from d3m.runtime import Runtime
import uuid
import os
import sys
... | d3m-model-search-master | Stanford-D3M-Full/executors/ScoreFitProduceExecutor.py |
d3m-model-search-master | Stanford-D3M-Full/executors/__init__.py | |
import utils.utils
import utils.primitive_pipeline_utils
import utils.train_utils
import executors.Executor
class NistSaverExecutor(executors.Executor.Executor):
"""
Executor which saves (baseoutputdir, pipeline_json, score) to output.
"""
def process_item(self, input_item):
base_outputdir... | d3m-model-search-master | Stanford-D3M-Full/executors/NistSaverExecutor.py |
import utils.utils
import utils.primitive_pipeline_utils
import utils.train_utils
import executors.Executor
import executors.SimpleRandomSklearnExecutor
import numpy as np
import sys
import math
import random
from d3m.metadata.pipeline import Pipeline, PrimitiveStep
class HyperbandExecutor(executors.Executor.Execut... | d3m-model-search-master | Stanford-D3M-Full/executors/HyperbandExecutor.py |
import utils.utils
import utils.primitive_pipeline_utils
import utils.train_utils
import executors.Executor
from experimental.lstm_predictor.LSTMPredictWorker import LSTM_filter
import random
import json
MAX_TRIAL = 10
class SklearnStackedLSTMExecutor(executors.Executor.Executor):
"""
Executor that processes ... | d3m-model-search-master | Stanford-D3M-Full/executors/SklearnStackedLSTMExecutor.py |
import sys
import os
from multiprocessing import Process, Queue, JoinableQueue
from queue import Empty
import logging
import uuid
import d3m.utils
from d3m.utils import redirect_to_logging
QUEUE_TIMEOUT = 3
"""
The Executor class, a framework around a process to make
processing input items from a queue and returning... | d3m-model-search-master | Stanford-D3M-Full/executors/Executor.py |
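Executor.py wraps a process around a queue. A hedged sketch of the consume-process-return loop its imports (Queue, Empty, QUEUE_TIMEOUT) suggest; `process_item` is grounded in the NistSaverExecutor override above, but the loop structure is an assumption:

```python
# Hedged sketch of the worker loop implied by Executor.py's imports.
from multiprocessing import JoinableQueue, Queue
from queue import Empty

QUEUE_TIMEOUT = 3

def worker_loop(input_queue, result_queue, process_item):
    while True:
        try:
            item = input_queue.get(timeout=QUEUE_TIMEOUT)
        except Empty:
            break                      # no more work: shut the worker down
        try:
            result_queue.put(process_item(item))
        finally:
            input_queue.task_done()    # JoinableQueue bookkeeping
```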
import utils.utils
import utils.primitive_pipeline_utils
import utils.train_utils
import executors.Executor
class SimpleRandomSklearnExecutor(executors.Executor.Executor):
"""
Executor that processes tuples of (problem_doc, dataset) and writes
(pipelines, scores) to the results.
"""
def __init__(se... | d3m-model-search-master | Stanford-D3M-Full/executors/SimpleRandomSklearnExecutor.py |
import matplotlib.pyplot as plt
import numpy as np
import argparse
import json
'''
Given multiple benchmark data, compare them and plot the result
'''
if __name__ == "__main__":
benchmark_results = ["190225_lstm_sklearn_n_threads_1_tlimit_100.json", "190225_simple_sklearn_n_threads_1_tlimit_100.json"]
re... | d3m-model-search-master | Stanford-D3M-Full/apps/compare_benchmarks.py |
import time
import json
import sys
import os
import argparse
from multiprocessing import Process, Queue, JoinableQueue
from d3m.metadata.pipeline import Pipeline, PrimitiveStep
from d3m.metadata.base import ArgumentType, Context
from d3m.runtime import Runtime
import executors.ExtendedSklearnExecutor
import executors.... | d3m-model-search-master | Stanford-D3M-Full/apps/compute_score.py |
import time
import json
import sys
import os
import argparse
import copy
from multiprocessing import Process, Queue, JoinableQueue
import executors.ExtendedSklearnExecutor
import executors.SimpleRandomSklearnExecutor
import executors.HyperbandExecutor
import executors.NistSaverExecutor
import executors.SklearnStackedL... | d3m-model-search-master | Stanford-D3M-Full/apps/basic_run.py |
import sys
import json
import pandas
import numpy as np
import os
import traceback
from d3m.runtime import Runtime
import utils.utils
import utils.train_utils
import utils.primitive_pipeline_utils
if __name__ == "__main__":
# Get args
path_to_pipeline_json = sys.argv[1]
inputdir = sys.argv[2]
output... | d3m-model-search-master | Stanford-D3M-Full/apps/predict.py |
d3m-model-search-master | Stanford-D3M-Full/apps/__init__.py | |
import time
import json
import sys
import os
import argparse
from multiprocessing import Process, Queue, JoinableQueue
from d3m.metadata.pipeline import Pipeline, PrimitiveStep
from d3m.runtime import Runtime
import executors.ExtendedSklearnExecutor
import executors.SimpleRandomSklearnExecutor
import executors.NistSav... | d3m-model-search-master | Stanford-D3M-Full/apps/compare_score.py |
import time
import json
import sys
import os
import argparse
from multiprocessing import Process, Queue, JoinableQueue
from d3m.runtime import Runtime
import executors.ExtendedSklearnExecutor
import executors.SimpleRandomSklearnExecutor
import executors.NistSaverExecutor
import executors.Executor
import utils.utils
im... | d3m-model-search-master | Stanford-D3M-Full/apps/debug_pipeline.py |
# coding=utf-8
# Copyright 2020- The Google AI Language Team Authors and The HuggingFace Inc. team and Facebook Inc.
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the... | bio-lm-main | biolm/utils_sequence_labelling.py |
# Copyright (c) 2020-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# | bio-lm-main | biolm/__init__.py |
# coding=utf-8
# Copyright 2020- The Google AI Language Team Authors and The HuggingFace Inc. team and Facebook Inc.
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the... | bio-lm-main | biolm/utils_classification.py |
# coding=utf-8
# Copyright 2020- The Google AI Language Team Authors and The HuggingFace Inc. team and Facebook Inc.
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the... | bio-lm-main | biolm/run_sequence_labelling.py |
# coding=utf-8
# Copyright 2020- The Google AI Language Team Authors and The HuggingFace Inc. team and Facebook Inc.
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the... | bio-lm-main | biolm/run_classification.py |
# Copyright (c) 2020-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
from transformers import AutoTokenizer
import argparse
from tqdm import tqdm
def main(args):
"""Inputs and cleans and ensu... | bio-lm-main | preprocessing/clean_conll_file.py |
# Copyright (c) 2020-present Emily Alsentzer and Facebook Inc.
# Copyright (c) 2019 Emily Alsentzer
# All rights reserved.
#
# This source code is licensed under the MIT license, which can be found here https://github.com/EmilyAlsentzer/clinicalBERT/blob/master/LICENSE
#
"""Adapted from clinicalBERT preprocessing noteb... | bio-lm-main | preprocessing/preprocess_i2b2_2012_ner.py |
# Copyright (c) 2020-present Emily Alsentzer and Facebook Inc.
# Copyright (c) 2019 Emily Alsentzer
# All rights reserved.
#
# This source code is licensed under the MIT license, which can be found here https://github.com/EmilyAlsentzer/clinicalBERT/blob/master/LICENSE
#
"""Adapted from clinicalBERT preprocessing noteb... | bio-lm-main | preprocessing/preprocess_i2b2_2010_ner.py |
# Copyright (c) 2020-present Emily Alsentzer and Facebook Inc.
# Copyright (c) 2019 Emily Alsentzer
# All rights reserved.
#
# This source code is licensed under the MIT license, which can be found here https://github.com/EmilyAlsentzer/clinicalBERT/blob/master/LICENSE
#
"""Adapted from clinicalBERT preprocessing noteb... | bio-lm-main | preprocessing/preprocess_i2b2_2014_ner.py |
"""Run dimensionality reduction experiment."""
import argparse
import logging
import networkx as nx
import numpy as np
import torch
import geom.hyperboloid as hyperboloid
import geom.poincare as poincare
from learning.frechet import Frechet
from learning.pca import TangentPCA, EucPCA, PGA, HoroPCA, BSA
from utils.da... | HoroPCA-main | main.py |
HoroPCA-main | learning/__init__.py | |
"""Frechet data statistics."""
import torch
import geom.poincare as poincare
class Frechet:
"""Class to compute Frechet statiscs (mean and variance)."""
def __init__(self, lr=1e-1, eps=1e-5, max_steps=5000, max_lr_try=3):
self.lr = lr
self.eps = eps
self.max_steps = max_steps
... | HoroPCA-main | learning/frechet.py |
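For reference, the Frechet statistics this class computes are the standard definitions below; the truncated snippet itself only shows the optimizer hyperparameters:

```latex
% Frechet mean and variance of points x_1, ..., x_N under geodesic distance d
\mu = \operatorname*{arg\,min}_{x} \frac{1}{N} \sum_{i=1}^{N} d(x, x_i)^2,
\qquad
\sigma^2 = \frac{1}{N} \sum_{i=1}^{N} d(\mu, x_i)^2
```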
"""Hyperbolic dimensionality reduction models."""
from abc import ABC, abstractmethod
import torch
import torch.nn as nn
import geom.euclidean as euclidean
import geom.hyperboloid as hyperboloid
import geom.minkowski as minkowski
import geom.poincare as poincare
from geom.horo import busemann, project_kd
from utils.... | HoroPCA-main | learning/pca.py |
"""Evaluation metrics."""
import numpy as np
import torch
import geom.poincare as poincare
from learning.frechet import Frechet
def avg_distortion_measures(distances1, distances2, tau=1.0):
"""Computes different measures of average distortion between two distance matrices.
:param distances1: N x N torch tens... | HoroPCA-main | utils/metrics.py |
HoroPCA-main | utils/__init__.py | |
"""Sarkar's combinatorial construction."""
import networkx as nx
import numpy as np
import scipy
MIN_NORM = 1e-15
# ################# CIRCLE INVERSIONS ########################
def reflect_at_zero(x, mu): # Note: this differs from geom.poincare.reflect_at_zero because it's numpy instead of torch
"""
Image... | HoroPCA-main | utils/sarkar.py |
"""Data utils."""
import networkx as nx
import numpy as np
import geom.poincare as poincare
from learning.frechet import Frechet
def load_graph(dataset):
"""Loads a graph dataset.
Return: networkx graph object
"""
G = nx.Graph()
with open(f"data/edges/{dataset}.edges", "r") as f:
for li... | HoroPCA-main | utils/data.py |
"""Util functions for hyperboloid models
Convention: The ambient Minkowski space has signature -1, 1, 1, ...
i.e. the squared norm of (t,x,y,z) is -t^2 + x^2 + y^2 + z^2,
and we are using the positive sheet, i.e. every point on the hyperboloid
has positive first coordinate.
"""
i... | HoroPCA-main | geom/hyperboloid.py |
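Under the stated convention, the Minkowski product and the hyperboloid constraint are easy to write down; a minimal sketch (function names are illustrative, not the module's):

```python
# Minimal sketch of the signature convention described above (-1, +1, ..., +1).
import torch

def minkowski_dot(u, v):
    """<u, v> with signature -1, +1, ..., +1 along the last dimension."""
    return -u[..., 0] * v[..., 0] + (u[..., 1:] * v[..., 1:]).sum(-1)

def on_hyperboloid(x, tol=1e-6):
    """Positive-sheet points satisfy <x, x> = -1 with t = x[..., 0] > 0."""
    return (minkowski_dot(x, x) + 1).abs().lt(tol) & (x[..., 0] > 0)
```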
"""Poincare utils functions."""
import torch
import geom.euclidean as euclidean
MIN_NORM = 1e-15
BALL_EPS = {torch.float32: 4e-3, torch.float64: 1e-5}
def expmap0(u):
"""Exponential map taken at the origin of the Poincare ball with curvature c.
Args:
u: torch.Tensor of size B x d with hyperbolic po... | HoroPCA-main | geom/poincare.py |
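expmap0 has a simple closed form; a hedged sketch for the unit-curvature case (the docstring mentions a curvature c, which this sketch fixes at 1):

```python
# Hedged sketch of the exponential map at the origin of the Poincare ball,
# curvature fixed at 1: exp_0(u) = tanh(||u||) * u / ||u||.
import torch

MIN_NORM = 1e-15

def expmap0(u):
    """Map tangent vectors at the origin into the unit ball."""
    norm = u.norm(dim=-1, p=2, keepdim=True).clamp_min(MIN_NORM)
    return torch.tanh(norm) * u / norm
```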
HoroPCA-main | geom/__init__.py | |
"""Horocycle projection utils (Poincare model)."""
import torch
MIN_NORM = 1e-15
def busemann(x, p, keepdim=True):
"""
x: (..., d)
p: (..., d)
Returns: (..., 1) if keepdim==True else (...)
"""
xnorm = x.norm(dim=-1, p=2, keepdim=True)
pnorm = p.norm(dim=-1, p=2, keepdim=True)
p = p... | HoroPCA-main | geom/horo.py |
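busemann() is cut off mid-implementation. For context, a hedged note: on the Poincare ball the Busemann function with respect to an ideal point p (with ||p|| = 1) has the standard closed form below, consistent with the xnorm/pnorm terms visible in the snippet:

```latex
% Busemann function on the Poincare ball, ideal point p with \lVert p \rVert = 1
B_p(x) = \log \frac{\lVert p - x \rVert^2}{1 - \lVert x \rVert^2}
```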
""" Util functions for the Minkowski metric.
Note that functions for the hyperboloid model itself are in geom.hyperboloid
Most functions in this file have a bilinear_form argument that can generally be ignored.
That argument is there just in case we need to use a non-standard norm/signature.
"""
import torch
def pro... | HoroPCA-main | geom/minkowski.py |
""" Geometric utility functions, mostly for standard Euclidean operations."""
import torch
MIN_NORM = 1e-15
def orthonormal(Q):
"""Return orthonormal basis spanned by the vectors in Q.
Q: (..., k, d) k vectors of dimension d to orthonormalize
"""
k = Q.size(-2)
_, _, v = torch.svd(Q, some=False... | HoroPCA-main | geom/euclidean.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import torch
from crlapi.benchmark import StreamTrainer
import hydra
from omegaconf import DictConfig, OmegaConf
def ... | alma-main | configs/mnist/run.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import torch
from crlapi.benchmark import StreamTrainer
import hydra
from omegaconf import DictConfig, OmegaConf
def ... | alma-main | configs/cifar10/run.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
from crlapi import instantiate_class,get_class,get_arguments
class StreamTrainer:
def create_logger(self, logger_a... | alma-main | crlapi/benchmark.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
def instantiate_class(arguments):
from importlib import import_module
d = dict(arguments)
if "classname" in... | alma-main | crlapi/__init__.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import copy
class TaskResources:
""" Describe resources for a task (e.g a dataset, and environments, etc...)
"... | alma-main | crlapi/core.py |
#
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
from torch.utils.tensorboard import SummaryWriter
import sqlite3
import os
import os.path
import csv
import copy
from datetime import date... | alma-main | crlapi/logger.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
from crlapi.core import TaskResources, Stream, Task
import torchvision.datasets
import torchvision.transforms
import num... | alma-main | crlapi/sl/streams/cifar10.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
from crlapi.core import TaskResources, Stream, Task
import torchvision.datasets
import torchvision.transforms
import nump... | alma-main | crlapi/sl/streams/emnist.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
from crlapi.core import TaskResources, Stream, Task
import torchvision.datasets
import torchvision.transforms
import num... | alma-main | crlapi/sl/streams/mnist.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import torch
import torch.nn as nn
import numpy as np
from copy import deepcopy
from crlapi.sl.architectures.mixture_mod... | alma-main | crlapi/sl/architectures/vgg.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import torch
import torch.nn as nn
import torch.nn.functional as F
import copy
import random
import numpy as np
class So... | alma-main | crlapi/sl/architectures/mixture_model.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import torch
import torch.nn as nn
from crlapi.sl.architectures.mixture_model import MixtureLayer,SoftMaxGateModule,HardS... | alma-main | crlapi/sl/architectures/mlp.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import math
import logging
import numpy as np
import torch
from torch import nn
import torch.utils.data
from torch.nn im... | alma-main | crlapi/sl/architectures/resnet.py |
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import math
import copy
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional a... | alma-main | crlapi/sl/architectures/sp_vgg.py |