python_code stringlengths 0 34.9k |
|---|
from distutils.core import setup
from setuptools import find_packages
# When publishing the Docker image, a script checks for the first line with "version" and an equals sign to get the version.
version='1.0.0'
install_requires = [
'bokeh>=0.13',
'expiringdict>=1.1.4',
'injector>=0.16.2',
'joblib>=0.... |
import json
from collections import defaultdict
from dataclasses import dataclass
from itertools import cycle
from logging import Logger
from operator import itemgetter
from pathlib import Path
from typing import List, Dict
from bokeh import colors
from bokeh.io import export_png
from bokeh.models import FuncTickForma... |
import json
import logging
import os
import random
import time
from dataclasses import asdict, dataclass
from functools import partial
from itertools import cycle
from logging import Logger
from platform import uname
from queue import PriorityQueue
from threading import Thread
from typing import List
import numpy as n... |
import os
import sys
import math
from injector import inject, Injector
from decai.simulation.contract.classification.classifier import Classifier
from decai.simulation.contract.classification.decision_tree import DecisionTreeModule
from decai.simulation.contract.collab_trainer import DefaultCollaborativeTrainerModule... |
import json
import os
import random
import sys
from collections import Counter
from typing import cast
import math
import numpy as np
from injector import inject, Injector
from decai.simulation.contract.classification.classifier import Classifier
from decai.simulation.contract.classification.decision_tree import Deci... |
import os
import sys
import math
from injector import inject, Injector
from decai.simulation.contract.classification.classifier import Classifier
from decai.simulation.contract.classification.decision_tree import DecisionTreeModule
from decai.simulation.contract.collab_trainer import DefaultCollaborativeTrainerModule... |
import logging
from dataclasses import dataclass, field
from logging import Logger
from injector import Module, provider, singleton
@dataclass
class LoggingModule(Module):
_log_level: int = field(default=logging.INFO)
@provider
@singleton
def provide_logger(self) -> Logger:
result = logging.... |
import os
import sys
from typing import Optional
from injector import Injector
from decai.simulation.contract.classification.perceptron import PerceptronModule
from decai.simulation.contract.collab_trainer import DefaultCollaborativeTrainerModule
from decai.simulation.contract.incentive.stakeable import StakeableImMo... |
import os
import sys
import math
from injector import inject, Injector
from decai.simulation.contract.balances import Balances
from decai.simulation.contract.classification.perceptron import PerceptronModule
from decai.simulation.contract.collab_trainer import DefaultCollaborativeTrainerModule
from decai.simulation.c... |
import os
import re
import sys
from injector import Injector
from sklearn.naive_bayes import MultinomialNB
from decai.simulation.contract.classification.ncc_module import NearestCentroidClassifierModule
from decai.simulation.contract.classification.perceptron import PerceptronModule
from decai.simulation.contract.cla... |
import os
import sys
import math
from injector import inject, Injector
from sklearn.naive_bayes import MultinomialNB
from decai.simulation.contract.classification.classifier import Classifier
from decai.simulation.contract.classification.scikit_classifier import SciKitClassifierModule
from decai.simulation.contract.c... |
from abc import ABC, abstractmethod
from injector import Module, inject, singleton
from decai.simulation.contract.balances import Balances
from decai.simulation.contract.classification.classifier import Classifier
from decai.simulation.contract.data.data_handler import DataHandler
from decai.simulation.contract.incen... |
from dataclasses import dataclass, field
from logging import Logger
from typing import Dict
from injector import inject, singleton
from decai.simulation.contract.objects import Address
@inject
@singleton
@dataclass
class Balances(object):
"""
Tracks balances in the simulation.
"""
_logger: Logger
... |
# Objects for all smart contracts.
from dataclasses import dataclass, field
from typing import Optional
from injector import singleton
Address = str
""" An address that can receive funds and participate in training models. """
@dataclass
class Msg:
"""
A message sent to a smart contract.
:param sender:... |
from collections import Counter
from injector import inject
from sklearn.neighbors import NearestCentroid
# Purposely not a singleton so that it is easy to get a model that has not been initialized.
@inject
class NearestCentroidClassifier(NearestCentroid):
def fit(self, X, y):
self._num_samples_per_centr... |
import os
from sklearn.linear_model import SGDClassifier
from decai.simulation.contract.classification.scikit_classifier import SciKitClassifierModule
class PerceptronModule(SciKitClassifierModule):
def __init__(self, class_weight=None):
super().__init__(
_model_initializer=lambda: SGDClassi... |
import logging
from abc import ABC, abstractmethod
from typing import List
from decai.simulation.contract.objects import SmartContract
from decai.simulation.data.featuremapping.feature_index_mapper import FeatureIndexMapping
class Classifier(ABC, SmartContract):
"""
A classifier that can take a data sample a... |
import json
import logging
import os
import time
from dataclasses import dataclass
from logging import Logger
from pathlib import Path
from typing import Any, Callable, List
import joblib
import numpy as np
import scipy.sparse
from injector import ClassAssistedBuilder, Module, inject, provider
from sklearn.linear_mode... |
from decai.simulation.contract.classification.ncc import NearestCentroidClassifier
from decai.simulation.contract.classification.scikit_classifier import SciKitClassifierModule
class NearestCentroidClassifierModule(SciKitClassifierModule):
def __init__(self):
super().__init__(
_model_initializ... |
from skmultiflow.trees import HAT, RegressionHAT
from decai.simulation.contract.classification.scikit_classifier import SciKitClassifierModule
class DecisionTreeModule(SciKitClassifierModule):
def __init__(self, regression=False):
if regression:
model_initializer = lambda: RegressionHAT(
... |
import unittest
import numpy as np
from injector import Injector
from decai.simulation.contract.classification.classifier import Classifier
from decai.simulation.contract.classification.ncc_module import NearestCentroidClassifierModule
from decai.simulation.logging_module import LoggingModule
class TestNearestCentr... |
import random
import unittest
import numpy as np
from injector import Injector
from decai.simulation.contract.balances import Balances
from decai.simulation.contract.classification.classifier import Classifier
from decai.simulation.contract.classification.perceptron import PerceptronModule
from decai.simulation.contr... |
from collections import Counter
from logging import Logger
import math
from injector import inject, Module, singleton
from decai.simulation.contract.balances import Balances
from decai.simulation.contract.data.data_handler import StoredData
from decai.simulation.contract.incentive.incentive_mechanism import Incentive... |
import random
from collections import Counter, defaultdict
from dataclasses import dataclass, field
from enum import Enum
from hashlib import sha256
from logging import Logger
from typing import Dict, List, Optional, Tuple
import math
import numpy as np
from injector import ClassAssistedBuilder, inject, Module, provid... |
from abc import ABC, abstractmethod
import math
from decai.simulation.contract.data.data_handler import StoredData
from decai.simulation.contract.objects import Address, SmartContract
class IncentiveMechanism(ABC, SmartContract):
"""
Defines incentives for others to contribute "good" quality data.
"""
... |
import unittest
from collections import defaultdict
from typing import cast
from injector import Injector
from decai.simulation.contract.balances import Balances
from decai.simulation.contract.classification.perceptron import PerceptronModule
from decai.simulation.contract.data.data_handler import StoredData
from dec... |
from collections import defaultdict
from dataclasses import dataclass, field
from typing import Dict
import numpy as np
from injector import inject, singleton
from decai.simulation.contract.objects import Address, RejectException, SmartContract, TimeMock
@dataclass
class StoredData:
# Storing the data is not ne... |
import unittest
from queue import PriorityQueue
from decai.simulation.simulate import Agent
class TestAgent(unittest.TestCase):
def test_queue(self):
q = PriorityQueue()
agents = [
Agent('a1', 10, 1, 1, 1),
Agent('a2', 10, 1, 1, 1),
Agent('a0', 10, 1, 1, 1),
... |
from dataclasses import dataclass, field
from logging import Logger
from typing import List
import numpy as np
from injector import inject, Module
from sklearn.utils import shuffle
from tqdm import trange
from .data_loader import DataLoader
@inject
@dataclass
class TicTacToeDataLoader(DataLoader):
"""
Load ... |
import os
from dataclasses import dataclass, field
from logging import Logger
from typing import List
import numpy as np
import pandas as pd
from injector import inject, Module
from sklearn.utils import shuffle
from decai.simulation.data.data_loader import DataLoader
@inject
@dataclass
class TitanicDataLoader(DataL... |
from abc import ABC, abstractmethod
from typing import List
class DataLoader(ABC):
"""
Base class for providing simulation data.
"""
@abstractmethod
def classifications(self) -> List[str]:
"""
:return: The classifications for this dataset.
"""
pass
@abstractme... |
from dataclasses import dataclass
from logging import Logger
from typing import List
from injector import inject, Module
from keras.datasets import boston_housing
from decai.simulation.data.data_loader import DataLoader
@inject
@dataclass
class BhpDataLoader(DataLoader):
"""
Load data from Boston Housing Pr... |
import itertools
import json
import os
import random
import time
from collections import Counter
from dataclasses import dataclass
from enum import Enum
from logging import Logger
from operator import itemgetter
from pathlib import Path
from typing import Collection, List, Optional, Tuple
import numpy as np
import pan... |
from dataclasses import dataclass
from logging import Logger
from typing import List
import numpy as np
from injector import Binder, inject, Module
from decai.simulation.data.data_loader import DataLoader
@inject
@dataclass
class SimpleDataLoader(DataLoader):
"""
Load simple data for testing.
"""
_... |
import ast
import logging
import os
import re
import time
from collections import Counter
from dataclasses import dataclass, field
from logging import Logger
from pathlib import Path
from typing import List, Set, Tuple
import numpy as np
from injector import ClassAssistedBuilder, inject, Module, provider, singleton
fr... |
import html
import itertools
import os
from collections import Counter
from dataclasses import dataclass, field
from logging import Logger
from pathlib import Path
from typing import Dict, Iterator, List, Tuple
import numpy as np
import pandas as pd
import requests
from injector import ClassAssistedBuilder, Module, in... |
from dataclasses import dataclass, field
from logging import Logger
from typing import List
import numpy as np
from injector import ClassAssistedBuilder, Module, inject, provider, singleton
from keras.datasets import imdb
from .data_loader import DataLoader
@inject
@dataclass
class ImdbDataLoader(DataLoader):
"... |
import unittest
from typing import cast
from injector import Injector
from decai.simulation.data.data_loader import DataLoader
from decai.simulation.data.featuremapping.hashing.murmurhash3 import MurmurHash3Module
from decai.simulation.data.featuremapping.hashing.token_hash import TokenHash
from decai.simulation.data... |
import unittest
from typing import cast
from injector import Injector
from decai.simulation.data.data_loader import DataLoader
from decai.simulation.data.news_data_loader import NewsDataLoader, NewsDataModule
from decai.simulation.logging_module import LoggingModule
class TestNewsDataLoader(unittest.TestCase):
... |
import unittest
from typing import cast
from injector import Injector
from decai.simulation.data.data_loader import DataLoader
from decai.simulation.data.ttt_data_loader import TicTacToeDataLoader, TicTacToeDataModule
from decai.simulation.logging_module import LoggingModule
class TestTicTacToeDataLoader(unittest.T... |
import unittest
from typing import cast
from injector import Injector
from decai.simulation.data.data_loader import DataLoader
from decai.simulation.data.fitness_data_loader import FitnessDataLoader, FitnessDataModule
from decai.simulation.logging_module import LoggingModule
class TestFitnessDataLoader(unittest.Tes... |
from typing import List, Optional, Tuple
import numpy as np
from injector import singleton
FeatureIndexMapping = List[int]
@singleton
class FeatureIndexMapper:
"""
Helps with mapping sparse data matrices to compact dense ones
since some classifiers don't work well with sparse data:
* SGDClassifier t... |
import unittest
import numpy as np
import scipy.sparse
from injector import Injector
from decai.simulation.data.featuremapping.feature_index_mapper import FeatureIndexMapper
from decai.simulation.logging_module import LoggingModule
class TestFeatureIndexMapper(unittest.TestCase):
@classmethod
def setUpClass... |
import mmh3
from injector import Module
from decai.simulation.data.featuremapping.hashing.token_hash import TokenHash
class MurmurHash3(TokenHash):
    """`TokenHash` backed by the 32-bit MurmurHash3 algorithm (via `mmh3`)."""

    def hash(self, text: str) -> int:
        """Return the unsigned MurmurHash3 digest of `text`.

        Kept equivalent to the JavaScript demo code, hence `signed=False`.
        """
        digest = mmh3.hash(text, signed=False)
        return digest
class MurmurHash3Module... |
from abc import ABC, abstractmethod


class TokenHash(ABC):
    """Interface for mapping tokens to unsigned integers.

    Implementations turn a token string into a non-negative int,
    which is useful for sparse representation.
    """

    @abstractmethod
    def hash(self, text: str) -> int:
        """Map `text` to an unsigned integer hash value."""
        raise NotImplementedError
|
import unittest
from decai.simulation.data.featuremapping.hashing.murmurhash3 import MurmurHash3
class TestMurmurHash3(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.h = MurmurHash3()
def test_classifications(self):
h = self.h.hash("hey")
assert type(h) == int
... |
from setuptools import setup, find_packages
setup(
name='accbpg',
version='0.2',
packages=find_packages(exclude=['tests*']),
license='MIT',
description='Accelerated Bregman proximal gradient (ABPG) methods',
long_description=open('README.md').read(),
long_description_content_type='text/mark... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import numpy as np
class RSmoothFunction:
"""
Relatively-Smooth Function, can query f(x) and gradient
"""
def __call__(self, x):
assert 0, "RSmoothFunction: __call__(x) is not defined"
... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import numpy as np
import time
def BPG(f, h, L, x0, maxitrs, epsilon=1e-14, linesearch=True, ls_ratio=1.2,
verbose=True, verbskip=1):
"""
Bregman Proximal Gradient (BGP) method for min_{x in C} f(x) + Psi(x... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import numpy as np
from .functions import *
from .utils import load_libsvm_file
def D_opt_libsvm(filename):
"""
Generate a D-Optimal Design problem from LIBSVM datasets
"""
X, y = load_libsvm_file(filename)... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from .functions import *
from .algorithms import BPG, ABPG, ABPG_expo, ABPG_gain, ABDA
from .applications import D_opt_libsvm, D_opt_design, D_opt_KYinit, Poisson_regrL1, Poisson_regrL2, KL_nonneg_regr
from .D_opt_alg import... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import numpy as np
#import matplotlib.pyplot as plt
from matplotlib.pyplot import *
def plot_comparisons(axis, y_vals, labels, x_vals=[], plotdiff=False,
yscale="linear", xscale="linear",
... |
import os.path
import numpy as np
import scipy.sparse as sparse
def _open_file(filename):
_, ext = os.path.splitext(filename)
if ext == '.gz':
import gzip
return gzip.open(filename, 'rt')
elif ext == '.bz2':
import bz2
return bz2.open(filename, 'rt')
else:
retu... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import numpy as np
import time
def D_opt_FW(V, x0, eps, maxitrs, verbose=True, verbskip=1):
"""
Solve the D-optimal design problem by the Frank-Wolfe algorithm
minimize - log(det(V*diag(x)*V'))
... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""
Example of logistic regression with L1 regularization and Linf bounds
minimize_x f(x) = (1/m) * sum_{i=1}^m log(1 + exp(-b_i*(ai'*x)))
subject to x in R^n, and ||x||_inf <= B
The objective f is 1-relative s... |
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import numpy as np
import matplotlib.pyplot as plt
from .functions import *
def plotTSE(h, dim=10, nTriples=10, nThetas=100, R=1, onSimplex=True,
randseed=-1):
"""
Plot estimated triangle scaling expon... |
"""
This demo code for Adafruit's CircuitPlayground Express (CPX) is
compatible with the Device Simulator Express Visual Studio Code extension.
The extension allows you to code CircuitPython for your
CircuitPlayground Express (CPX) by testing and debugging on
the device simulator, before running your code on the actua... |
import os
import logging
import flask
from flask import request, jsonify
from flask import json
from flask_cors import CORS
from dapr.clients import DaprClient
logging.basicConfig(level=logging.INFO)
app = flask.Flask(__name__)
CORS(app)
@app.route('/order', methods=['GET'])
def getOrder():
app.logger.info('orde... |
import logging
from typing import Optional, Dict, Any, List, Tuple, NamedTuple
import torch
from torch import nn
from data.edits import Edit
from dpu_utils.ptutils import BaseComponent
from mlcomponents.seqdecoding import SeqDecoder
from mlcomponents.seqencoder import SequenceEncoder
class CopyEditor(BaseComponent)... |
from collections import Hashable
from typing import Optional, Dict, Any, NamedTuple
import numpy as np
import torch
from dpu_utils.mlutils import Vocabulary
from torch import nn
from torch.nn.utils.rnn import pack_padded_sequence
from data.edits import ChangeType, sequence_diff, AlignedDiffRepresentation
from dpu_uti... |
#!/usr/bin/env python3
"""
Test the ability of the model to do one-shot generation, given an edit representation of a different sample of the same edit type.
Usage:
oneshotgentesting.py [options] MODEL_FILENAME DATA
Options:
--azure-info=<path> Azure authentication information file (JSON). Used to load ... |
#!/usr/bin/env python
"""
Usage:
outputparallelpredictions.py [options] MODEL_FILENAME TEST_DATA OUT_PREFIX
Options:
--azure-info=<path> Azure authentication information file (JSON). Used to load data from Azure storage.
--data-type=<type> The type of data to be used. Possible options fce, c... |
#!/usr/bin/env python
"""
Usage:
tsnejson.py [options] MODEL_FILENAME TEST_DATA OUT_PATH
Options:
--azure-info=<path> Azure authentication information file (JSON). Used to load data from Azure storage.
--data-type=<type> The type of data to be used. Possible options fce, code, wikiatomicedit... |
from typing import Optional
from pytorch_transformers import BertConfig
from editrepcomponents.alignededitencoder import AlignedEditTokensEmbedding
from mlcomponents.seqdecoding.spancopydecoder import GruSpanCopyingDecoder
from mlcomponents.seqencoder import BiGruSequenceEncoder
from editrepcomponents.copyeditor impo... |
#!/usr/bin/env python
"""
Usage:
test.py [options] MODEL_FILENAME TEST_DATA
Options:
--azure-info=<path> Azure authentication information file (JSON). Used to load data from Azure storage.
--data-type=<type> The type of data to be used. Possible options fce, code, wikiatomicedits, wikiedits.... |
#!/usr/bin/env python
"""
Usage:
testencdec.py [options] MODEL_FILENAME TEST_DATA
Options:
--azure-info=<path> Azure authentication information file (JSON). Used to load data from Azure storage.
--data-type=<type> The type of data to be used. Possible options fce, code, wikiatomicedits, wiki... |
#!/usr/bin/env python
"""
Usage:
train.py [options] TRAIN_DATA_PATH VALID_DATA_PATH MODEL_TYPE TARGET_MODEL_FILENAME
train.py [options] --split-valid TRAIN_DATA_PATH MODEL_TYPE TARGET_MODEL_FILENAME
Options:
--azure-info=<path> Azure authentication information file (JSON). Used to load data from Azu... |
import sys
import streamlit as st
import matplotlib.pyplot as plt
import numpy as np
from dpu_utils.utils import RichPath
from data.edits import Edit
from dpu_utils.ptutils import BaseComponent
'''
# Copy Span Visualization
'''
model_path = sys.argv[1]
@st.cache
def get_model(filename):
path = RichPath.create(f... |
#!/usr/bin/env python3
"""
Save the edit representations
Usage:
exportrepresentations.py [options] MODEL_FILENAME DATA OUT_FILE
Options:
--azure-info=<path> Azure authentication information file (JSON). Used to load data from Azure storage.
--data-type=<type> The type of data to be used. Pos... |
#!/usr/bin/env python
"""
Usage:
score.py [options] MODEL_FILENAME
Options:
--azure-info=<path> Azure authentication information file (JSON). Used to load data from Azure storage.
--cpu Use cpu only.
--verbose Print predictions to console.
--quiet ... |
import logging
import numpy as np
from dpu_utils.utils import run_and_debug, RichPath
from data.representationviz import RepresentationsVisualizer
from data.synthetic.charedits import get_dataset
from editrepcomponents.alignededitencoder import AlignedEditTokensEmbedding
from dpu_utils.ptutils import ComponentTrainer... |
import logging
import random
import numpy as np
from dpu_utils.utils import run_and_debug, RichPath
from data.representationviz import RepresentationsVisualizer
from data.synthetic.charedits import get_dataset
from editrepcomponents.alignededitencoder import AlignedEditTokensEmbedding
from dpu_utils.ptutils import B... |
import logging
from typing import Set
from dpu_utils.utils import run_and_debug, RichPath
from data.edits import Edit
from dpu_utils.ptutils import ComponentTrainer
from mlcomponents.seqencoder import BiGruSequenceEncoder
from mlcomponents.embeddings import TokenSequenceEmbedder
from mlcomponents.encoderdecoder impor... |
import logging
from typing import Optional, Dict, Any, List, Tuple, NamedTuple
import torch
from data.edits import Edit
from dpu_utils.ptutils import BaseComponent
from mlcomponents.seqdecoding import SeqDecoder
from mlcomponents.seqencoder import SequenceEncoder
class EncoderDecoder(BaseComponent):
LOGGER = lo... |
from .sequenceencoder import SequenceEncoder
from .bigruencoder import BiGruSequenceEncoder

# __all__ entries must be name strings, not the objects themselves:
# listing the classes makes `from <package> import *` raise
# "TypeError: Item in __all__ must be str" in Python 3.
__all__ = ["SequenceEncoder", "BiGruSequenceEncoder"]
|
from typing import Optional, Dict, Any, Tuple, Union
import torch
from torch import nn
from torch.nn.utils.rnn import pad_packed_sequence
from mlcomponents.embeddings import SequenceEmbedder
from .sequenceencoder import SequenceEncoder
class BiGruSequenceEncoder(SequenceEncoder):
def __init__(self, name: str, t... |
from abc import ABC, abstractmethod
from typing import Optional, Dict, Any, Tuple, List, Union
import torch
from dpu_utils.ptutils import BaseComponent
from mlcomponents.embeddings import SequenceEmbedder
class SequenceEncoder(BaseComponent, ABC):
"""
A general encoder of sequences.
"""
def __init_... |
from abc import ABC, abstractmethod
from typing import Union, Tuple, List, Any
import torch
from dpu_utils.mlutils import Vocabulary
from torch.nn.utils.rnn import PackedSequence, pack_padded_sequence
from dpu_utils.ptutils import BaseComponent
class SequenceEmbedder(BaseComponent, ABC):
@property
@abstract... |
from .sequenceembedder import SequenceEmbedder
from .tokensequenceembedder import TokenSequenceEmbedder

# __all__ entries must be name strings, not the objects themselves:
# listing the classes makes `from <package> import *` raise
# "TypeError: Item in __all__ must be str" in Python 3.
__all__ = ["SequenceEmbedder", "TokenSequenceEmbedder"]
import logging
from collections import Counter
import typing
from typing import Optional, Dict, Any, List, NamedTuple
import numpy as np
import torch
from dpu_utils.mlutils import Vocabulary
from torch import nn
from mlcomponents.embeddings.sequenceembedder import SequenceEmbedder
class TokenSequenceEmbedder(Sequen... |
from typing import Optional, Dict, Any
import torch
from torch import nn
from dpu_utils.ptutils import BaseComponent
class LuongAttention(BaseComponent):
"""
A Luong-style attention that also includes the inner product of targets-lookup
"""
def __init__(self, name: str, hyperparameters: Optional[Dic... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.