content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def getOrc(orcName):
    """Return an orchestra previously stored in the user namespace.

    Orchestras are stored under the ``__orc`` key of the IPython user
    namespace (populated by the %%orc magic).

    :param orcName: name the orchestra was stored under
    :return: the stored orchestra object
    """
    shell = get_ipython()
    orchestras = shell.user_ns["__orc"]
    return orchestras[orcName]
def check_dwi_volume(in_dwi, in_bvec, in_bval):
# TODO return str instead of raising an error, so that user can be informed of which file is causing problem
"""
Check that # DWI = # B-val = # B-vec.
Raises
IOError
"""
import nibabel as nib
import numpy as np
bvals = np.loadtxt(... | 5,357,501 |
def get_prompt_data_from_batse(grb: str, **kwargs: None) -> pd.DataFrame:
"""Get prompt emission data from BATSE. Creates a directory structure and saves the data.
Returns the data, though no further action needs to be taken by the user.
:param grb: Telephone number of GRB, e.g., 'GRB140903A' or '140903A' ... | 5,357,502 |
def angular_error(a, b):
"""Calculate angular error (via cosine similarity)."""
a = pitchyaw_to_vector(a) if a.shape[1] == 2 else a
b = pitchyaw_to_vector(b) if b.shape[1] == 2 else b
ab = np.sum(np.multiply(a, b), axis=1)
a_norm = np.linalg.norm(a, axis=1)
b_norm = np.linalg.norm(b, axis=1)
... | 5,357,503 |
def includeme(config):
"""
Get build Git repository directory and make it accessible
to all requests generated via Cornice
"""
# Make DB connection accessible as a request property
def _get_repos(request):
_settings = request.registry.settings
repo_dir = _settings['repo_basedir'... | 5,357,504 |
def add_dot_csv(filename: Union[Path, str]) -> str:
    """Return *filename* with a '.csv' extension added."""
    csv_ext = '.csv'
    return add_extension(filename, csv_ext)
def main():
    """
    Main entry point (Python 2).

    Looks up Microsoft Edge's SID in the Windows registry and enables the
    network loopback exemption for it, printing the outcome to the user
    (user-facing messages are in Chinese and are left untouched).
    """
    app_sid = detect_edge_sid()
    if app_sid is None:
        # SID not found in the registry; nothing to activate.
        print u'注册表中没有找到Edge的sid'
        return
    if enable_loopback(app_sid):
        print u'激活成功!'
    else:
        print u'激活失败。。。' | 5,357,506 |
def load_settings_from_file(filename: str) -> Dict[str, Any]:
"""Load amset configuration settings from a yaml file.
If the settings file does not contain a required parameter, the default
value will be added to the configuration.
An example file is given in *amset/examples/example_settings.yaml*.
... | 5,357,507 |
def test_main():
"""Executes the list of steps in parallel
Computes and returns the testcase status"""
tree = ET.parse(os.path.join(os.path.split(__file__)[0], "testcase_custom_par.xml"))
timestamp = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
with open(result_dir+'/'+'resultfile.txt', 'w'... | 5,357,508 |
def execute_query(query, *arguments):
    """Execute *query* on the application database and return all rows.

    :param query: SQL statement, possibly containing placeholders.
    :param arguments: positional values bound to the placeholders.
    :return: list of fetched rows.
    """
    database = labpals.model.get_db()
    result_cursor = database.execute(query, arguments)
    return result_cursor.fetchall()
def CreateClientPool(n):
"""Create n clients to run in a pool."""
clients = []
# Load previously stored clients.
try:
certificates = []
with open(flags.FLAGS.cert_file, "rb") as fd:
# Certificates are base64-encoded, so that we can use new-lines as
# separators.
for l in fd:
c... | 5,357,510 |
def format_search_events_results(response: Dict[str, Any], limit: int) -> tuple:
"""
Format the output of the search events results command.
Args:
response (Dict[str,Any]): API response from FortiSIEM.
limit (int):Maximum number of results to retrieve.
Returns:
str: Formatted comm... | 5,357,511 |
def radec_obs_vec_mpc(inds, mpc_object_data):
"""Compute vector of observed ra,dec values for MPC tracking data.
Args:
inds (int array): line numbers of data in file
mpc_object_data (ndarray): MPC observation data for object
Returns:
rov (1xlen(inds) array): vector o... | 5,357,512 |
def mod(x, y) -> ProcessBuilder:
    """
    Modulo

    :param x: A number to be used as the dividend.
    :param y: A number to be used as the divisor.

    :return: The remainder after division.
    """
    kwargs = {'x': x, 'y': y}
    return _process('mod', **kwargs)
def test_person_info_photosdb_v5(photosdb5):
""" Test PersonInfo object """
import json
test_key = "katie_5"
katie = [p for p in photosdb5.person_info if p.uuid == UUID_DICT[test_key]][0]
assert katie.facecount == 3
assert katie.name == "Katie"
assert katie.display_name == "Katie"
phot... | 5,357,514 |
def find_test_file_loc(test_data_dir):
"""
Return a new, unique and non-existing base name location suitable to create
a new copyright test.
"""
template = "copyright_{}.txt"
idx = 1
while True:
test_file_loc = path.join(test_data_dir, template.format(idx))
if not path.exists... | 5,357,515 |
def get_list_of_encodings() -> list:
    """
    Get a list of all implemented encodings.

    ! Adapt if new encoding is added !

    :return: List of all possible encodings
    """
    supported = ('raw', '012', 'onehot', '101')
    return list(supported)
def region_to_bin(chr_start_bin, bin_size, chr, start):
"""Translate genomic region to Cooler bin idx.
Parameters:
----------
chr_start_bin : dict
Dictionary translating chromosome id to bin start index
bin_size : int
Size of the bin
chr : str
Chromosome
start : int
... | 5,357,517 |
def loop_invariant_statement_but_name_while():
    """Catch basic loop-invariant function call."""
    # Lint-checker fixture: the bare name expression in the loop body is
    # deliberately loop-invariant so the analyzer can flag it. Do not
    # "fix" or remove it.
    i = 6
    for _ in range(10_000):
        i | 5,357,518 |
def Wspan_plot(rv, ccf, rvunits='km/s'):
    """ Make a pretty plot of the line profile and the Wspan """
    # Not implemented yet: placeholder kept so callers/imports resolve.
    pass | 5,357,519 |
def test_get_profile_topics(
api_client, enable_premium_requirement, profile_topic_factory, user_factory
):
"""
Premium users should be able to list their own profile topics.
"""
password = "password"
user = user_factory(has_premium=True, password=password)
api_client.log_in(user.primary_ema... | 5,357,520 |
def test_get_object_not_in_indexd(client):
"""
Test the GET object endpoint when the provided key does NOT exist
in indexd, or when indexd errors.
If the key exists in MDS, the metadata should be returned regardless
of a non-200 response from indexd.
"""
guid_or_alias = "dg.hello/test_guid"
... | 5,357,521 |
def get_img_name(img_path: str):
    """
    Get the name from the image path.

    Args:
        img_path (str): a/b.jpg or a/b.png ...
    Returns:
        name (str): a/b.jpg -> b
    """
    file_part = os.path.basename(img_path)
    # Everything before the first dot is the name.
    return file_part.split('.')[0]
def cvCalcProbDensity(*args):
    """
    cvCalcProbDensity(CvHistogram hist1, CvHistogram hist2, CvHistogram dst_hist,
        double scale=255)

    Thin SWIG-style wrapper: forwards all positional arguments to the
    underlying OpenCV binding unchanged.
    """
    return _cv.cvCalcProbDensity(*args) | 5,357,523 |
def get_filter_para(node_element):
"""Return paragraph containing the used filter description"""
para = nodes.paragraph()
filter_text = "Used filter:"
filter_text += " status(%s)" % " OR ".join(node_element["status"]) if len(
node_element["status"]) > 0 else ""
if len(node_element["status"])... | 5,357,524 |
def mk_multi_line_figax(nrows, ncols, xlabel='time (s)', ylabel='signal (a.u.)'):
"""
Create the figure and axes for a
multipanel 2d-line plot
"""
# ncols and nrows get
# restricted via the plotting frontend
x_size = ncols * pltConfig['mXSize']
y_size = nrows * pltConfig['mYSize']
... | 5,357,525 |
def execute_test(test_function=None,
supported_fmts=[], supported_oses=['linux'],
supported_cache_modes=[], unsupported_fmts=[],
supported_protocols=[], unsupported_protocols=[]):
"""Run either unittest or script-style tests."""
# We are using TEST_DIR and QEM... | 5,357,526 |
def load_sizes(infile_path: str, header: bool=None):
"""
Load and parse a gtf file. More information on the gtf format is here:
https://asia.ensembl.org/info/website/upload/gff.html
Arguments:
(REQUIRED) infile_path: path to gtf file
(OPTIONAL) header: headers in size file (DEFAULT:... | 5,357,527 |
def latent_tree_mutate(g):
"""Produce an offspring genome by mutation through dict
manipulation. Choose a random key in the dict, and overwrite its
value with a random int. Later, repair must make sure the
offspring is valid, including using the mod rule to map from a
(possibly) large int to the cor... | 5,357,528 |
def AddResourceUsageExportFlags(parser, is_update=False, hidden=False):
"""Adds flags about exporting cluster resource usage to BigQuery."""
group = parser.add_group(
"Exports cluster's usage of cloud resources", hidden=hidden)
if is_update:
group.is_mutex = True
group.add_argument(
'--clea... | 5,357,529 |
def store_restore_example():
"""Connects to the first scanned drive and store and restores the
current configuration."""
net = CanopenNetwork(device=CAN_DEVICE.IXXAT,
channel=0,
baudrate=CAN_BAUDRATE.Baudrate_1M)
nodes = net.scan_slaves()
print(nodes... | 5,357,530 |
def load_model(file_path: string):
"""
Used to serialize an save a trained model, so it can be reused later on again.
-----------------------------------------------------------------------------------
Parameters:
-----------------------------------------------------------------------------------
... | 5,357,531 |
def clear_response_status(response):
    """
    Remove the 'status_type' and 'status_msg' cookies from the given response.

    :param response: the response being processed
    """
    for cookie_name in ('status_type', 'status_msg'):
        response.delete_cookie(key=cookie_name)
def gfs_mos_forecast(stid, forecast_date):
"""
Do the data retrieval.
"""
# Generate a Forecast object
forecast = Forecast(stid, default_model_name, forecast_date)
import numpy as np
forecast.daily.high = np.round(np.random.rand() * 100.)
forecast.daily.low = np.round(np.random.rand() ... | 5,357,533 |
def print_mem_info(title = "Memory Info", device_id = None):
"""
Prints the memory used for the specified device.
Parameters
----------
title: optional. Default: "Memory Info"
- Title to display before printing the memory info.
device_id: optional. Default: None
- Specifies the de... | 5,357,534 |
def evaluate_accuracy_score(preprocessing, prediction_binary):
"""
Evaluates the accuracy score
:param preprocessing: prepared DataPreprocess instance
:param prediction_binary: boolean expression for the predicted classes
"""
accuracy = []
for j in range(len(DETECTION_CLASSES)):
... | 5,357,535 |
def test_domain_visualize(case, visu_case):
    """
    Check that a domain can be visualized; returns the resulting figure.
    """
    domain = pylbm.Domain(case)
    rendered = domain.visualize(**visu_case)
    return rendered.fig
def create_supervised_evaluator(model, metrics,
device=None):
"""
Factory function for creating an evaluator for supervised models
Args:
model (`torch.nn.Module`): the model to train
metrics (dict of str - :class:`ignite.metrics.Metric`): a map of metric name... | 5,357,537 |
def get_available_operations():
    """Return (True, dict of available operations) from the runtime."""
    ok = True
    return ok, runtime.get_available_operations()
def word_distance(word1, word2):
    """Computes the number of differences between two words.

    word1, word2: equal-length strings
    Returns: integer count of positions where the words differ
    """
    assert len(word1) == len(word2)
    return sum(1 for a, b in zip(word1, word2) if a != b)
def rmLineno(node):
    """Strip lineno attributes from a code tree.

    Recursively deletes the ``lineno`` attribute from *node* and from
    every descendant reachable via ``getChildren()``.

    :param node: root of a ``compiler``-style AST (nodes expose
        ``getChildren()``).
    """
    # 'in' works on both Python 2 and 3; dict.has_key() was removed in
    # Python 3, so the original form breaks there.
    if 'lineno' in node.__dict__:
        del node.lineno
    for child in node.getChildren():
        if isinstance(child, ast.Node):
            rmLineno(child)
def get_parent_child(root: dict,
path: str) -> Union[Tuple[Tuple[None, None],
Tuple[None, None]],
Tuple[Tuple[dict, None],
Tuple[Any, str]],
... | 5,357,541 |
def get_data_loaders(personachat, tokenizer, args_num_candidates=1, args_personality_permutations=1, args_max_history=2):
""" Prepare the dataset for training and evaluation """
print("Build inputs and labels")
datasets = {"train": defaultdict(list), "valid": defaultdict(list)}
for dataset_name, da... | 5,357,542 |
def remote(ctx, verbose: bool = False, debug: bool = False):
"""List remotes if no additional command is provided"""
if not ctx.invoked_subcommand:
try:
with fdp_session.FAIR(os.getcwd(), debug=debug) as fair_session:
fair_session.list_remotes(verbose)
except fdp_exc.... | 5,357,543 |
def main(inargs):
"""
Main routine for evaluation. Saves the CRPS for all experiments
and plots a histogram.
"""
assert inargs.date_start == '2016-01-01' and inargs.date_stop == '2017-01-01', \
'Flexible dates not implemented.'
# Get observation data
raw_crps = prepare_obs_df_and_c... | 5,357,544 |
def polyMergeUV(q=1,e=1,cch=1,ch=1,d="float",n="string",nds="int",uvs="string",ws=1):
"""
http://help.autodesk.com/cloudhelp/2019/ENU/Maya-Tech-Docs/CommandsPython/polyMergeUV.html
-----------------------------------------
polyMergeUV is undoable, queryable, and editable.
Merge UVs of an object based on their ... | 5,357,545 |
def unescape_strict(s):
    """
    Re-implements html.unescape to use our own definition of `_charref`.
    """
    if '&' in s:
        # Only strings containing an ampersand can hold character references.
        return _charref.sub(_replace_charref, s)
    return s
def CheckVPythonSpec(input_api, output_api, file_filter=None):
"""Validates any changed .vpython files with vpython verification tool.
Args:
input_api: Bag of input related interfaces.
output_api: Bag of output related interfaces.
file_filter: Custom function that takes a path (relative to client root)... | 5,357,547 |
def assert_is_valid_notebook(nb):
"""These are the current assumptions on notebooks in these tests. Loosen on demand."""
assert nb["nbformat"] == 4
# assert nb["nbformat_minor"] == 0
assert isinstance(nb["metadata"], dict)
assert isinstance(nb["cells"], list)
assert all(isinstance(cell, dict) fo... | 5,357,548 |
def _read_from_file(paramfile):
"""
Code to load parameter data from a YAML file, moved out of
check_metadata_format to allow different inputs to that function.
:param paramfile: The parameter file created by 'precheck_data_format' and
'select_data_templates'.
:type paramfile: str
"""
... | 5,357,549 |
def test_infer_errs() -> None:
"""Test inference applied to functions."""
with f.Fun(MockServer()):
a = f.put(b"bla bla")
b = f.put(3)
with pytest.raises(TypeError):
f.py(lambda x, y, z: (x, y), a, a, b)
# should NOT raise
f.py(
lambda x, y, z: (x... | 5,357,550 |
def decode_captions(captions, idx_to_word):
""" Decode text captions from index in vocabulary to words.
"""
if captions.ndim == 1:
T = captions.shape[0]
N = 1
else:
N, T = captions.shape
decoded = []
for i in range(N):
words = []
for t in range(T):
... | 5,357,551 |
def save_kdeplot(df: DataFrame,
output_plot: str,
x_name: str,
title: str,
color: str,
x_label: str = None,
y_label: str = None,
normalize_x: bool = True,
fig_size: Tuple[int] = (24, 1... | 5,357,552 |
def make_loc(caller):
    """
    Turn a caller location (mapping with 'file' and 'line' keys) into a
    "file:line" string.
    """
    return "{0}:{1}".format(caller["file"], str(caller["line"]))
def diff(file1, file2):
"""
Compare two files, ignoring line end differences
If there are differences, print them to stderr in unified diff format.
@param file1 The full pathname of the first file to compare
@param file2 The full pathname of the second file to compare
@return True if the files... | 5,357,554 |
def run_part2(file_content):
    """Implementation for Part 2.

    Parses whitespace-separated integers, builds the tree and returns
    the value of its root node.
    """
    numbers = (int(token) for token in file_content.split())
    tree_root = _build_tree(numbers)
    return _node_value(tree_root)
def get_cap_selected_frame(cap, show_frame):
    """
    Seek an OpenCV video capture to *show_frame* and return that frame,
    or None when the read fails.
    """
    cap_set_frame(cap, show_frame)
    ok, frame = cap.read()
    return frame if ok else None
def rotate_coords_x(pos, angle):
""" Rotate a set of coordinates about the x-axis
:param pos: (n, 3) xyz coordinates to be rotated
:param angle: angle to rotate them by w.r.t origin
:type pos: numpy.ndarray
:type angle: float
:return: array of rotated coordinates
:rtype: numpy.ndarray
... | 5,357,557 |
def q_learning(env, num_episodes, discount_factor=1.0, alpha=0.5, epsilon=0.1):
"""
Q-Learning algorithm: Off-policy TD control. Finds the optimal greedy policy
while following an epsilon-greedy policy
Args:
env: OpenAI environment.
num_episodes: Number of episodes to run for.
d... | 5,357,558 |
def test_empty_document(text_window):
    """Empty text document can be rendered."""
    # Smoke test: pumping events and closing must not raise on a window
    # holding an empty document.
    text_window.dispatch_events()
    text_window.close() | 5,357,559 |
def write_quality_stats(input_files, output_file):
"""
Iterate over a list of fastqc output files and generate a dataframe
containing summary statistics for each file, then write the result
to disk.
"""
quality_df = get_quality_stats(input_files)
quality_df.to_csv(output_file, sep='\t', in... | 5,357,560 |
def test_delete_contact_no_token_returns_401(client, session): # pylint:disable=unused-argument, invalid-name
"""Assert that deleting a contact without a token returns a 401."""
rv = client.delete('/api/v1/users/contacts', headers=None, content_type='application/json')
assert rv.status_code == http_status.... | 5,357,561 |
def run_experiment(hparams):
"""Run the training and evaluate using the high level API"""
train_input = model._make_training_input_fn(
hparams.tft_working_dir,
hparams.train_filebase,
num_epochs=hparams.num_epochs,
batch_size=hparams.train_batch_size,
buffer_size=hparams.train_buffer_s... | 5,357,562 |
def find_rand_source_reg():
"""Find random source register based on readAfterWrite probability"""
prob=random.uniform(0,1)
while len(previousIntegerSources)>numberOfPreviousRegistersToConsider:
previousIntegerSources.popleft()
if prob<readAfterWrite and previousIntegerDestinations:
num=random.choice(previousInt... | 5,357,563 |
def _get_qnode_class(device, interface, diff_method):
"""Returns the class for the specified QNode.
Args:
device (~.Device): a PennyLane-compatible device
interface (str): the interface that will be used for classical backpropagation
diff_method (str, None): the method of differentiatio... | 5,357,564 |
def test_alamut_link(app, institute_obj, variant_obj):
"""Test to add a link to alamut browser"""
# GIVEN an institute with settings for Alamut Visual Plus
alamut_api_key = "test_alamut_key"
institute_obj["alamut_key"] = alamut_api_key
# GIVEN that genome build 38 is provided
build = 38
#... | 5,357,565 |
def evaluate_and_log_bleu(model, bleu_source, bleu_ref, vocab_file):
"""Calculate and record the BLEU score."""
subtokenizer = tokenizer.Subtokenizer(vocab_file)
uncased_score, cased_score = translate_and_compute_bleu(
model, subtokenizer, bleu_source, bleu_ref)
tf.compat.v1.logging.info("Bleu score (un... | 5,357,566 |
def transform_call(red_node):
"""
Converts Python style function calls to VHDL style:
self.d(a) -> d(self, a)
If function owner is not exactly 'self' then 'type' is prepended.
self.next.moving_average.main(x) -> type.main(self.next.moving_average, x)
self.d(a) -> d(self, a)
self.next.d(a) ... | 5,357,567 |
def delta_next_time_to_send(G, u, v):
"""How long to wait before U should send a message to V under diffusion
spreading. Per the Bitcoin protocol, this depends on if we have an outgoing
connection or an incoming connection."""
is_outgoing = G[u][v][ORIGINATOR] == u
average_interval_seconds = 2 if is... | 5,357,568 |
def processData(list_pc, imo):
"""
Cette fonction traite les données de getData pour écrire une seule string
prête à être copié dans le csv et qui contient toutes les lignes d'un bateau
"""
str_pc = ''
for i in range(len(list_pc)):
if list_pc[i] == 'Arrival (UTC)':
tab = list... | 5,357,569 |
def wrn(num_classes):
    """Constructs a wideres-28-10 model without dropout.

    :param num_classes: number of output classes.
    """
    depth, widen_factor, dropout_rate = 28, 10, 0
    return Wide_ResNet(depth, widen_factor, dropout_rate, num_classes)
def info():
"""
Print application and environment version info
"""
print(f"PlexTraktSync Version: {get_version()}")
py_version = sys.version.replace("\n", "")
print(f"Python Version: {py_version}")
print(f"Plex API Version: {PLEX_API_VERSION}")
print(f"Trakt API Version: {TRAKT_API_VER... | 5,357,571 |
def test_find_dup_timestamps_4():
    """Timestamps 500, 1200 should be detected as duplicate."""
    stamps = [0, 100, 200, 300, 500, 500, 600, 1200, 1200]
    dups = logfile_check.find_duplicate_timestamps(stamps)
    assert dups == [500, 1200]
def _state_worker_func(indices, programs, params):
"""Compute the wavefunction for each program in indices."""
x_np = _convert_complex_view_to_np(INFO_DICT['arr'], INFO_DICT['shape'])
simulator = INFO_DICT['sim']
for i, index in enumerate(indices):
result = simulator.simulate(programs[i], param... | 5,357,573 |
def run(
config: Dict[str, Any],
log_dir: str = "",
kernel_seed: int = 0,
kernel_random_state: Optional[np.random.RandomState] = None,
) -> Dict[str, Any]:
"""
Wrapper function that enables to run one simulation.
It does the following steps:
- instantiation of the kernel
- running of... | 5,357,574 |
def _print_result(case, summary):
""" Show some statistics from the run """
for dof, data in summary.items():
print(" " + case + " " + dof)
print(" -------------------")
for header, val in data.items():
print(" " + header + " : " + str(val))
print("") | 5,357,575 |
def python_to_pydict(script_contents, namespace=None):
    """Load a Python script with dictionaries into a dictionary.

    :param script_contents: source text to execute.
    :param namespace: optional dict to populate; a fresh one is created
        when omitted.
    :return: the namespace with keys lower-cased via ``to_lower``.
    """
    if namespace is None:
        namespace = {}
    # Use the function-call form of exec: the 'exec ... in ...' statement
    # is a syntax error on Python 3, while exec(src, globals, locals) is
    # accepted by both Python 2 and 3.
    exec(script_contents, {}, namespace)
    return to_lower(namespace)
def check(atomTypes):
    """
    Verify that every atom type's matrix has one entry per atom type.

    Prints the offending atom type and exits the program when a matrix
    of the wrong size is found.

    Parameters
    ----------
    atomTypes : list
        Objects exposing a ``matrix`` sequence.

    Returns
    -------
    None
    """
    expected_size = len(atomTypes)
    for atom_type in atomTypes:
        if len(atom_type.matrix) != expected_size:
            print(atom_type)
            sys.exit('Matrix with wrong size\n')
def convertInt(s):
    """Tells if a string can be converted to int and converts it

    Args:
        s : str
    Returns:
        s : str
            Standardized token 'INT' if s can be turned to an int, s otherwise
    """
    try:
        int(s)
    except ValueError:
        # Narrowed from a bare 'except': int(str) signals a non-integer
        # literal with ValueError; anything else is a real bug and should
        # propagate instead of being silently swallowed.
        return s
    return "INT"
def client_blocking_handler(messages, socket_object):
    """
    Applies for numbers: 1

    Placeholder handler: intentionally does nothing with *messages* or
    *socket_object*.
    """
    pass | 5,357,579 |
def test_dll_shift(dll_20):
    """Testing the shift() function of dll."""
    for _ in range(20):
        expected_tail = dll_20.tail.previous
        dll_20.shift()
        assert dll_20.tail == expected_tail
    # A 21st shift on the now-empty list must raise.
    with pytest.raises(IndexError):
        dll_20.shift()
def action2cls():
"""
Root
/home/ty/DB/mmaction2/data/classification_data
train
- normal
-foldername
-img_00001.jpg
-img_00002.jpg
- cancer
test
- normal
- cancer
To
train
- normal
foldername.jp... | 5,357,581 |
def pairwise_distance(A, B):
"""
Compute distance between points in A and points in B
:param A: (m,n) -m points, each of n dimension. Every row vector is a point, denoted as A(i).
:param B: (k,n) -k points, each of n dimension. Every row vector is a point, denoted as B(j).
:return: Matrix with (m... | 5,357,582 |
def print_information(
option, option_string, value, parser, option_manager=None
):
"""Print debugging information used in bug reports.
:param option:
The optparse Option instance.
:type option:
optparse.Option
:param str option_string:
The option name
:param value:
... | 5,357,583 |
def _split_full_name(full_name: str) -> Tuple[str, str, str]:
"""Extracts the `(ds name, config, version)` from the full_name."""
if not tfds.core.registered.is_full_name(full_name):
raise ValueError(
f'Parsing builder name string {full_name} failed.'
'The builder name string must be of the foll... | 5,357,584 |
def query_paginate(resources, arguments):
"""Return the resources paginated
Args:
resources(list): List to paginate
arguments(FormsDict): query arguments
Returns:
list: Paginated resource (asc or desc)
"""
if '_page' not in arguments:
return resources
page = i... | 5,357,585 |
def test_train(task_dataset, mocker):
"""Test train runs without crashing."""
wandb_log = mocker.patch.object(wandb, 'log')
probe = nn.Linear(N_DIMS_PER_REP, N_UNIQUE_FEATS)
before = probe.weight.data.clone()
learning.train(probe, task_dataset, epochs=EPOCHS, also_log_to_wandb=True)
after = pr... | 5,357,586 |
def convert_grad(graph):
"""Remove all instances of SymbolicKeyType in the graphs.
They will be replaced by globally-unique integers.
"""
mng = graph.manager
counter = 0
key_map = {}
for node in mng.all_nodes:
if node.is_constant(SymbolicKeyInstance):
if node.value not... | 5,357,587 |
def patch_marshmallow_fields():
"""
Patch marshmallow fields to look more like drf fields
"""
global _MA_FIELDS_PATCHED
if _MA_FIELDS_PATCHED:
return
ma_fields = {key: value
for key, value in globals().items()
if safe_issubclass(value, ma.fields.FieldABC... | 5,357,588 |
def site_data(db, settings):
"""Simple fake site data
"""
if organizations_support_sites():
settings.FEATURES['FIGURES_IS_MULTISITE'] = True
site_data = make_site_data()
ce = site_data['enrollments'][0]
lcgm = [
LearnerCourseGradeMetricsFactory(site=site_data['site'],
... | 5,357,589 |
def render_curve(name,
data,
x_range=None,
y_range=None,
x_label=None,
y_label=None,
legends=None,
legend_kwargs={},
img_height=None,
img_width=None,
... | 5,357,590 |
def CreateBooleanUnion1(breps, tolerance, manifoldOnly, multiple=False):
"""
Compute the Boolean Union of a set of Breps.
Args:
breps (IEnumerable<Brep>): Breps to union.
tolerance (double): Tolerance to use for union operation.
manifoldOnly (bool): If true, non-manifold input breps... | 5,357,591 |
def make_singleton_class(class_reference, *args, **kwargs):
"""
Make the given class a singleton class.
*class_reference* is a reference to a class type, not an instance of a class.
*args* and *kwargs* are parameters used to instantiate a singleton instance.
To use this, suppose we have a class c... | 5,357,592 |
def get_coaches(soup):
"""
scrape head coaches
:param soup: html
:return: dict of coaches for game
"""
coaches = soup.find_all('tr', {'id': "HeadCoaches"})
# If it picks up nothing just return the empty list
if not coaches:
return coaches
coaches = coaches[0].find... | 5,357,593 |
def _check_base_parsers(
base_parsers: List[base.BaseParser], attrdict: dict
) -> None:
"""Check that the base parser list fulfills all requirements."""
if base.BaseParser.REPO_DISCOVERY in base_parsers:
# the REPO_DISCOVERY parser requires both the STUDENTS parser and
# the api argument to ... | 5,357,594 |
async def test_sending_none_message(opp, events):
"""Test send with None as message."""
await setup_notify(opp)
with pytest.raises(vol.Invalid):
await opp.services.async_call(
notify.DOMAIN, notify.SERVICE_NOTIFY, {notify.ATTR_MESSAGE: None}
)
await opp.async_block_till_done(... | 5,357,595 |
def create_app(config_name):
"""
Factory to create Flask application context using config option found in
app.config
:param config_name: (string) name of the chosen config option
:return app: (Flask application context)
"""
logging.basicConfig(
filename="app.log",
filemode="... | 5,357,596 |
def _rectify_countdown_or_bool(count_or_bool):
"""
used by recrusive functions to specify which level to turn a bool on in
counting down yeilds True, True, ..., False
conting up yeilds False, False, False, ... True
Args:
count_or_bool (bool or int): if positive will count down, if negative
... | 5,357,597 |
def test_web_driver_available(testdir):
    """
    `web_driver` fixture should be available for test run.
    """
    run_result = testdir.runpytest('--fixtures')
    expected_line = ' Defines WebDriver instance with desired capabilities.'
    assert expected_line in run_result.stdout.lines
def POpen (inUV, access, err):
""" Open an image persistent (disk) form
inUV = Python UV object
access = access 1=READONLY, 2=WRITEONLY, 3=READWRITE
err = Python Obit Error/message stack
"""
################################################################
if ('myClass' in inUV.__... | 5,357,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.