content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def geopad(lon, lat, data, /, nlon=1, nlat=0):
"""
Return array padded circularly along longitude and over the poles for finite
difference methods.
"""
# Pad over longitude seams
if nlon > 0:
pad = ((nlon, nlon),) + (data.ndim - 1) * ((0, 0),)
data = np.pad(data, pad, mode='wrap'... | 5,357,600 |
def test_diffusion_constant():
"""Ensure the diffusion constant is giving a reasonable result."""
known_diffusion = 1e-3
offset = 1e-4
time = np.arange(10000)
msd = time*known_diffusion + offset
diff, diff_err = relaxation.diffusion_constant(time, msd)
assert np.isclose(diff, known_diffusion... | 5,357,601 |
def SL_EAKF(N,loc_rad,taper='GC',ordr='rand',infl=1.0,rot=False,**kwargs):
"""
Serial, covariance-localized EAKF.
Ref: Karspeck, Alicia R., and Jeffrey L. Anderson. (2007):
"Experimental implementation of an ensemble adjustment filter..."
Used without localization, this should be equivalent
(full ensemble... | 5,357,602 |
def compose_test_module_skeleton(module_file):
"""
Writes a pytest file based on the given module.
Args:
module_file (str): path to python module. e.g. "example_module.py"
"""
module = str(inspect.getmodulename(module_file))
test_module_file = f"test_{module}.py"
exec(f"import {modu... | 5,357,603 |
def CalculateNMaxNCharge(mol):
"""
#################################################################
Most negative charge on N atoms
-->QNmin
Usage:
result=CalculateNMaxNCharge(mol)
Input: mol is a molecule object.
Output: result is a numeric value.
#... | 5,357,604 |
def cnn_5l4(image, **kwargs):
"""
:param in: (TensorFlow Tensor) Image input placeholder
:param kwargs: (dict) Extra keywords parameters for the convolutional layers of the CNN
:return: (TensorFlow Tensor) The CNN output layer
"""
activ = tf.nn.relu
layer_1 = activ(conv(image, 'c1', n_filter... | 5,357,605 |
def purge_yaml(data):
"""Checks and converts data in basic types."""
basic_types = [int, float, text_type, list]
for key, value in data.items():
if isinstance(value, dict):
purge_yaml(value)
elif isinstance(value, date):
data[key] = value.i... | 5,357,606 |
def read_dns_data(dns_fn):
"""
Read data in from a DNS file
:param str dns_fn: The filename of the DNS
"""
fed = open(dns_fn, 'r')
begin_data = False
dns_data = {}
for line in fed.readlines():
if begin_data:
if "t = " in line:
... | 5,357,607 |
def arithmetic_mean(iterable):
    """Arithmetic mean that returns 0 for an empty input instead of NaN."""
    data = np.asarray(iterable)
    # Guard against the zero-length case, where ndarray.mean() would warn
    # and yield NaN.
    return data.mean() if data.size else 0
def play_process(url):
    """Spawn and return an ffmpeg process that streams *url* to the default ALSA output."""
    command = f'ffmpeg -i {url} -f alsa default'
    return FfmpegProcess(command)
def lint_all_views():
    """Mimic a modification of all views, which will trigger a relint."""
    # Flatten the window/view hierarchy, then relint only views that
    # currently have linters attached.
    all_views = (view for window in sublime.windows() for view in window.views())
    for view in all_views:
        if view.buffer_id() in persist.view_linters:
            hit(view)
def iterable_to_wikitext(
items: Iterable[object], *, prefix: str = "\n* "
) -> str:
"""
Convert iterable to wikitext.
Pages are converted to links.
All other objects use their string representation.
:param items: Items to iterate
:param prefix: Prefix for each item when there is more than... | 5,357,611 |
def ComputeRelativeRisk(first_pmf, other_pmf):
"""Computes relative risks for two PMFs.
first_pmf: Pmf object
other_pmf: Pmf object
"""
print 'Risks:'
funcs = [ProbEarly, ProbOnTime, ProbLate]
risks = {}
for func in funcs:
for pmf in [first_pmf, other_pmf]:
prob = f... | 5,357,612 |
def plot_timeSeries(df, col_name, divide=None, xlabel="Days", line=True, title="Time series values", figsize=(9,9)):
"""
Plot a column of the given time series DataFrame.
Parameters
----------
df: pd.DataFrame
DataFrame indexed by days (i.e. the index is a pd.DatetimeIndex).
col... | 5,357,613 |
def save_bedtools(cluster_regions, clusters, assigned_dir):
"""
Given cluster regions file saves all bedtools sanely and returns result
:param cluster_regions:
:return:
"""
for region in cluster_regions:
output_file = "%s.%s.real.BED" % (clusters, region)
cluster_regions[region][... | 5,357,614 |
def is_bv(a):
    """Return `True` if `a` is a Z3 bit-vector expression.

    >>> b = BitVec('b', 32)
    >>> is_bv(b)
    True
    >>> is_bv(b + 10)
    True
    >>> is_bv(Int('x'))
    False
    """
    result = isinstance(a, BitVecRef)
    return result
def bgsub_1D(raw_data, energy_axis, edge, **kwargs):
"""
Full background subtraction function for the 1D case-
Optional LBA, log fitting, LCPL, and exponential fitting.
For more information on non-linear fitting function, see information at https://docs.scipy.org/doc/scipy/reference/generated/scipy.opti... | 5,357,616 |
def about_garble():
"""
about_garble
Returns one of several strings for the about page
"""
garble = ["leverage agile frameworks to provide a robust synopsis for high level overviews.",
"iterate approaches to corporate strategy and foster collaborative thinking to further the ov... | 5,357,617 |
def parse_term_5_elems(expr_list, idx):
"""
Try to parse a terminal node from five elements of {expr_list}, starting
from {idx}.
Return the new expression list on success, None on error.
"""
# The only 3 items node is pk_h
if expr_list[idx : idx + 2] != [OP_DUP, OP_HASH160]:
return
... | 5,357,618 |
def display_clusters():
"""
Method to display the clusters
"""
offset = int(request.args.get('offset', '0'))
limit = int(request.args.get('limit', '50'))
clusters_id_sorted = sorted(clusters, key=lambda x : -len(clusters[x]))
batches = chunks(range(len(clusters_id_sorted)), size=limit)
r... | 5,357,619 |
def densify_sampled_item_predictions(tf_sample_predictions_serial, tf_n_sampled_items, tf_n_users):
"""
Turns the serial predictions of the sample items in to a dense matrix of shape [ n_users, n_sampled_items ]
:param tf_sample_predictions_serial:
:param tf_n_sampled_items:
:param tf_n_users:
:... | 5,357,620 |
def get_market_book(symbols=None, **kwargs):
"""
Top-level function to obtain Book data for a symbol or list of symbols
Parameters
----------
symbols: str or list, default None
A symbol or list of symbols
kwargs:
Additional Request Parameters (see base class)
"""
return ... | 5,357,621 |
def ndarange(*args, shape: tuple = None, **kwargs):
    """Generate arange arrays of arbitrary dimensions.

    Each positional argument is a tuple of ``np.arange`` parameters,
    e.g. ``(stop,)`` or ``(start, stop, step)``; all resulting ranges
    must have equal length so they can be stacked.

    :param args: one tuple of arange parameters per generated row
    :param shape: optional shape for the stacked result; when omitted,
        the transposed array (one range per column) is returned
    :param kwargs: extra keyword arguments forwarded to ``np.arange``
    :return: np.ndarray built from the stacked ranges
    """
    # Iterate the parameter tuples directly rather than indexing
    # with range(len(args)).
    arr = np.array([np.arange(*spec, **kwargs) for spec in args])
    return arr.reshape(shape) if shape is not None else arr.T
def runningSum(self, nums):
    """
    :type nums: List[int]
    :rtype: List[int]

    Return the prefix sums of ``nums``.

    Rewritten as a single O(n) pass; the previous version recomputed each
    prefix with a nested loop (O(n^2)) and bound an unused local named
    ``sum`` that shadowed the builtin.
    """
    result = []
    total = 0
    for value in nums:
        total += value
        result.append(total)
    return result
def setup_vmedia_for_boot(task, boot_iso, parameters=None):
"""Sets up the node to boot from the given ISO image.
This method attaches the given boot_iso on the node and passes
the required parameters to it via virtual floppy image.
:param task: a TaskManager instance containing the node to act on.
... | 5,357,624 |
def _select_features_1run(df, target, problem_type="regression", verbose=0):
"""
One feature selection run.
Inputs:
- df: nxp pandas DataFrame with n data points and p features; to avoid overfitting, only provide data belonging
to the n training data points. The variables have to be s... | 5,357,625 |
def main():
"""Console script for github_terminal."""
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument("-v",
"--verbose",
action="store_true",
help="Show verbose information")
grou... | 5,357,626 |
def async_add_defaults(hass: HomeAssistant, config_entry: ConfigEntry):
"""Populate default options."""
host: str = config_entry.data[CONF_HOST]
imported_options: dict = hass.data[DOMAIN].get(f"imported_options_{host}", {})
options = {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
CONF_CONS... | 5,357,627 |
def edit_recovery(request, recovery_id):
"""This view is used to edit/update existing tag recoveries."""
clip_codes = sorted(list(CLIP_CODE_CHOICES), key=lambda x: x[0])
tag_types = sorted(list(TAG_TYPE_CHOICES), key=lambda x: x[0])
tag_origin = sorted(list(TAG_ORIGIN_CHOICES), key=lambda x: x[0])
... | 5,357,628 |
def bio2output(text_dir, input_dir, output_dir, output_template, do_copy_text, file_suffix='ann'):
"""
we expect the input as a directory of all bio files end with .txt suffix
we expect the each bio file contain the offset info (start; end position of each words) and tag info;
original words are not req... | 5,357,629 |
def e(string, *args):
    """Format an error message, pretty-printing every positional argument."""
    return string.format(*map(pformat, args))
def membership_ending_task(user):
"""
:return: Next task that will end the membership of the user
"""
task = (UserTask.q
.filter_by(user_id=user.id,
status=TaskStatus.OPEN,
type=TaskType.USER_MOVE_OUT)
# Casting jsonb -> bool directl... | 5,357,631 |
def hmsstr_to_rad(hmsstr):
"""Convert HH:MM:SS.SS sexigesimal string to radians.
"""
hmsstr = np.atleast_1d(hmsstr)
hours = np.zeros(hmsstr.size)
for i,s in enumerate(hmsstr):
# parse string using regular expressions
match = hms_re.match(s)
if match is None:
warn... | 5,357,632 |
def find_optimum_transformations(init_trans, s_pts, t_pts, template_spacing,
e_func, temp_tree, errfunc):
"""
Vary the initial transformation by a translation of up to three times the
grid spacing and compute the transformation with the smallest least square
error.
... | 5,357,633 |
def export_excel(filename, data: list or dict, columns: list, **kwargs):
"""导出excel文件"""
df = pd.DataFrame(data=data, columns=columns)
file_path = os.path.join(os.path.join(base_dir, "export_files"), filename)
df.to_excel(file_path, **kwargs)
print(f"===== Finished in saving Excel file: {file_path... | 5,357,634 |
def execute_transaction(query):
    """Execute Transaction"""
    result = Neo4jHelper.run_single_query(query)
    return result
def generate_linear_constraints(points, verbose=False):
""" Given point coordinates, generate angle constraints. """
from scipy.linalg import null_space
from angle_set import create_theta, get_n_linear, perturbe_points
N, d = points.shape
num_samples = get_n_linear(N) * 2
if verbose:
p... | 5,357,636 |
def add_node_to_parent(node, parent_node):
    """
    Parent the given object under the given parent while preserving its
    local transformations.
    :param node: str
    :param parent_node: str
    """
    parent_flags = {'add': True, 's': True}
    return maya.cmds.parent(node, parent_node, **parent_flags)
def coerce(from_, to, **to_kwargs):
"""
A preprocessing decorator that coerces inputs of a given type by passing
them to a callable.
Parameters
----------
from : type or tuple or types
Inputs types on which to call ``to``.
to : function
Coercion function to call on inputs.
... | 5,357,638 |
def get_module_config_filename():
"""Returns the path of the module configuration file (e.g. 'app.yaml').
Returns:
The path of the module configuration file.
Raises:
KeyError: The MODULE_YAML_PATH environment variable is not set.
"""
module_yaml_path = os.environ['MODULE_YAML_PATH'... | 5,357,639 |
def Binary(value):
    """Construct an object capable of holding a binary (long) string value.

    Here this is a pass-through: *value* is returned unchanged.
    """
    return value
def _get_domain_session(token, domain_name=None):
"""
Return v3 session for token
"""
domain_name = domain_name or 'default'
auth = v3.Token(auth_url=get_auth_url(),
domain_id=domain_name,
token=token)
return session.Session(auth=auth, user_agent=USER_AGEN... | 5,357,641 |
def open_invoice_as_email(inv: Invoice):
""" Opens E-Mail windows to send the invoice """
recipients = []
if inv.payer.email != "":
recipients.append(inv.payer.email)
accounting_company = Company(config.CONSTANTS["COMPANY_NAME_ACCOUNTING"])
if accounting_company.email != "":
recipien... | 5,357,642 |
def load_model(name: str, root: str = "") -> Tuple[Model, Any]:
"""Load the trained model (structure, weights) and vectorizer from files."""
json_file, h5_file, vec_file = (
os.path.join(root, "{}.{}".format(name, ext)) for ext in ("json", "h5", "pkl")
)
with open(json_file) as fp:
mode... | 5,357,643 |
def fix_troposphere_references(template):
""""Tranverse the troposphere ``template`` looking missing references.
Fix them by adding a new parameter for those references."""
def _fix_references(value):
if isinstance(value, troposphere.Ref):
name = value.data['Ref']
if name no... | 5,357,644 |
def parse_config_to_dict(cfg_file, section):
""" Reads config file and returns a dict of parameters.
Args:
cfg_file: <String> path to the configuration ini-file
section: <String> section of the configuration file to read
Returns:
cfg: <dict> configuration parameters of 'section' as ... | 5,357,645 |
def cnn_net(data,
dict_dim,
emb_dim=128,
hid_dim=128,
hid_dim2=96,
class_dim=2,
win_size=3):
"""
Conv net
"""
# embedding layer
emb = fluid.layers.embedding(input=data, size=[dict_dim, emb_dim])
# convolution layer
conv... | 5,357,646 |
def get_commands(servo):
"""Get specific flash commands for the build target.
Each board needs specific commands including the voltage for Vref, to turn
on and turn off the SPI flash. The get_*_commands() functions provide a
board-specific set of commands for these tasks. The voltage for this board
needs to ... | 5,357,647 |
def test_random_deviation_profile_count(game, _):
"""Test dev profile count"""
rest = game.random_restriction()
devs = restrict.deviation_profiles(game, rest)
assert devs.shape[0] == restrict.num_deviation_profiles(game, rest), \
"num_deviation_profiles didn't return correct number"
assert ... | 5,357,648 |
def start_linux(user, password, url, personal, branch, remote,
mvngoals, mvnargs, jdk):
"""
Start a custom linux build
"""
props = dict_as_properties({'project-default-jdk': "%{}%".format(jdk),
'maven-goals': mvngoals,
'mave... | 5,357,649 |
def FormatRow(Cn, Row, COLSP):
    """Render *Row* as one fixed-width string.

    Each cell is truncated to len(column name) + COLSP - 2 characters and
    then space-padded to a width of len(column name) + COLSP.

    :param Cn: sequence of column names (column widths derive from them)
    :param Row: sequence of cell values (stringified before layout)
    :param COLSP: extra spacing added on top of each column-name length
    """
    cells = []
    for idx, value in enumerate(Row):
        name_len = len(Cn[idx])
        text = str(value)[:name_len + COLSP - 2]
        cells.append(text.ljust(name_len + COLSP))
    return "".join(cells)
def handle_release(pin, evt):
    """
    Clears the last tone/light when a button
    is released.
    """
    # Ignore pins outside the 1..4 button range.
    if pin > 4:
        return False
    # Buttons are 1-based; the light array is 0-based.
    explorerhat.light[pin - 1].off()
    tone.power_off()
def makeRoute(start : str, end : str) -> List[str]:
"""Find the shortest route between two systems.
:param str start: string name of the starting system. Must exist in bbData.builtInSystemObjs
:param str end: string name of the target system. Must exist in bbData.builtInSystemObjs
:return: list of stri... | 5,357,652 |
def list_composers():
    """GET all composers and print the raw response body."""
    response = requests.get(url=COMPOSERS_ENDPOINT)
    print("")
    print("result: " + response.text)
def main():
"""The main program.
"""
parser = \
argparse.ArgumentParser(description='Deep-learning based classifiers')
parser.add_argument('--evaluate', action='store_true', default=False,
help='evaluate the classifier on the given datasource')
parser.add_argument('-... | 5,357,654 |
def update_node_categories(
target_graph: BaseGraph,
clique_graph: nx.MultiDiGraph,
clique: List,
category_mapping: Optional[Dict[str, str]],
strict: bool = True,
) -> List:
"""
For a given clique, get category for each node in clique and validate against Biolink Model,
mapping to Biolin... | 5,357,655 |
def _configSpecial_OrthoOpts_zcentre(
target, parser, shortArg, longArg, helpText):
"""Configures the ``zcentre`` option for the ``OrthoOpts`` class. """
parser.add_argument(
shortArg, longArg, metavar=('X', 'Y'),
type=float, nargs=2, help=helpText) | 5,357,656 |
def norm_error(series):
    """Normalize a time series.

    Centres column 0 on its mean, then scales the whole array so the
    centred first column spans [-2, 2]. The input array is not modified.
    """
    centred = deepcopy(series)
    centred[:, 0] = series[:, 0] - np.mean(series[:, 0])
    scale = max(abs(centred[:, 0]))
    return 2 * centred / scale
def f1d(x):
    """Non-linear function for simulation: scaled sigmoid plus a linear term."""
    sigmoid = 1.0 / (1.0 + np.exp(-20.0 * (x - 0.5)))
    return 1.7 * (sigmoid + 0.75 * x)
def get_field_map(src, flds):
"""
Returns a field map for an arcpy data itme from a list or dictionary.
Useful for operations such as renaming columns merging feature classes.
Parameters:
-----------
src: str, arcpy data item or arcpy.mp layer or table
Source data item containing the de... | 5,357,659 |
def b32qlc_decode(value):
"""
Decodes a value in qlc encoding to bytes using base32 algorithm
with a custom alphabet: '13456789abcdefghijkmnopqrstuwxyz'
:param value: the value to decode
:type: bytes
:return: decoded value
:rtype: bytes
>>> b32qlc_decode(b'fxop4ya=')
b'okay'
"""
... | 5,357,660 |
def createPreProcessingLayers():
    """
    Build a Sequential model containing only the initial pre-processing
    layers: pixel normalisation followed by cropping.
    """
    net = Sequential()
    # Scale pixels from [0, 255] into [-0.5, 0.5].
    net.add(Lambda(lambda x: (x / 255.0) - 0.5, input_shape=(160, 320, 3)))
    # Crop 50 rows off the top and 20 off the bottom of each frame.
    net.add(Cropping2D(cropping=((50, 20), (0, 0))))
    return net
def get_random_atoms(a=2.0, sc_size=2, numbers=[6, 8],
set_seed: int = None):
"""Create a random structure."""
if set_seed:
np.random.seed(set_seed)
cell = np.eye(3) * a
positions = np.array([[0, 0, 0], [a/2, a/2, a/2]])
unit_cell = Atoms(cell=cell, positions=position... | 5,357,662 |
def pocsense(kspace, sensitivities, i=None, r=None, l=None, g=None, o=None, m=None):
"""
Perform POCSENSE reconstruction.
:param kspace array:
:param sensitivities array:
:param i int: max. number of iterations
:param r float: regularization parameter
:param l int: toggle l1-wavelet or l2... | 5,357,663 |
def vgg16_bn(pretrained=False, **kwargs):
"""VGG 16-layer model (configuration "D") with batch normalization
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
if pretrained:
kwargs['init_weights'] = False
model = VGG(make_layers(cfg['D'], batch_norm=True),... | 5,357,664 |
def dump_yaml(content: dict, filepath: str):
    """Serialize *content* as YAML and write it to *filepath*."""
    serialized = yaml.dump(content)
    with open(filepath, "w") as handle:
        handle.write(serialized)
def tau(x, cval):
"""Robust estimators of location and scale, with breakdown points of 50%.
Also referred to as: Tau measure of location by Yohai and Zamar
Source: Yohai and Zamar JASA, vol 83 (1988), pp 406-413 and
Maronna and Zamar Technometrics, vol 44 (2002), pp. 307-317"""
med =... | 5,357,666 |
def shn_gis_location_represent(id, showlink=True):
""" Represent a location given its id """
table = db.gis_location
try:
location = db(table.id == id).select(table.id,
table.name,
table.level,
... | 5,357,667 |
def de_pearson_dataframe(df, genes, pair_by='type', gtex=True, tcga=True):
"""
PearsonR scores of gene differential expression between tumor and normal types.
1. Calculate log2FC of genes for TCGA tumor samples with matching TCGA normal types
2. Compare log2fc to tumor type compared to all other normal... | 5,357,668 |
def chunks(list_, n):
    """
    Yield successive n-sized chunks from list_.
    Based on https://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks
    """
    start = 0
    while start < len(list_):
        yield list_[start:start + n]
        start += n
def load_YUV_as_dic_tensor(path_img):
"""
Construct a dic with 3 entries ('y','u', 'v'), each of them
is a tensor and is loaded from path_img + key + '.png'.
! Return a dictionnary of 3D tensor (i.e. without a dummy batch index)
"""
dic_res = {}
key = ['y', 'u', 'v']
for k i... | 5,357,670 |
def tensor_log10(t1, out_format, dtype=None):
"""
Takes the log base 10 of each input in the tensor.
Note that this is applied to all elements in the tensor not just non-zeros.
Warnings
---------
The log10 of 0 is undefined and is performed on every element in the tensor re... | 5,357,671 |
def threadVideoGet(source=0):
"""
Dedicated thread for grabbing video frames with VideoGet object.
Main thread shows video frames.
"""
video_getter = VideoGet(source).start()
cps = CountsPerSec().start()
while True:
if (cv2.waitKey(1) == ord("q")) or video_getter.stopped:... | 5,357,672 |
def main():
"""Convert YAML specifications to database DDL."""
parser = cmd_parser("Generate SQL statements to update a PostgreSQL "
"database to match the schema specified in a "
"YAML-formatted file(s)", __version__)
parser.add_argument('-m', '--multiple-fil... | 5,357,673 |
def test_score_scaling(sequences):
"""
Scaling the substitution scores and gap penalties by a constant
factor should not influence the obtained E-values.
Test this by aligning real sequences with a standard and scaled
scoring scheme and comparing the calculated E-values of these
alignments.
... | 5,357,674 |
def networkedge_polygon_intersection(
edge_shapefile,
hazard_shapefile,
output_shapefile,
edge_id_column,
polygon_id_column,
edge_length_column,
crs={"init": "epsg:4326"},
):
"""Intersect network edges and hazards and write results to shapefiles
Parameters
----------
edge_sh... | 5,357,675 |
def get_file_phenomena_i(index):
"""
Return file phenomena depending on the value of index.
"""
if index <= 99:
return [phen[0]]
elif index >= 100 and index <= 199:
return [phen[1]]
elif index >= 200 and index <= 299:
return [phen[2]]
elif index >= 300 and index <= ... | 5,357,676 |
def resource_cache_map(resource_id, flush=True):
    """Return cached CDN info for *resource_id*.

    :param resource_id: id of the resource to look up
    :param flush: when True, re-map the resource before reading the cache
    :raises InvalidArgument: if the resource is not present in the cache
    """
    if flush:
        map_resources(resource_ids=[resource_id, ])
    if resource_id not in CDNRESOURCE:
        # Fixed garbled error message ("Resource not exit").
        raise InvalidArgument('Resource does not exist')
    return CDNRESOURCE[resource_id]
def _preprocess_html(table_html):
"""Parses HTML with bs4 and fixes some glitches."""
table_html = table_html.replace("<br />", "<br /> ")
table = bs4.BeautifulSoup(table_html, "html5lib")
table = table.find("table")
# Delete hidden style annotations.
for tag in table.find_all(attrs={"style": "display:none"... | 5,357,678 |
def parse_campus_hours(data_json, eatery_model):
"""Parses a Cornell Dining json dictionary.
Returns 1) a list of tuples of CampusEateryHour objects for a corresponding CampusEatery object and their unparsed
menu 2) an array of the items an eatery serves.
Args:
data_json (dict): a valid dictio... | 5,357,679 |
def _change_relationships(edge: Dict) -> Tuple[bool, bool]:
"""Validate relationship."""
if 'increases' in edge[1]['relation'] or edge[1]['relation'] == 'positive_correlation':
return True, True
elif 'decreases' in edge[1]['relation'] or edge[1]['relation'] == 'negative_correlation':
return ... | 5,357,680 |
def extract_behaviour_sync(sync, chmap=None, display=False, tmax=np.inf):
"""
Extract wheel positions and times from sync fronts dictionary
:param sync: dictionary 'times', 'polarities' of fronts detected on sync trace for all 16 chans
:param chmap: dictionary containing channel index. Default to const... | 5,357,681 |
def download_and_load_model(model_files) -> RecursiveScriptModule:
"""
Downloads and torch.jit.load the model from google drive, the downloaded model is saved in /tmp
since in heroku we get /tmp to save all our stuff, if the app is not running in production
the model must be saved in load stora... | 5,357,682 |
def is_attr_defined(attrs,dic):
"""
Check if the sequence of attributes is defined in dictionary 'dic'.
Valid 'attrs' sequence syntax:
<attr> Return True if single attrbiute is defined.
<attr1>,<attr2>,... Return True if one or more attributes are defined.
<attr1>+<attr2>+... Return True if all ... | 5,357,683 |
def c_str_repr(str_):
"""Returns representation of string in C (without quotes)"""
def byte_to_repr(char_):
"""Converts byte to C code string representation"""
char_val = ord(char_)
if char_ in ['"', '\\', '\r', '\n']:
return '\\' + chr(char_val)
elif (ord(' ') <= cha... | 5,357,684 |
def _check_signature(signature, template):
"""
Check that the given `Signature` is valid.
"""
pick = _LockPick()
template.format_map(pick)
path_vars = {name for name, _ in _get_parameters(Path, signature)}
path_vars_diff = pick.keys - path_vars
if path_vars_diff:
raise FurnishEr... | 5,357,685 |
def hour_paths_for_range(hours_path, start, end):
"""Generate a list of hour paths to check when looking for segments between start and end."""
# truncate start and end to the hour
def truncate(dt):
return dt.replace(microsecond=0, second=0, minute=0)
current = truncate(start)
end = truncate(end)
# Begin in the... | 5,357,686 |
def getActiveTeamAndID():
    """Returns the Team ID and CyTeam for the active player."""
    team_id = getActiveTeamID()
    team = getActiveTeam()
    return team_id, team
def is_nitf(
file_name: Union[str, BinaryIO],
return_version=False) -> Union[bool, Tuple[bool, Optional[str]]]:
"""
Test whether the given input is a NITF 2.0 or 2.1 file.
Parameters
----------
file_name : str|BinaryIO
return_version : bool
Returns
-------
is_nitf_... | 5,357,688 |
def main():
"""A simple main for testing via command line."""
parser = argparse.ArgumentParser(
description='A manual test for ros-pull-request-builder access'
'to a GitHub repo.')
parser.add_argument('user', type=str)
parser.add_argument('repo', type=str)
parser.add_argu... | 5,357,689 |
def get_xlsx_filename() -> str:
"""
Get the name of the excel file. Example filename:
kesasetelihakemukset_2021-01-01_23-59-59.xlsx
"""
local_datetime_now_as_str = timezone.localtime(timezone.now()).strftime(
"%Y-%m-%d_%H-%M-%S"
)
filename = f"kesasetelihakemukset_{local_datetime_now... | 5,357,690 |
def retrieved_secret(secret_name):
"""retrieved_secret"""
log_level = environ.get("APP_LOG_LEVEL", logging.INFO)
logging.basicConfig(format="%(levelname)s:%(message)s", level=log_level)
if (
"tenant_id" in environ.keys()
and "client_id" in environ.keys()
and "client_secret" in en... | 5,357,691 |
def get_content_directory() -> Path:
    """
    Get the path of the markdown `content` directory.
    """
    base = get_base_directory()
    return base / "content"
def mag_inc(x, y, z):
    """
    Given *x* (north intensity), *y* (east intensity), and *z*
    (vertical intensity) all in [nT], return the magnetic inclination
    angle [deg].
    """
    # math.hypot computes sqrt(x**2 + y**2) without intermediate
    # overflow/underflow for extreme inputs.
    h = math.hypot(x, y)
    return math.degrees(math.atan2(z, h))
def bootstrap():
    """Initialize the remote host environment (virtualenv, deploy, update)."""
    # A valid env.root must have been provided (e.g. by the 'pro' task).
    require('root', provided_by=('pro'))
    # Make sure the remote root directory exists.
    run('mkdir -p %(root)s' % env)
    create_virtualenv()
    deploy()
    update_requirements()
def rgb_to_rgba(image, alpha_val):
"""
Convert an image from RGB to RGBA.
"""
if not isinstance(image, torch.Tensor):
raise TypeError(f"Input type is not a torch.Tensor. Got {type(image)}")
if len(image.shape) < 3 or image.shape[-3] != 3:
raise ValueError(f"Input size must have ... | 5,357,695 |
def pdpc_decision(csv, download, corpus, action, root, extras, extra_corpus, verbose):
"""
Scripts to scrape all decisions of the Personal Data Protection Commission of Singapore.
Accepts the following actions.
"all" Does all the actions (scraping the website, saving a csv, downloading all files... | 5,357,696 |
def has_labels(dataset_dir, filename=LABELS_FILENAME):
"""Specifies whether or not the dataset directory
contains a label map file.
Args:
dataset_dir: The directory in which the labels file is found.
filename: The filename where the class names are written.
Returns:
`True` if the labels fi... | 5,357,697 |
def get(identifier):
"""get the activation function"""
if identifier is None:
return linear
if callable(identifier):
return identifier
if isinstance(identifier, str):
activations = {
"relu": relu,
"sigmoid": sigmoid,
"tanh": tanh,
"... | 5,357,698 |
def df_add_column_codelines(self, key):
"""Generate code lines to add new column to DF"""
func_lines = df_set_column_index_codelines(self) # provide res_index = ...
results = []
for i, col in enumerate(self.columns):
col_loc = self.column_loc[col]
type_id, col_id = col_loc.type_id, col... | 5,357,699 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.