content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def setup_page(choice, pagepanel, frame):
"""
Creates a :class:`Page` inside a :class:`Notebook`.
:Args:
- choice (tuple)
A tuple of (name, module path, module alias)
- pagepanel
"""
if isinstance(choice.module, str):
try:
__import__(choice.module)
... | 5,354,200 |
def customization_data(client=None):
"""Produce any customization definitions (types, fields, message destinations, etc)
that should be installed by `resilient-circuits customize`
"""
# This import data contains:
# Function inputs:
# artifact_id
# artifact_type
# artifa... | 5,354,201 |
def init_app(app):
    """Application-initializer hook: create the backup server and attach routes."""
    global backup_server
    # Instantiate the module-level backup server used by the blueprint's views.
    backup_server = BackupServer()
    # Expose this module's routes on the application.
    app.register_blueprint(blueprint)
def test_picorv32_quartus_cyclone10_timing(picorv32_cyclone10_data):
"""Check timing tables"""
timing = picorv32_cyclone10_data["timing"]
clocks = timing["Clocks"].set_index("Clock Name")
fmax = timing["Slow 900mV 100C Model Fmax Summary"].set_index("Clock Name")
assert clocks.loc["clk", "Frequenc... | 5,354,203 |
def taillight_detect(image):
""" Takes in a road image, re-sizes for the model,
predicts the lane to be drawn from the model in G color,
recreates an RGB image of a lane and merges with the
original road image.
"""
model = load_model('full_CNN_model.h5')
#image1=image
#image1=np.array(im... | 5,354,204 |
def generate_random_number(rng, length):
    """Return a random number with the predefined *length*.

    Args:
        rng: random-number-generator handle, forwarded to the crypto backend.
        length: requested length of the generated number.

    Returns:
        Whatever ``crypto.random_generate`` produces for these arguments.
    """
    result = crypto.random_generate(rng, length)
    return result
def update_old_names():
"""Fetches the list of old tz names and returns a mapping"""
url = urlparse(ZONEINFO_URL)
log.info('Connecting to %s' % url.netloc)
ftp = ftplib.FTP(url.netloc)
ftp.login()
gzfile = BytesIO()
log.info('Fetching zoneinfo database')
ftp.retrbinary('RETR ' + url.pa... | 5,354,206 |
def get_namespace(location: Optional[str] = None,
namespace_id: Optional[str] = None,
project: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNamespaceResult:
"""
Gets a namespace.
"""
__args__ = dict()
__args__... | 5,354,207 |
def build_headers(access_token, client_id):
"""
:param access_token: Access token granted when the user links their account
:param client_id: This is the api key for your own app
:return: Dict of headers
"""
return {'Content-Type': 'application/json',
'Authorization': f'Bearer {acce... | 5,354,208 |
def get_args():
"""Get command-line arguments"""
parser = argparse.ArgumentParser(
description='sum numbers',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
# Positional arg
parser.add_argument('int', metavar='INT', type=int, nargs='+',
help='Numbers to add... | 5,354,209 |
def odd_subgraph_centrality(i, lam, u):
"""
Calculates the number of odd length closed walks that a node participates in :cite:`estrada2005spectral`.
Used in the calculation of spectral scaling and generalized robustness index.
:param i: node index
:param lam: largest eigenvalue
:param u: large... | 5,354,210 |
def PolyAreasToModel(polyareas, bevel_amount, bevel_pitch, quadrangulate):
"""Convert a PolyAreas into a Model object.
Assumes polyareas are in xy plane.
Args:
polyareas: geom.PolyAreas
bevel_amount: float - if > 0, amount of bevel
bevel_pitch: float - if > 0, angle in radians of bevel
... | 5,354,211 |
def preprocessing(train_data, test_data):
"""
* The method at first eliminates constant features from both train and test data.
* Then, it splits training data into features and labels.
* Finally, the method performs pca on training and testing data sets to reduce the dimension and
overcome curse... | 5,354,212 |
def generate_videos_from_events(response, video_model):
"""Creates the video containers/representations for this given response.
We should only really invoke this as part of a migration as of right now (2/8/2019),
but it's quite possible we'll have the need for dynamic upsertion later.
"""
seen_id... | 5,354,213 |
def datetime_at_midnight(dt: DateTime, tz: TimeZone) -> DateTime:
""" Returns a DateTime for the requested DateTime at midnight in the specified time zone.
Args:
dt (DateTime): the DateTime for which the new value at midnight should be calculated
tz (TimeZone): the TimeZone to use when interpre... | 5,354,214 |
def count_dcm(logger, top):
"""
This function recursively walks through a given directory
(`top`) using depth-first search (bottom up) and counts the
number of .dcm files present.
Parameters
----------
path : {str}
The directory to count.
Returns
-------
count : {int}
... | 5,354,215 |
def convert2sametype(dict_, formula):
    """Utility function for internal use.

    Convert *dict_* into the same type as *formula*
    (string/dict/DataFrame).

    Parameters
    ----------
    dict_ : dict
    formula : string/dict/DataFrame

    Returns
    -------
    type(formula)
    """
    target_type = type(formula)
    return convert2type(dict_, target_type)
def setup_data() -> None:
"""Load test dicom files to the test Orthanc server instance
"""
headers = {'content-type': 'application/dicom'}
list_of_dicom_file_paths = [f'./tests/data/dicom_files/{i}' for i in os.listdir('./tests/data/dicom_files/')]
for file_path in list_of_dicom_file_paths:
... | 5,354,217 |
def from_sdf(sdf_content: str = None, file_path: str = None, ignore_hydrogens = False) -> List[Graph]:
"""
parse graph from_sdf
Read chemical files and parses them into instances of `Graph`.
As this function is not meant to be called in a loop,
inner functions only relative to chemical files parsing are declare... | 5,354,218 |
def dispatcher_connect(
opp: OpenPeerPower, signal: str, target: Callable[..., None]
) -> Callable[[], None]:
"""Connect a callable function to a signal."""
async_unsub = run_callback_threadsafe(
opp.loop, async_dispatcher_connect, opp, signal, target
).result()
def remove_dispatcher() -> N... | 5,354,219 |
def test():
    """
    Test method for module.

    NOTE(review): uses Python 2 ``print`` statements — this function is not
    Python 3 compatible as written.
    """
    # Connect to a local vagrant node with the shared vagrant key.
    server = Server(host='orka-node-1', port='22', user='vagrant', key=VAGRANT_KEY)
    print command(server, 'uname -s')
    print git_status(server, '/opt/orka')
    # Round-trip a file copy, then display the remote copy's contents.
    copy_file(server, '/etc/hosts', '/tmp/remote_hosts')
    print command(server, 'cat /tmp/remote_hosts')
def get_last_error():
""" Get the last error value, then turn it into a nice string. Return the string. """
error_id = kernel32.GetLastError()
# No actual error
if error_id == 0:
return None
# Gonna need a string pointer
buf = ffi.new("LPWSTR")
chars = kernel32.FormatMessageA(... | 5,354,221 |
def load_params_from_pkl(params_dump_file_path):
"""
Loads parameters from a pickle _dump file.
:param params_dump_file_path: self-explanatory
:return dict of param_name => param
"""
coll = {}
f = open(params_dump_file_path, 'rb')
while True:
try:
param_name, param_v... | 5,354,222 |
def _extract_filename_from_filepath(strFilePath=""):
"""
Function which extracts file name from the given filepath
"""
if strFilePath:
try:
strFileName = Path(strFilePath).name
strFileName = str(strFileName).split(".")[0]
return strFileName
... | 5,354,223 |
def plot_by_term(term, df, kind='go', q=0.1, swarm=True,
x='genotype', y='b', gene='ens_gene'):
"""
Plot ontology terms by a given column.
Params:
term - term to look for in melted_df
df - a tidy dataframe with columns x and y
kind - the ontology to use
q - q-value for stat... | 5,354,224 |
def add_emote_command_handler(update, context):
"""
CommandHandler that adds emotes from a specific channel to the bots cache.
Format: /add <channelid>.
Emotes are determined with querries to the twitchemotes.com API.
"""
try:
channel_id = int(context.args[0])
cached_channels, ... | 5,354,225 |
def lab_equality(lab1, lab2):
    """
    Check if two labs are identical.

    A lab is a dict carrying "ncolumns"/"nlines" metadata (string keys) plus
    non-string keys whose cell contents are compared as sets (order and
    multiplicity are ignored).

    :param lab1: first lab dict
    :param lab2: second lab dict
    :return: bool — True when dimensions and all cell sets match
    """
    # Bug fix: the original compared lab1["ncolumns"] against itself,
    # so differing column counts were never detected.
    if lab1["ncolumns"] != lab2["ncolumns"] or lab1["nlines"] != lab2["nlines"]:
        return False
    # Only non-string keys hold cell data; string keys are metadata.
    return all(set(lab1[cell]) == set(lab2[cell]) for cell in lab1.keys() if type(cell) != type("a"))
def read_wwm(filename_or_fileglob, chunks={}, convert_wind_vectors=True):
"""Read Spectra from SWAN native netCDF format.
Args:
- filename_or_fileglob (str): filename or fileglob specifying multiple
files to read.
- chunks (dict): chunk sizes for dimensions in dataset. By default
... | 5,354,227 |
def p_require_key_lst(p):
    """require_key_lst : require_key require_key_lst
                       | require_key"""
    # NOTE: the docstring above is the PLY grammar production — keep intact.
    # Build the list by prepending the head key to the (possibly empty) tail.
    if len(p) == 3:
        p[0] = [p[1]] + p[2]
    elif len(p) == 2:
        p[0] = [p[1]]
def is_sorted(t):
    """Report whether the list is already in ascending order.

    t: list
    returns: boolean
    """
    return sorted(t) == t
def create_container_port_mappings(container):
"""
Create the port mappings for the given container.
:param container: The container to create the mappings for.
"""
ports = []
image = None
if container.is_image_based():
image = container.image
elif container.is_clone() and conta... | 5,354,230 |
async def test_cleanup_nopbar():
    """Cleanup must be a no-op when no tqdm progress bar was ever created."""
    fake_state = Namespace(progressbar={})
    tidy = TQDMCleanup(None, fake_state)
    await tidy(42)
    # Nothing existed to clean up, so no previous pbar is recorded.
    assert tidy._prev is None
def _get_dep_for_package(package, platform):
"""
Convert arguments in the `package` parameter to actual deps.
"""
if is_list(package) or is_tuple(package):
package, _ = package
# TODO: ghc-8.4.4
if (package == "compact" and
_get_ghc_version(platform) == "8.4.4"):
packag... | 5,354,232 |
def get_caster(typehint: TypeHint) -> Callable[..., Any]:
"""Returns a conversion class most appropriate for the
supplied type hint. Potential matches are checked in
order from most to least specific to account for
overlapping types (e.g. ABCs).
"""
if typehint in (Any, None):
return unt... | 5,354,233 |
def values(series):
    """Tally how often each value occurs and order the result by value.

    series: pd.Series
    returns: pd.Series mapping values (NaN included) to their frequencies
    """
    counts = series.value_counts(dropna=False)
    return counts.sort_index()
def build_conda(force_conda, render, repo, use_pythonpath, output_folder):
"""Build the conda package for the current source tree."""
build_subcommand = "mambabuild"
if force_conda or not has_mambabuild():
build_subcommand = "build"
if render:
build_subcommand = "render"
requirement... | 5,354,235 |
def use_id(type):
"""Declare that this configuration option should point to an ID with the given type."""
def validator(value):
check_not_templatable(value)
if value is None:
return core.ID(None, is_declaration=False, type=type)
if (
isinstance(value, core.ID)
... | 5,354,236 |
def social_auth_user(backend, uid, user=None, *args, **kwargs):
"""Return UserSocialAuth account for backend/uid pair or None if it
doesn't exists.
Raise AuthAlreadyAssociated if UserSocialAuth entry belongs to another
user.
"""
social_user = UserSocialAuth.get_social_auth(backend.name, uid)
... | 5,354,237 |
def bbox_mask(t_arr, x_arr, limits):
    """
    Return indices of x_arr values lying inside [limits[0], limits[1]].

    NOTE: t_arr is accepted for interface compatibility but is no longer used.
    """
    lower, upper = limits[0], limits[1]
    in_box = (x_arr >= lower) & (x_arr <= upper)
    return np.where(in_box)[0]
def kdeplot_2d_clevels(xs, ys, levels=11, **kwargs):
""" Plot contours at specified credible levels.
Arguments
---------
xs: array
samples of the first variable.
ys: array
samples of the second variable, drawn jointly with `xs`.
levels: float, array
if float, interpreted... | 5,354,239 |
def test_gumbel_prob():
    """
    Test probability functions: passing value through construct.
    """
    prob_net = GumbelProb()
    sample = Tensor([0.5, 1.0], dtype=dtype.float32)
    result = prob_net(sample)
    # The network must hand back a Tensor, not a raw array or scalar.
    assert isinstance(result, Tensor)
def test_addtional_connections(create):
"""Test additional connections to assembly connection."""
conn1 = create(ConnectorItem)
conn2 = create(ConnectorItem)
conn3 = create(ConnectorItem)
c1 = create(ComponentItem, UML.Component)
c2 = create(ComponentItem, UML.Component)
c3 = create(Compone... | 5,354,241 |
def normalization_reg_loss(input):
    """
    Regularize vectors toward unit length.

    input: tensor of shape [..., 3]
    Computes each vector's L2 norm and returns the mean squared
    deviation of those norms from 1.
    """
    squared = input ** 2
    vector_norms = squared.sum(dim=-1).sqrt()
    deviation = vector_norms - 1
    return (deviation ** 2).mean()
def _get_window_size(offset, step_size, image_size):
"""
Calculate window width or height.
Usually same as block size, except when at the end of image and only a
fracture of block size remains
:param offset: start columns/ row
:param step_size: block width/ height
:param image_size: image wi... | 5,354,243 |
def osc_server(ip=ip_osc_server, port=port_server):
"""
sets up and runs the OSC server.
"""
dispatch = dispatcher.Dispatcher()
"""
dispatch.map("/surface-sentiments", surface_handler)
dispatch.map("/reset", reset_handler)
dispatch.map("/silent", silent_handler)
"""
di... | 5,354,244 |
def _ListCtrl_IsSelected(self, idx):
    """
    Returns ``True`` if the item at *idx* carries the selected state flag.
    """
    state = self.GetItemState(idx, wx.LIST_STATE_SELECTED)
    return (state & wx.LIST_STATE_SELECTED) != 0
def sturm_liouville_function(x, y, p, p_x, q, f, alpha=0, nonlinear_exp=2):
"""Second order Sturm-Liouville Function defining y'' for Lu=f.
This form is used because it is expected for Scipy's solve_ivp method.
Keyword arguments:
x -- independent variable
y -- dependent variable
p -- p(x) para... | 5,354,246 |
def bn_update(loader, model, verbose=False, subset=None, **kwargs):
"""
BatchNorm buffers update (if any).
Performs 1 epochs to estimate buffers average using train dataset.
:param loader: train dataset loader for buffers average estimation.
:param model: model being update
... | 5,354,247 |
def StopMasterDaemons():
"""Stop the master daemons on this node.
Stop the master daemons (ganeti-masterd and ganeti-rapi) on this node.
@rtype: None
"""
# TODO: log and report back to the caller the error failures; we
# need to decide in which case we fail the RPC for this
result = utils.RunCmd([path... | 5,354,248 |
def write_detected_issue_summaries(document, speech_segments):
"""
Scans the speech segments for any detected issues, and if there are any then a new table is added
to the document.
:param document: Word document structure to write the table into
:param speech_segments: Call transcript structures
... | 5,354,249 |
def item_view_mouse_click(item_view, row, column=0, button=QtCore.Qt.LeftButton, modifier=QtCore.Qt.NoModifier):
"""
Helper method version of 'item_view_index_mouse_click' using a row, column instead of a QModelIndex
item_view: The QAbstractItemView instance
row: The requested row index
column: The ... | 5,354,250 |
def logged(func):
"""Pipes exceptions through root logger"""
@wraps(func)
def deco(*args, **kwargs):
try:
result = func(*args, **kwargs)
except Exception as e:
logging.exception(f"{func.__name__}:\n{e}")
print("Exception logged to {LOGFILE}")
s... | 5,354,251 |
def hamming_set(index: str, d: int = 1, include_N: bool = True):
"""Given an index of bases in {ACGTN}, generate all indexes within hamming
distance d of the input
:param index: string representing the index sequence
:param d: maximum distance to allow
:param include_N: include N when generating po... | 5,354,252 |
def _get_epochs_info(raw_fname):
    """Read the epochs file at *raw_fname* and return its measurement info."""
    from mne import read_epochs
    return read_epochs(raw_fname).info
def _load_specs(ctx):
""" Helper function to find all specs stored in _WAF_/specs/*.json """
if hasattr(ctx, 'loaded_specs_dict'):
return
ctx.loaded_specs_dict = {}
spec_file_folder = ctx.root.make_node(Context.launch_dir).make_node('/_WAF_/specs')
spec_files = spec_file_fol... | 5,354,254 |
def test_bounding_rect_of_boxes(mock_gui):
"""Test that calculating the bounding rect of a set of Boxes works correctly."""
boxes = []
for i in range(1, 4):
box = make_dummy_box()
box.position = 100 * i, 100 * i
boxes.append(box)
rect = bounding_rect_of_rects((box.world_rect for... | 5,354,255 |
def is_private(key):
    """
    Tell whether an attribute name is considered private.

    A private attribute is wrapped in double underscores, e.g.
    ``__private_attribute__``.

    :param key: The attribute key
    :return: bool
    """
    has_prefix = key.startswith("__")
    has_suffix = key.endswith("__")
    return has_prefix and has_suffix
async def img(filename) -> Response:
    """Image static endpoint: serve *filename* from the ``img`` directory."""
    response = await send_from_directory("img", filename)
    return response
def ref_genome_info(info, config, dirs):
"""Retrieve reference genome information from configuration variables.
"""
genome_build = info.get("genome_build", None)
(_, sam_ref) = get_genome_ref(genome_build, config["algorithm"]["aligner"],
dirs["galaxy"])
return genom... | 5,354,258 |
def load_text(file_arg):
"""
General function used to load data from a text file
"""
file_handle = validate_file_for_reading(file_arg)
try:
df = pd.io.parsers.read_csv(file_handle,delim_whitespace=True,\
comment='#', skip_blank_lines=True, engine='c')
except:
raise So... | 5,354,259 |
def entropy_from_mnemonic(mnemonic: Mnemonic, lang: str = "en") -> BinStr:
"""Convert mnemonic sentence to Electrum versioned entropy."""
# verify that it is a valid Electrum mnemonic sentence
_ = version_from_mnemonic(mnemonic)
indexes = _indexes_from_mnemonic(mnemonic, lang)
entropy = _entropy_fr... | 5,354,260 |
def main(argv):
"""Delete/split/merge/... labels in a labelvolume."""
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser = parse.parse_remap_labels(parser)
parser = parse.parse_common(parser)
args = parser.p... | 5,354,261 |
def delete_data_analysis(analysis_id: UUID, token: HTTPAuthorizationCredentials = Depends(auth)):
"""
Delete a data analysis record.
You may only delete records in your private space,
or that are associated with a collab of which you are an administrator.
"""
return delete_computation(omcmp.Dat... | 5,354,262 |
def get_current_blk_file(current_file_number) -> str:
    """
    Returns the current blk file name with file format.
    """
    blk_name = blk_file_format(current_file_number)
    return get_current_file_name(blk_name)
def run(vhd_list, output_file):
    """Runs this backend: emit one line per VHD file in dependency order."""
    for entry in vhd_list.order:
        # Mark top-level units distinctly from plain dependencies.
        role = 'top' if entry in vhd_list.top else 'dep'
        output_file.write('%s %s %04d %s\n' % (role, entry.lib, entry.version, entry.fname))
def exec_cmd_status(ceph_installer, commands):
"""
Execute command
Args:
ceph_installer: installer object to exec cmd
commands: list of commands to be executed
Returns:
Boolean
"""
for cmd in commands:
out, err = ceph_installer.exec_command(sudo=True, cmd=cmd)
... | 5,354,265 |
def BuildIsAvailable(bucket_name, remote_path):
"""Checks whether a build is currently archived at some place."""
logging.info('Checking existance: gs://%s/%s' % (bucket_name, remote_path))
try:
exists = cloud_storage.Exists(bucket_name, remote_path)
logging.info('Exists? %s' % exists)
return exists
... | 5,354,266 |
def _channel_name(row, prefix="", suffix=""):
"""Formats a usable name for the repeater."""
length = 16 - len(prefix)
name = prefix + " ".join((row["CALL"], row["CITY"]))[:length]
if suffix:
length = 16 - len(suffix)
name = ("{:%d.%d}" % (length, length)).format(name) + suffix
return... | 5,354,267 |
def analyze1127_and_output(ps: List[List[RunningParticle]], gantry_number: int, total_particle_number: int,
momentum_dispersion_list: List[float]) -> None:
"""
分析运行结果,并写到 output.txt 中
类似 1 2.9106590546670255 3.9272244111035284 1.9234584254384846 0.45806934921638964
Parameters
... | 5,354,268 |
def wordsinunit(unit):
"""Counts the words in the unit's source and target, taking plurals into
account. The target words are only counted if the unit is translated."""
(sourcewords, targetwords) = (0, 0)
if isinstance(unit.source, multistring):
sourcestrings = unit.source.strings
else:
... | 5,354,269 |
def check_instance(arg, types, allow_none=False, message='Argument "%(string)s" is not of type %(expected)s, but of type %(actual)s', level=1):
"""
>>> check_instance(1, int)
1
>>> check_instance(3.5, float)
3.5
>>> check_instance('hello', str)
'hello'
>>> check_instance([1, 2, 3... | 5,354,270 |
def spec_lnlike(params, labels, grid_param_list, lbda_obs, spec_obs, err_obs,
dist, model_grid=None, model_reader=None, em_lines={},
em_grid={}, dlbda_obs=None, instru_corr=None,
instru_fwhm=None, instru_idx=None, filter_reader=None,
AV_bef_bb=False, u... | 5,354,271 |
def generic_list(request):
    """Returns a list of all of the document IDs in the matched DocStore."""
    umbrella = umbrella_from_request(request)
    return umbrella.get_doc_ids()
def japan_results(request):
    """
    View function returning the template that displays Japan-specific photos.

    (Docstring fixed: it previously said "New York", but the view filters
    on the Japan location id and renders japan.html.)
    """
    images = Image.filter_images_by_location(location_id=12)
    return render(request, "all_pictures/japan.html", {"images": images})
def add_yaml_literal_block(yaml_object):
"""
Get a yaml literal block representer function to convert normal strings into yaml literals during yaml dumping
Convert string to yaml literal block
yaml docs: see "Block mappings" in https://pyyaml.org/wiki/PyYAMLDocumentation
"""
def literal_str_re... | 5,354,274 |
def _spans_to_array(
doc: Doc,
sources: List[str],
label2idx: Dict[str, int],
labels_without_prefix: Set[str],
prefixes: Optional[Set[str]] = None,
warn_missing_labels: bool = False
) -> np.ndarray:
"""Convert the annotations of a spacy document into a 2D array.
Each row corresponds to a... | 5,354,275 |
def d_out_dist_cooler(P_mass, rho_dist_cool, w_drift):
"""
Calculates the tube's diameter of out distilliat from distilliat cooler to distilliat volume.
Parameters
----------
P_mass : float
The mass flow rate of distilliat, [kg/s]
rho_dist_cool : float
The density of liquid at co... | 5,354,276 |
def is_port_in_use(port):
    """
    Check whether a TCP port on localhost is currently in use.

    :param port: port number to probe
    :return: True if something accepts connections on the port, else False
    """
    import socket
    # connect_ex returns 0 on a successful connection, i.e. the port is busy.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as probe:
        return probe.connect_ex(('localhost', port)) == 0
def redirect_output_to_logger(logger):
"""Context manager which redirects stdout and stderr to a logger"""
orig_stdout = sys.stdout
orig_stderr = sys.stderr
sys.stdout = StreamToLog(logger, logging.INFO)
sys.stderr = StreamToLog(logger, logging.WARNING)
try:
yield
finally:
sy... | 5,354,278 |
def create_variables_eagerly(getter, initial_value, **kwargs):
"""Attempts to force variable creation to be eager."""
eager_initial_value = None
if isinstance(initial_value, tf.Tensor):
if _is_eager_tensor(initial_value):
eager_initial_value = initial_value
else:
# Try to compute the static v... | 5,354,279 |
def _Run(vm):
"""See base method.
Args:
vm: The vm to run the benchmark on.
Returns:
A list of sample.Sample objects.
"""
# Make changes e.g. compiler flags to spec config file.
if 'gcc' in FLAGS.runspec_config:
_OverwriteGccO3(vm)
# swap only if necessary; free local node memory and avoid... | 5,354,280 |
def mixin_method(ufunc, rhs=None, transpose=True):
"""Decorator to register a mixin class method
Using this decorator ensures that derived classes that are declared
with the `mixin_class` decorator will also have the behaviors that this
class has.
ufunc : numpy.ufunc
A universal function (... | 5,354,281 |
def set_def_quick_print(setting):
"""
Set the global default (henceforth) behavior whether to quick print
when stamping or stopping.
Args:
setting: Passed through bool().
Returns:
bool: Implemented setting value.
"""
setting = bool(setting)
SET['QP'] = setting
retur... | 5,354,282 |
def test_main_restore_cancel(builtin_input, builtin_print,
                             parse_config, restore):
    """should cancel restore procedure when user cancels confirmation"""
    # Cancelling the confirmation is surfaced as an exception from main().
    with nose.assert_raises(Exception):
        swb.main()
    # The restore step must never run once the user backs out.
    restore.assert_not_called()
def _filter_mandatory_attributes(zeep_axl_factory_object):
"""Inspect the AXL schema and return a generator of an API endpoint's mandatory attributes.
Intended use if for local validation prior to submitting an 'add' AXL request to reduce the cost of
remote error responses from the AXL server.
Note:
... | 5,354,284 |
def company_instance():
"""
Mock Company instance
"""
with patch(PATCH_METHOD) as req:
req.return_value = COMPANY_TABLE_DATA
instance = get_company_instance()
instance.set_company_id("uuid")
instance.set_company_name("co")
instance.set_company_external_id("externa... | 5,354,285 |
def periodize_cylinders(xi, xf, d):
"""A generator for each of the copies of the cylinder that we have to check
for collisions."""
# of course the cylinder itself must be used, so we return that first in
# case anybody only cares to do one, then they do the most important one
yield xi, xf
# now ... | 5,354,286 |
async def test_ignore_terminate_fail(conf, game):
    """Test exit if script stops"""
    # Shell script that ignores SIGTERM and then idles past the test window.
    cmd = ["bash", "-c", 'trap "" SIGTERM && sleep 20']
    async with simsched.simsched(game, conf, cmd):
        # Give the scheduler a moment so the ignored SIGTERM is exercised.
        await asyncio.sleep(1)
def time_handler(start_time, start_fmt, elaps_fmt, today):
"""return StartTime, ElapsedTime tuple using
start/sub time string"""
start_time = datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S')
start_time = StartTime(start_time.year, start_time.month,
start_time.day, start_time... | 5,354,288 |
def lgb_multi_weighted_logloss_exgal(y_preds, train_data):
"""
@author olivier https://www.kaggle.com/ogrellier
https://www.kaggle.com/ogrellier/plasticc-in-a-kernel-meta-and-data/code
multi logloss for PLAsTiCC challenge
"""
# class_weights taken from Giba's topic : https://www.kaggle.com/titer... | 5,354,289 |
def makedirs(path, mode=0o777, exist_ok=False):
    """Recursive directory creation function.

    Stub only — the body is empty (documentation/skeleton module); see
    ``os.makedirs`` for the real implementation and full semantics.

    :type path: bytes | unicode
    :type mode: int
    :type exist_ok: int
    :rtype: None
    """
    pass
def api_owner_required(f):
"""
Authorization decorator for api requests that require the record's owner
Ensure a user is admin or the actual user who created the record,
if not send a 400 error.
:return: Function
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if current... | 5,354,291 |
def save_prediction_image(stacked_img, im_name, epoch, save_folder_name="result_images", save_im=True):
"""save images to save_path
Args:
stacked_img (numpy): stacked cropped images
save_folder_name (str): saving folder name
"""
div_arr = division_array(388, 2, 2, 512, 512)
img_cont ... | 5,354,292 |
def flatten_probas_ori(probas, labels, ignore=None):
"""
Flattens predictions in the batch
"""
if probas.dim() == 3:
# assumes output of a sigmoid layer
B, H, W = probas.size()
probas = probas.view(B, 1, H, W)
B, C, H, W = probas.size()
probas = probas.permute(0, 2, 3, 1)... | 5,354,293 |
def get_new_file_number(pat, destdir, startnum=1, endnum=10000):
"""Substitute the integers from startnum to endnum into pat and
return the first one that doesn't exist. The file name that is
searched for is os.path.join(destdir, pat % i)."""
for i in range(startnum, endnum):
temp = pat % i
... | 5,354,294 |
def VisionTransformer_small(pretrained=False,input_shape=(3,224,224),patch_size=16,num_classes=1000, depth=8,drop_rate=0.2,**kwargs):
""" My custom 'small' ViT model. Depth=8, heads=8= mlp_ratio=3."""
vit= VisionTransformer( patch_size=patch_size,num_classes=num_classes, depth=depth,
num_heads... | 5,354,295 |
def downgrade_database(
alembic_config_filename: str,
destination_revision: str,
alembic_base_dir: str = None,
starting_revision: str = None,
version_table: str = DEFAULT_ALEMBIC_VERSION_TABLE,
as_sql: bool = False) -> None:
"""
Use Alembic to downgrade our databa... | 5,354,296 |
def sparsify_axis_labels_old(ax, n=2):
    """
    Sparsify x-axis tick labels on the given matplotlib axis, keeping only
    those whose index is divisible by n. Works with factor plots.
    """
    tick_labels = ax.xaxis.get_ticklabels()
    for position, tick_label in enumerate(tick_labels):
        # Hide every label whose index is not a multiple of n.
        if position % n:
            tick_label.set_visible(False)
def cast_env(env):
    """Encode all the environment keys and values as the appropriate string
    type for the running Python version.

    Assumes all the data is, or can be represented as, UTF-8.
    """
    # Python 2 wants bytes; Python 3 wants native (unicode) str.
    to_native = six.ensure_str if sys.version_info[0] >= 3 else six.ensure_binary
    return {to_native(name): to_native(value) for name, value in env.items()}
def query_filter_choices(arg=None, fq=[]):
"""
Makes solr query and returns facets for tickets.
:param arg: solr query, string
"""
params = {
'short_timeout': True,
'fq': [
'project_id_s:%s' % c.project._id,
'mount_point_s:%s' % c.app.config.options.mount_poi... | 5,354,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.