content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def model_predict(test_data: FeatureVector):
"""
Endpoint to make a prediction with the model. The endpoint `model/train` should have been used before this one.
Args:
test_data (FeatureVector): A unit vector of feature
"""
try:
y_predicted = api.ml_model.predict_proba(test_data.to_... | 5,357,300 |
def print_header(args, argv, preamble='CIFAR10', printfn=print,
log=open(os.devnull, 'w'),
first=('model','dataset','epoch','batchsize','resume','out')):
""" Prints the arguments and header, and returns a logging print function """
def logprint(*args, file=log, **kwar... | 5,357,301 |
def search_folders(project, folder_name=None, return_metadata=False):
"""Folder name based case-insensitive search for folders in project.
:param project: project name
:type project: str
:param folder_name: the new folder's name
:type folder_name: str. If None, all the folders in the project will ... | 5,357,302 |
def tempo_para_percorrer_uma_distancia(distancia, velocidade):
    """Return the travel time, in hours, needed to cover ``distancia``
    in a straight line at the constant speed ``velocidade``.

    The result is rounded to two decimal places.
    """
    return round(distancia / velocidade, 2)
def getCorrection(start, end, pos):
"""Correct the angle for the trajectory adjustment
Function to get the correct angle correction when the robot deviates from
it's estimated trajectory.
Args:
start: The starting position of the robot.
end: The position the robot is supposed to arrive.
... | 5,357,304 |
def decontainerize_parameter(params_obj):
"""
Given a set of parameters already ran through containerize_parameter(),
reverse what was done.
"""
params = ['reference_data_path', 'test_data_path', 'results_dir']
# Set each of the params back to their original value.
for p in params:
... | 5,357,305 |
def main(input_filepath, output_filepath):
""" Runs data processing scripts to turn raw data from (../raw) into
cleaned data ready to be analyzed (saved in ../processed).
"""
logger = logging.getLogger(__name__)
logger.info('making final data set from raw data...')
df = load_csv_file_to_df(... | 5,357,306 |
def string2symbols(s):
"""
Convert string to list of chemical symbols.
Args:
s:
Returns:
"""
i = None
n = len(s)
if n == 0:
return []
c = s[0]
if c.isdigit():
i = 1
while i < n and s[i].isdigit():
i += 1
return int(s[:i]) * s... | 5,357,307 |
def set_meta(target, keys, overwrite=False):
"""Write metadata keys to .md file.
TARGET can be a media file or an album directory. KEYS are key/value pairs.
Ex, to set the title of test.jpg to "My test image":
sigal set_meta test.jpg title "My test image"
"""
if not os.path.exists(target):
... | 5,357,308 |
def coe2rv(a, e, i, node, w, v, MU=Earth.mu, degrees=True):
"""Given the classical orbital elements (a, e, i, node, w, v), this
returns the position (R) and the velocity (V) in an ECI frame
- Semimajor-axis (a)[km]: orbit size
- Eccentricity (e): orbit shape (0=circle, 1=line)
- Inclination (i)[deg... | 5,357,309 |
def get_dictionary(filename, dict_size=2000):
"""
Read the tweets and return a list of the 'max_words' most common words.
"""
all_words = []
with open(filename, 'r') as csv_file:
r = csv.reader(csv_file, delimiter=',', quotechar='"')
for row in r:
tweet = row[3]
... | 5,357,310 |
def find_last_index(l, x):
    """Return the index of the last occurrence of x within the list l.

    Raises:
        ValueError: if x does not occur in l.
    """
    # Walk the reversed list and translate the offset back to a
    # forward index on the first (i.e. last-in-l) match.
    for offset, item in enumerate(reversed(l)):
        if item == x:
            return len(l) - 1 - offset
    raise ValueError("'{}' is not in list".format(x))
def download_query_alternative(user, password, queryid, batch_size=500):
"""
This is an alternative implementation of the query downloader.
The original implementation only used a batch size of 20 as this allowed for using
plain LOC files. Unfortunately this is a bit slow and causes more load on the we... | 5,357,312 |
def parse_template(templ_str, event):
"""
Parses a template string and find the corresponding element in an event data structure.
This is a highly simplified version of the templating that is supported by
the Golang template code - it supports only a single reference to a sub
element of the event s... | 5,357,313 |
def current_time_hhmm() -> str:
"""
Uses the time library to get the current time in hours and minutes
Args:
None
Returns:
str(time.gmtime().tm_hour) + ":" + str(time.gmtime().tm_min) (str):
Current time formatted as hour:minutes
"""
logger... | 5,357,314 |
def mae_loss(output, target):
"""Creates a criterion that measures the mean absolute error (l1 loss)
between each element in the input :math:`output` and target :math:`target`.
The loss can be described as:
.. math::
\\ell(x, y) = L = \\operatorname{mean}(\\{l_1,\\dots,l_N\\}^\\top), \\qua... | 5,357,315 |
def test_assign_id_in_html(app: Sphinx) -> None:
"""It assigns an ID to notes automatically in HTML."""
app.build()
tree = cached_parse(os.path.join(app.outdir, "index.html"))
notes = tree.find_all("div", class_="note")
assert len(notes) == 3
# first note is not inside a section
assert no... | 5,357,316 |
def aistracker_from_json(filepath, debug=True):
"""
get an aistracker object from a debug messages JSON that was previously
exported from pyaisnmea
Args:
filepath(str): full path to json file
debug(bool): save all message payloads and decoded attributes into
message... | 5,357,317 |
def configure(node):
""" Generates the script to set the hostname in a node """
script = []
script.append(Statements.exec("hostname %s" % node.getName()))
script.append(Statements.createOrOverwriteFile(
"/etc/hostname", [node.getName()]))
script.append(Statements.exec(
"sed -i 's/127... | 5,357,318 |
def register_preprocess(function_name : str, prep_function):
"""
Register a preprocessing function for use in delta.
Parameters
----------
function_name: str
Name of the preprocessing function.
prep_function:
A function of the form prep_function(data, rectangle, bands_list), whe... | 5,357,319 |
def processOptional(opt):
    """Expand the optional grammar element half of the time.

    Returns the empty string with probability 0.5; otherwise delegates to
    processRHS() to expand ``opt.option``.
    """
    if random.random() <= 0.5:
        return ''
    return processRHS(opt.option)
def remove_non_paired_trials(df):
"""Remove non-paired trials from a dataset.
This function will remove any trials from the input dataset df that do not
have a matching pair. A matching pair are trial conditions A->B and B->A.
"""
# Define target combinations
start_pos = np.concatenate(df['sta... | 5,357,321 |
def compute_weighted_means_ds(ds,
shp,
ds_name='dataset',
time_range=None,
column_names=[],
averager=False,
df_output=pd.DataFrame(),
... | 5,357,322 |
def action_remove(indicator_id, date, analyst):
"""
Remove an action from an indicator.
:param indicator_id: The ObjectId of the indicator to update.
:type indicator_id: str
:param date: The date of the action to remove.
:type date: datetime.datetime
:param analyst: The user removing the ac... | 5,357,323 |
def repack_orb_to_dalton(A, norb, nclosed, nact, nvirt):
"""Repack a [norb, norb] matrix into a [(nclosed*nact) +
(nclosed*nvirt) + (nact*nvirt)] vector for contraction with the CI
Hamiltonian.
"""
assert norb == nclosed + nact + nvirt
assert A.shape == (norb, norb)
# These might be availa... | 5,357,324 |
def get_config(section="MAIN", filename="config.ini"):
"""
Function to retrieve all information from token file.
Usually retrieves from config.ini
"""
try:
config = ConfigParser()
with open(filename) as config_file:
config.read_file(config_file)
return config[sect... | 5,357,325 |
def _worst_xt_by_core(cores) -> float:
"""
Assigns a default worst crosstalk value based on the number of cores
"""
worst_crosstalks_by_core = {7: -84.7, 12: -61.9, 19: -54.8} # Cores: Crosstalk in dB
worst_xt = worst_crosstalks_by_core.get(cores) # Worst aggregate intercore XT
return worst_xt | 5,357,326 |
def process_files(files):
    """Decode METAR lines from the given files.

    Args:
        files: iterable of file-system paths; every line of every file is
            passed to process_line().
    """
    for path in files:
        # Original opened without closing (handle leak) and used
        # readlines(), loading each whole file into memory. The context
        # manager guarantees closure and iterating the handle streams lines.
        with open(path, "r") as fh:
            for line in fh:
                process_line(line)
async def test_config_entry_retry(hass: HomeAssistant) -> None:
"""Test that a config entry can be retried."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=MAC_ADDRESS
)
config_entry.add_to_hass(hass)
with _patch_discovery(no_device=True), _patch_wif... | 5,357,328 |
def _write_title(file: TextIO) -> None:
"""
Writes the title of test specifications
Args:
file (TextIO): test spec file
"""
title = "Test Specification"
file.write(title + "\n")
file.write("=" * len(title) + "\n")
file.write("\n")
file.write("This file lists all test cases'... | 5,357,329 |
def write_status_unsafe(status, message, status_file, tempdir=None):
    """Write a JSON structure to a file non-atomically.

    Only ``status`` is serialised; ``message`` and ``tempdir`` are accepted
    for interface compatibility and are not used here.
    """
    with open(status_file, "w") as handle:
        handle.write(json.dumps(status))
def _CheckUploadStatus(status_code):
"""Validates that HTTP status for upload is 2xx."""
return status_code / 100 == 2 | 5,357,331 |
def load(path: str, **kwargs) -> BELGraph:
"""Read a BEL graph.
:param path: The path to a BEL graph in any of the formats
with extensions described below
:param kwargs: The keyword arguments are passed to the importer
function
:return: A BEL graph.
This is the universal loader, which me... | 5,357,332 |
def predict_on_matrix(input_matrix: List[List[Any]], provided_columns_names: Optional[List[str]] = None) \
-> Tuple[List[List[Any]], Tuple[str, ...]]:
"""
Make prediction on a Matrix of values
:param input_matrix: data for prediction
:param provided_columns_names: (Optional). Name of columns fo... | 5,357,333 |
def wait_for_cluster_state(conn, opts, cluster_instances, cluster_state):
"""
Wait for all the instances in the cluster to reach a designated state.
cluster_instances: a list of boto.ec2.instance.Instance
cluster_state: a string representing the desired state of all the instances in the cluster
... | 5,357,334 |
def smooth_l1_loss(
prediction: oneflow._oneflow_internal.BlobDesc,
label: oneflow._oneflow_internal.BlobDesc,
beta: float = 1.0,
name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
"""This operator computes the smooth l1 loss.
The equation is:
.. math::
& out = \... | 5,357,335 |
def get_model_fields(model, concrete=False): # type: (Type[Model], Optional[bool]) -> List[Field]
"""
Gets model field
:param model: Model to get fields for
:param concrete: If set, returns only fields with column in model's table
:return: A list of fields
"""
if not hasattr(model._meta, 'g... | 5,357,336 |
def loss_fun(para):
    """Loss function: the negated output of the processed circuit.

    :param para: parameters forwarded to ``my_cir``; presumably circuit
        parameters to optimise — confirm against ``my_cir``'s signature.
    :return: ``-data_processing(my_cir(para))``; negated, presumably so a
        minimiser maximises the processed value — TODO confirm.
    """
    return -data_processing(my_cir(para))
def update_user_history(user_id, expire_seconds):
    """Update user requests number history on every task submission.

    Increments a per-user request counter stored in Redis and (re)sets its
    expiry.

    Args:
        user_id: Redis key identifying the user.
        expire_seconds: third positional argument to ``redis.set`` —
            presumably its ``ex`` (TTL in seconds) parameter; confirm
            against the redis client in use.
    """
    # NOTE(review): exists/get/set is not atomic — concurrent submissions
    # can lose increments. redis.incr() plus expire() would be race-free.
    if not redis.exists(user_id):
        redis.set(user_id, 1, expire_seconds)
    else:
        redis.set(user_id, int(redis.get(user_id)) + 1, expire_seconds)
def GetDynTypeMgr():
    """Get the dynamic type manager.

    Returns the module-level ``_gDynTypeMgr`` singleton; presumably it is
    created elsewhere in this module — confirm it is initialised before
    this accessor is called.
    """
    return _gDynTypeMgr
def identifyEntity(tweet, entities):
"""
Identify the target entity of the tweet from the list of entities
:param tweet:
:param entities:
:return:
"""
best_score = 0 # best score over all entities
targetEntity = "" # the entity corresponding to the best score
for word in tweet:
... | 5,357,340 |
def angle_between(a, b):
"""
compute angle in radian between a and b. Throws an exception if a or b has zero magnitude.
:param a:
:param b:
:return:
"""
# TODO: check if extreme value that can make the function crash-- use "try"
# from numpy.linalg import norm
# from numpy import dot... | 5,357,341 |
def _apply_D_loss(scores_fake, scores_real, loss_func):
"""Compute Discriminator losses and normalize loss values
Arguments
---------
scores_fake : list
discriminator scores of generated waveforms
scores_real : list
discriminator scores of groundtruth waveforms
loss_func : objec... | 5,357,342 |
def render_html(options):
"""Start a Flask server to generate HTML report on request."""
# spin up the Flask server
config = ProdConfig
config.SQLALCHEMY_DATABASE_URI = options['database']
report_options = options['report']
config.CHANJO_PANEL_NAME = report_options.get('panel_name')
config.C... | 5,357,343 |
def _write_int(ofile, i, indent_level, dicts, key, print_type):
"""
Writes 'i' to 'ofile'. If 'print_type' is true, the M2K type
followed by a colon is written first.
"""
# appease pychecker
if indent_level or dicts or key:
pass
if print_type:
ofile.write(_dec_types[_get_int... | 5,357,344 |
def _TestCase3(iTolerance):
"""
This is test case function #3. |br|
Args:
iTolerance: maximum tolerance of a difference between an expected value
and a real value
Returns:
Nothing
"""
tStart = rxcs.console.module_progress('LNA test (case 3) SNR')
# Define the ... | 5,357,345 |
def get_temp_dir():
    """Return the path to the system temp directory.

    Returns:
        str: The temp directory path, with slashes normalised by
        ``fix_slashes``.
    """
    raw_path = tempfile.gettempdir()
    return fix_slashes(raw_path)
def async_parser(_, objconf, skip=False, **kwargs):
"""Asynchronously parses the pipe content
Args:
_ (None): Ignored
objconf (obj): The pipe configuration (an Objectify instance)
skip (bool): Don't parse the content
kwargs (dict): Keyword arguments
Kwargs:
assign (... | 5,357,347 |
def pick_op(r, maxr, w, maxw):
    """Choose a read or a write operation"""
    # Force a write once the read budget is exhausted; otherwise choose
    # randomly, biased by the remaining write budget. The `or` preserves
    # the original short-circuit: no random draw when r == maxr.
    must_write = r == maxr or random.random() >= float(w) / maxw
    return "write" if must_write else "read"
def sim_nochange(request):
    """Return a dummy YATSM model container with a no-change dataset.

    "No-change" dataset is simply a timeseries drawn from samples of one
    standard normal.

    Args:
        request: unused here; presumably the pytest fixture ``request``
            object, suggesting this function is registered as a fixture —
            confirm at the decorator site.
    """
    X, Y, dates = _sim_no_change_data()
    # [0] — presumably a single "no break" index for the dummy model;
    # TODO confirm against setup_dummy_YATSM.
    return setup_dummy_YATSM(X, Y, dates, [0])
def internalpatch(patchobj, ui, strip, cwd, files=None, eolmode='strict'):
"""use builtin patch to apply <patchobj> to the working directory.
returns whether patch was applied with fuzz factor."""
if files is None:
files = {}
if eolmode is None:
eolmode = ui.config('patch', 'eol', 'stri... | 5,357,350 |
def test_convert_str_to_datetime(dummy_df, dummy_df_datetime):
"""Parse strings to datetime format."""
actual = clean.convert_str_to_datetime(dummy_df, colname="reviewdate", datetime_format="%B %d %Y")
expected = deepcopy(dummy_df_datetime)
# Preserve original dtypes
expected["reviewdate"] = expec... | 5,357,351 |
def test_register_interface_extend_task(collector, int_decl):
"""Test extending a task by adding interfaces.
"""
collector.contributions['exopy.Task'] = TaskInfos()
task, _ = int_decl
task.task = 'exopy.Task'
task.register(collector, {})
assert collector.contributions['exopy.Task'].interfac... | 5,357,352 |
def get_queryset_descendants(nodes, include_self=False, add_to_result=None):
"""
RUS: Запрос к базе данных потомков. Если нет узлов,
то возвращается пустой запрос.
:param nodes: список узлов дерева, по которым необходимо отыскать потомков
:param include_self: признак включения в результ исходного сп... | 5,357,353 |
def op_mul(lin_op, args):
"""Applies the linear operator to the arguments.
Parameters
----------
lin_op : LinOp
A linear operator.
args : list
The arguments to the operator.
Returns
-------
NumPy matrix or SciPy sparse matrix.
The result of applying the linear o... | 5,357,354 |
def comparator(x, y):
    """Default three-way comparator.

    :param x: first value
    :param y: second value
    :return: -1 when x < y, 1 when x > y, 0 when the two compare equal.
    """
    # Boolean subtraction yields exactly -1 / 0 / 1 without branching.
    return (x > y) - (x < y)
def test_json_parser_input_error(sdc_builder, sdc_executor):
"""Test JSON parser processor with an invalid input value. The pipeline would look like:
dev_raw_data_source >> json_parser >> trash
"""
pipeline_builder = sdc_builder.get_pipeline_builder()
dev_raw_data_source = pipeline_builder.add_... | 5,357,356 |
def set_trait(age, age_risk_map, sex, sex_risk_map, race, race_risk_map):
""" A trait occurs based on some mix of """
if age in age_risk_map:
risk_from_age = age_risk_map[age]
else:
risk_from_age = 0
if sex in sex_risk_map:
risk_from_sex = sex_risk_map[sex]
else:
ri... | 5,357,357 |
def isvalid(save_path, file):
    """Return True if the file described by the parameters is an existing
    regular file with the appropriate ('.meta') file extension.
    """
    full_path = os.path.join(save_path, file)
    has_meta_suffix = str(file).endswith('.meta')
    return has_meta_suffix and os.path.isfile(full_path)
def test_yang_tree():
""" check that the tree is consistent with the yang
"""
res = subprocess.run(['pyang', '-f', 'tree', '--tree-line-length', '69', '-p', IETF_DIR, YANG_FILE], stdout=subprocess.PIPE)
treefile = Path(YANG_FILE).with_suffix('.tree')
tree = open(treefile, 'r').read()
assert res.... | 5,357,359 |
def tidy_expression(expr, design=None):
"""Converts expression matrix into a tidy 'long' format."""
df_long = pd.melt(
_reset_index(
expr, name='gene'), id_vars=['gene'], var_name='sample')
if design is not None:
df_long = pd.merge(
df_long,
_reset_index... | 5,357,360 |
def model_remote_to_local(remote_timestamps, local_timestamps, debug=False):
"""for timestamps"""
a1=remote_timestamps[:,np.newaxis]
a2=np.ones( (len(remote_timestamps),1))
A = np.hstack(( a1,a2))
b = local_timestamps[:,np.newaxis]
x,resids,rank,s = np.linalg.lstsq(A,b)
if debug:
pri... | 5,357,361 |
def get_optimizer(name):
    """Get an optimizer generator that returns an optimizer according to lr."""
    # Guard clause first; only 'adam' is supported.
    if name != 'adam':
        raise ValueError('Unknown optimizer %s.' % name)

    def build_adam(lr):
        return tf.keras.optimizers.Adam(lr=lr)

    return build_adam
def get_multi(response: Response, common: dict = Depends(common_parameters)) -> List[ShopToPriceSchema]:
"""List prices for a shop"""
query_result, content_range = shop_to_price_crud.get_multi(
skip=common["skip"],
limit=common["limit"],
filter_parameters=common["filter"],
sort_p... | 5,357,363 |
async def test_sensor_empty(
hass: HomeAssistant, config_entry: ConfigEntry, vehicle_type: str
):
"""Test for Renault sensors with empty data from Renault."""
entity_registry = mock_registry(hass)
device_registry = mock_device_registry(hass)
await hass.config_entries.async_setup(config_entry.entry_... | 5,357,364 |
def send_crash(request, machine_config_info, crashlog):
    """
    Save houdini crashes.

    Resolves (or creates) the machine configuration for the requesting
    host, then persists the crash log against it.

    :param request: incoming request; only used to derive the client IP.
    :param machine_config_info: machine configuration payload passed to
        get_or_save_machine_config.
    :param crashlog: crash log contents to store.
    :return: True (unconditionally).
    """
    # NOTE(review): datetime.now() is evaluated twice, so the config and
    # the crash record get slightly different timestamps — confirm intended.
    machine_config = get_or_save_machine_config(
        machine_config_info, get_ip_address(request),
        datetime.datetime.now())
    save_crash(machine_config, crashlog, datetime.datetime.now())
    return True
def read_csv_file(filename):
"""Read csv file into a numpy array
"""
header_info = {}
# Make this Py2.x and Py3.x compatible
if sys.version_info[0] < 3:
infile = open(filename, 'rb')
else:
infile = open(filename, 'r', newline='', encoding='utf8')
with infile as csvfile:
... | 5,357,366 |
def apply_HAc_dense(A_C, A_L, A_R, Hlist):
"""
Construct the dense effective Hamiltonian HAc and apply it to A_C.
For testing.
"""
d, chi, _ = A_C.shape
HAc = HAc_dense(A_L, A_R, Hlist)
HAc_mat = HAc.reshape((d*chi*chi, d*chi*chi))
A_Cvec = A_C.flatten()
A_C_p = np.dot(HAc_mat, A_Cv... | 5,357,367 |
def min_index(array, i, j):
    """Helper for selection sort: return the index of the smallest element
    of 'array' within the half-open range [i, j).

    Ties resolve to the earliest index; for an empty range, i itself is
    returned (matching the original behaviour).
    """
    best = i
    # array[i] < array[i] is always false, so scanning from i + 1 is
    # equivalent to the original full-range scan.
    for candidate in range(i + 1, j):
        if array[candidate] < array[best]:
            best = candidate
    return best
def filterControlChars(value, replacement=' '):
    """
    Returns string value with control chars being substituted with replacement character
    >>> filterControlChars(u'AND 1>(2+3)\\n--')
    u'AND 1>(2+3) --'
    """
    # Delegates to filterStringValue with the module's printable-character
    # regex; presumably characters NOT matching the regex are replaced —
    # confirm against filterStringValue's contract.
    return filterStringValue(value, PRINTABLE_CHAR_REGEX, replacement)
def get_deobfuscator(var_names) -> str:
"""Creates a deobfuscator for the given set of var names.
Args:
var_names (list): List of variable names from the `obfuscate` function.
Returns:
str: Deobfuscator
"""
return f'\n\ngetattr(getattr(__main__, [x for x in dir(__main__) if x.start... | 5,357,370 |
def write_trt_rpc(cell_ID, cell_time, lon, lat, area, rank, hmin, hmax, freq,
fname, timeformat='%Y%m%d%H%M'):
"""
writes the rimed particles column data for a TRT cell
Parameters
----------
cell_ID : array of ints
the cell ID
cell_time : array of datetime
the ... | 5,357,371 |
def get_tag(string: str) -> Tag:
    """Return the Tag whose ``tag`` field equals *string*.

    Presumably a Django-style ORM lookup; if so, raises
    ``Tag.DoesNotExist`` when no row matches and ``MultipleObjectsReturned``
    when more than one does — confirm against the Tag model.
    """
    return Tag.objects.get(tag=string)
def average_precision(gt, pred):
"""
Computes the average precision.
This function computes the average prescision at k between two lists of
items.
Parameters
----------
gt: set
A set of ground-truth elements (order doesn't matter)
pred: list
A list of predicted elements (order does mat... | 5,357,373 |
def rotate_coo(x, y, phi):
"""Rotate the coordinates in the *.coo files for data sets
containing images at different PAs.
"""
# Rotate around center of image, and keep origin at center
xin = 512.
yin = 512.
xout = 512.
yout = 512.
cos = math.cos(math.radians(phi))
sin = math.s... | 5,357,374 |
def database():
"""
View MongoDB Configuration
"""
christisMongoconfigPath = get_mongo_configuration_location()
if (not Path(christisMongoconfigPath).is_file()):
typer.echo("ERROR: The cli.yaml file can't be found please use CLI to generate it!",err=True)
raise typer.Exit(code=1)
... | 5,357,375 |
def get_spectral_info(self):
"""
Return the channel values
"""
if self.method=='scouse':
self.specx=self.scouseobject.xtrim
self.specy=self.my_spectrum.spectrum[self.scouseobject.trimids]
self.specrms=self.my_spectrum.rms
else:
self.specx = self.individual[self.index,... | 5,357,376 |
def ip_block_array():
    """Return an ipBlock array instance fixture (three consecutive
    addresses in the 10.0.0.0/24 range)."""
    return ['10.0.0.%d' % host for host in (1, 2, 3)]
def check_term_source_refs_usage(i_df, dir_context):
"""Checks Term Source REF linkages in investigation, study and assay files
:param i_df: An investigation DataFrame
:param dir_context: Path to where the investigation file is found
:return: None
"""
check_term_source_refs_in_investigation(i_d... | 5,357,378 |
def load_model(model_name, dir_loc=None, alive_bar_on=True):
"""Load local model_name=model_s if present, else fetch from hf.co."""
if dir_loc is None:
dir_loc = ""
dir_loc = Path(dir_loc).absolute().as_posix()
file_loc = f"{dir_loc}/{model_name}"
if Path(file_loc).exists():
if aliv... | 5,357,379 |
def get_dev_requirements() -> Generator:
"""Yield package name and version for Python developer requirements."""
return get_versions("DEVELOPMENT") | 5,357,380 |
def initialize_logging_errors_to_console(logger):
    """Log errors to the console, in a simple single-line format."""
    console = logging.StreamHandler()
    console.setLevel(logging.ERROR)
    formatter = logging.Formatter('Error: %(asctime)s - %(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)
def _ndb_key_to_cloud_key(ndb_key):
    """Convert a ndb.Key to a cloud entity Key.

    NOTE(review): only the key's kind and id are copied — any ancestor
    path on the ndb key is dropped, so this presumably assumes root-level
    keys; confirm callers never pass keys with parents.
    """
    return datastore.Key(
        ndb_key.kind(), ndb_key.id(), project=utils.get_application_id())
def inference(H, images, train=True):
"""Build the MNIST model up to where it may be used for inference.
Parameters
----------
images: Images placeholder, from inputs().
train: whether the network is used for train of inference
Returns
-------
softmax_linear: Output tensor with the com... | 5,357,383 |
def _find_modules_and_directories(top_level_directory):
"""
Recursive helper function to find all python files included in top level
package. This will recurse down the directory paths of any package to find
all modules and subpackages in order to create an exhaustive list of all
python files within... | 5,357,384 |
def UniqueLattice(lattice_vectors,ind):
"""
Takes a list with two tuples, each representing a lattice vector and a list with the genes of an individual.
Returns a list with two tuples, representing the equivalent lattice vectors with the smallest cell circunference.
"""
x_1 = lattice_vectors(0,ind)
... | 5,357,385 |
def register_errors(app: Flask):
"""注册需要的错误处理程序包到 Flask 程序实例 app 中"""
@app.errorhandler(400) # Bad Request 客户端请求的语法错误,服务器无法理解
def bad_request(e):
return render_template('error.html', description=e.description, code=e.code), 400
@app.errorhandler(404) # Not Found 服务器无法根据客户端的请求找到资源(网页)
def... | 5,357,386 |
def websafe_encode(data):
    """Encodes a byte string into websafe-base64 encoding.

    :param data: The input to encode.
    :return: The encoded string, with '=' padding removed.
    """
    # Padding '=' only ever appears at the end of base64 output, so
    # rstrip is equivalent to the global replace in the original.
    encoded = urlsafe_b64encode(data).decode('ascii')
    return encoded.rstrip('=')
def gravatar(email: Union[str, list]) -> str:
"""Converts the e-mail address provided into a gravatar URL.
If the provided string is not a valid e-mail address, this
function just returns the original string.
Args:
email: e-mail address to convert.
Returns:
Gravatar URL, or None i... | 5,357,388 |
def test_cutmix_batch_fail7():
"""
Test CutMixBatch op
We expect this to fail because labels are not in one-hot format
"""
logger.info("test_cutmix_batch_fail7")
# CutMixBatch Images
data1 = ds.Cifar10Dataset(DATA_DIR, num_samples=10, shuffle=False)
cutmix_batch_op = vision.CutMixBatch... | 5,357,389 |
def bar_2_MPa(value):
    """
    Convert pressure from bar to MPa.

    :param value: pressure value in bar
    :return: pressure value in MPa
    """
    # 1 bar = const.bar Pa (1e5); dividing by const.mega (1e6) yields
    # megapascals, matching the function name — the old docstring wrongly
    # described the result as Pa.
    return value * const.bar / const.mega
def poll_all_bme680(bme_config, bme_sensor, pi_id, pi_name, engine,
mqtt_client=None):
"""
Poll all bme680 sensors listed in the config file for this pi
Save resulting records to the database specified engine
"""
if bme_sensor is not None:
for location, details in bme_con... | 5,357,391 |
def walk(x, y, model, theta, conditions=None, var2=0.01, mov=100,
d=1, tol=1e-3, mode=True):
"""Executes the walker implementation.
Parameters
----------
x : np.ndarray
An $(m, n)$ dimensional array for (cols, rows).
y : np.ndarray
An $n$ dimensional array that will be ... | 5,357,392 |
def izbor_letov():
"""Glavna stran."""
# Iz cookieja dobimo uporabnika in morebitno sporočilo
(username, ime, priimek) = get_potnik()
c.execute("SELECT distinct drzava FROM lokacija ORDER BY drzava")
drzave=c.fetchall()
drzava_kje = bottle.request.forms.drzava_kje
mesto_kje = bottle.request.forms.mesto_kje
l... | 5,357,393 |
def generate_data_from_cvs(csv_file_paths):
"""Generate data from list of csv_file_paths. csv_file_paths contains path to CSV file, column_name, and its label
`csv_file_paths`: A list of CSV file path, column_name, and label
"""
data = []
for item in csv_file_paths:
values = read_csv(item[0]... | 5,357,394 |
def log_density_gaussian(x, mu, logvar):
"""Calculates log density of a gaussian.
Parameters
----------
mu: torch.Tensor or np.ndarray or float
Mean.
logvar: torch.Tensor or np.ndarray or float
Log variance.
"""
normalization = - 0.5 * (math.log(2 * math.pi) + logvar)
i... | 5,357,395 |
def parse_path(path: Optional[str] = None, root: str = '/') \
-> Iterator[str]:
"""Parse PATH variable
:param path: PATH string to parse,
default to the ``PATH`` environment variable
:param root: Path to prepend to all paths found
:return: Iterator over the processed paths
"""
i... | 5,357,396 |
def invalid_hexadecimal(statement):
"""Identifies problem caused by invalid character in an hexadecimal number."""
if statement.highlighted_tokens: # Python 3.10
prev = statement.bad_token
wrong = statement.next_token
else:
prev = statement.prev_token
wrong = statement.bad_t... | 5,357,397 |
def test_multi_regex_text_strip():
    """Test with multiple character regex delimiter"""
    std_tests_strip('!@', re.compile(r'!@'))
    # block_size=1 presumably forces the two-character delimiter to span
    # read boundaries — confirm against std_tests_strip.
    std_tests_strip('!@', re.compile(r'!@'), block_size=1)
async def clear(ctx):
    """Clears requests made by you. Also removes stale requests.

    No-ops silently when the admin bot is disabled.

    Args:
        ctx: command context; only ``ctx.message.author.name`` is read,
            to address the confirmation message.
    """
    if not cog_Admin.ADMIN_bot_enabled:
        return
    # NOTE(review): remove_all=True presumably clears every request, not
    # only the caller's, despite the docstring — confirm clear_requests.
    clear_requests(remove_all=True)
    s = ctx.message.author.name + ', your requests and all stale requests have been removed.'
    await bot.say(s)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.