content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def create_cli(ctx):
    """Create a dataset in a CKAN instance from options stored on the CLI context.

    Expects ``ctx.obj`` to carry CKAN_HOST, CKAN_KEY, DATAPACKAGE,
    DATASTORE and EXIT_CODE entries, which are forwarded positionally
    to :func:`create`.
    """
    option_keys = ('CKAN_HOST', 'CKAN_KEY', 'DATAPACKAGE', 'DATASTORE', 'EXIT_CODE')
    create(*(ctx.obj[key] for key in option_keys))
def divideArray(array, factor):
"""Dzielimy tablice na #factor tablic, kazda podtablica ma tyle samo elem oprocz ostatniej"""
factor = min(factor, len(array))
length = floor(len(array) * 1.0 / factor)
res = []
for i in range(factor - 1):
res = res + list([array[i * length:(i + 1) * length]]... | 5,353,301 |
async def open(command):
    """Open the shutter and report completion on the command object."""
    command.info(text="Opening the shutter!")
    # Real communication with the shutter hardware would happen here.
    command.finish(shutter="open")
def argMax(scores):
    """
    Return the key with the highest value, or None if `scores` is empty.

    Ties are broken in favor of the key that comes first in iteration
    order, matching the original index-based implementation.
    """
    if not scores:
        return None
    # max() with a key function replaces the old items()/values() indexing,
    # which raised TypeError on Python 3 because dict.items() returns a
    # non-subscriptable view. It also avoids shadowing the builtin `all`.
    return max(scores, key=scores.get)
def rotateright(arr, k) -> list:
    """
    Rotate `arr` right by `k` positions, in place, and return it.

    Fixes two defects in the original: `temp = a[0]` referenced an
    undefined name `a` (NameError on every call), and the single-cycle
    element-chasing loop silently corrupted the array whenever
    gcd(k, len(arr)) > 1.
    """
    n = len(arr)
    if n == 0:
        return arr
    k %= n  # rotating by a full length is a no-op
    if k:
        # Slice-and-concatenate: the last k elements move to the front.
        arr[:] = arr[-k:] + arr[:-k]
    return arr
def get_video_chunk_path(instance, filename):
"""
Get path to store video chunk
the path will be of format : project_id/chunks/chunk_no.mp3
"""
if (not instance.project_id) and (not instance.chunk_no):
raise ValidationError('Invalid Project ID')
return os.path.join(instance.project_id +... | 5,353,305 |
def scrap_page(targetURL):
""" Scraps a single URL for sentences, downloading audios """
# Extract post title from URL and use as .csv file name
name = targetURL.split("/")[3]
with open(f"csv/{name}.csv", "w+", encoding="utf8") as card:
# Headers for the GET request so it doesn't get easily rej... | 5,353,306 |
def create_reverse_routes(rt):
    """Reverse a solved TSP route by inverting its stop-order indices in place.

    Only acts when the solver reported success (status == 1); the depot
    (order 0) keeps its position.
    """
    if rt.tsp_solver_status != 1:
        return
    order_map = rt.stop_order_dict
    for stop, position in order_map.items():
        if position:
            order_map[stop] = rt.num_stops - position
def main(cfg: Config) -> None:
"""Main function."""
data_cfg = cfg.data
if data_cfg.dataset == DS.adult:
print("Using the adult dataset.")
elif data_cfg.dataset == DS.cmnist:
print("Using CMNIST.")
if cfg.misc.use_wandb:
print("Starting W&B.")
args_as_dict = flatten(Ome... | 5,353,308 |
def draw_string(turtle):
"""
This function draws the strings to the guitar
:param turtle: The name of the turtle
:return: None
"""
turtle.penup()
length = 35
turtle.setpos(60, -45)
for i in range(5):
turtle.penup()
turtle.left(90)
turtle.pendown... | 5,353,309 |
def switch_to_sink(pulse, sink):
    """Switch audio output to `sink` and migrate all active streams to it."""
    # Make `sink` the default for any streams created from now on.
    pulse.default_set(sink)
    # Re-route every stream that is already playing.
    for stream in pulse.sink_input_list():
        pulse.sink_input_move(stream.index, sink.index)
def get_ratio(numerator, denominator):
    """Return numerator/denominator rounded to two places; 0 when the
    denominator is zero or otherwise falsy (None, etc.).

    A falsy numerator is treated as 0.
    """
    if not denominator:
        return 0
    return round(float(numerator or 0) / float(denominator), 2)
def create_app():
"""
Create a Flask application for face alignment
Returns:
flask.Flask -> Flask application
"""
app = Flask(__name__)
model = setup_model()
app.config.from_mapping(MODEL=model)
@app.route("/", methods=["GET"])
def howto():
instruction = (
... | 5,353,312 |
def Red(n=1.0):
    """Set the current OpenGL draw color to red with intensity ``n``."""
    rgb = (n, 0.0, 0.0)
    glColor3fv(rgb)
def test_load_images_and_stack_2d_random(test_output_dirs: OutputFolderForTests) -> None:
"""
Test load of 2D images
"""
image_size = (20, 30)
low = 0
high = 200
array1 = np.random.randint(low=low, high=high, size=image_size, dtype='uint16')
write_test_dicom(array1, test_output_dirs.roo... | 5,353,314 |
def grad_clip(x:Tensor) -> Tensor:
"""
Clips too big and too small gradients.
Example::
grad = grad_clip(grad)
Args:
x(:obj:`Tensor`): Gradient with too large or small values
Returns:
:obj:`Tensor`: Cliped Gradient
"""
x[x>5] = 5
x[x<-5] = -5
... | 5,353,315 |
def unfold_kernel(kernel):
"""
In pytorch format, kernel is stored as [out_channel, in_channel, height, width]
Unfold kernel into a 2-dimension weights: [height * width * in_channel, out_channel]
:param kernel: numpy ndarray
:return:
"""
k_shape = kernel.shape
weight = np.zeros([k_shape[... | 5,353,316 |
def write(message: str, color: str = None) -> None:
    """Forward `message` to the module logger, optionally colorized.

    :param message: text to emit
    :param color: optional color name understood by the logger; None for plain
    """
    logger.write(message, color)
def _parse_polyline_locations(locations, max_n_locations):
"""Parse and validate locations in Google polyline format.
The "locations" argument of the query should be a string of ascii characters above 63.
Args:
locations: The location query string.
max_n_locations: The max allowable numbe... | 5,353,318 |
def dense(data, weight, bias=None, out_dtype=None):
"""The default implementation of dense in topi.
Parameters
----------
data : tvm.Tensor
2-D with shape [batch, in_dim]
weight : tvm.Tensor
2-D with shape [out_dim, in_dim]
bias : tvm.Tensor, optional
1-D with shape [o... | 5,353,319 |
def choice(*choices: T, default: Union[T, _MISSING_TYPE] = MISSING, **kwargs: Any) -> T:
"""Makes a field which can be chosen from the set of choices from the
command-line.
Returns a regular `dataclasses.field()`, but with metadata which indicates
the allowed values.
(New:) If `choices` is a dicti... | 5,353,320 |
def __set_config(key: str, value: Any):
""" Update config entry. You should not use this function in your code, once the cfg is set up it should not be changed again!
Args:
key: name of config entry
value: new value
"""
print('update ', (key, value), ' config')
setattr(cfg, key, val... | 5,353,321 |
def modulelink(module, baseurl=''):
    """Return a hyperlink to a module's docs: local when the source file
    is available, otherwise the online python.org reference."""
    filename = module + '.py'
    if filename not in local_files:
        # No local copy of this module; point at the online library docs.
        baseurl = 'http://www.python.org/doc/current/lib/module-'
    return link(baseurl + module + '.html', module)
def test_view_translate_not_authed_public_project(client, locale0,
settings_debug):
"""
If the user is not authenticated and we're translating project
ID 1, return a 200.
"""
# Clear out existing project with ID=1 if necessary.
Project.objects.fi... | 5,353,323 |
def print_copy_method(struct_name, field_count, struct_members):
"""Generate copy function for joint data structure."""
print("struct joint_{0} *joint_{0}_copy(void *(*mem_allocator)(size_t), struct joint_{0} *lhs, struct joint_{0} *rhs) {{".format(struct_name))
print(" if (lhs == rhs) {")
print(" r... | 5,353,324 |
def cleanup_command(
__cli_options=False,
artifactsdir=defaults['artifacts_dir'], # Where dexy should store working files.
logdir=defaults['log_dir'], # DEPRECATED
reports=True # Whether directories generated by reports should also be removed.
):
"""
Remove the directorie... | 5,353,325 |
def add_reference(
*, short_purpose: str, reference: Optional[str] = None, doi: Optional[str] = None
) -> Callable:
"""Decorator to link a reference to a function or method.
Acts as a marker in code where particular alogrithms/data/... originates.
General execution of code silently passes these markers... | 5,353,326 |
def test_autoimport_list(mocker, credentials):
"""Test list autoimport jobs being outputed to the shell."""
runner = CliRunner()
autoimport_response = {
"meta": {"count": 2, "next": None, "previous": None},
"results": [
{
"id": "41fd0397-62b0-4ef9-992f-423435b5d5e... | 5,353,327 |
def get_capital_ptd_act():
"""Get chart of accounts from shared drive."""
logging.info('Retrieving latest CIP project to date')
command = "smbclient //ad.sannet.gov/dfs " \
+ "--user={adname}%{adpass} -W ad -c " \
+ "'prompt OFF;" \
+ " cd \"FMGT-Shared/Shared/BUDGET/" \
+ "O... | 5,353,328 |
def eval_curvature(poly, x_vals):
"""
This function returns a vector with the curvature based on path defined by `poly`
evaluated on distance vector `x_vals`
"""
# https://en.wikipedia.org/wiki/Curvature# Local_expressions
def curvature(x):
a = abs(2 * poly[1] + 6 * poly[0] * x) / (1 + (3 * poly[0] * x... | 5,353,329 |
def calculate_folder_size(path, _type="mb") -> float:
"""Return the size of the given path in MB, bytes if wanted"""
p1 = subprocess.Popen(["du", "-sb", path], stdout=subprocess.PIPE)
p2 = subprocess.Popen(["awk", "{print $1}"], stdin=p1.stdout, stdout=subprocess.PIPE)
p1.stdout.close() # type: ignore
... | 5,353,330 |
def templates():
    """Return every template together with its settings."""
    return settings
def add_license(license_uri, based_on_uri, version, jurisdiction,
legalcode_uri, rdf_dir, license_code):
"""Create a new license based on an existing one. Write the resulting
graph to the rdf_dir."""
# make sure the license_uri ends with a slash
if license_uri[-1] != '/':
licen... | 5,353,332 |
def get_semitones(interval_tuplet):
    """Convert an interval tuplet (as produced by get_interval()) into a
    total semitone count.

    The tuplet is ``(shorthand, octaves)``: shorthand resolves to semitones
    within one octave, and each additional octave adds 12.
    """
    shorthand, octaves = interval_tuplet[0], interval_tuplet[1]
    return mintervals.semitones_from_shorthand(shorthand) + 12 * octaves
def test_fieldset_sample(fieldset, xdim=120, ydim=80):
""" Sample the fieldset using indexing notation. """
lon = np.linspace(-170, 170, xdim, dtype=np.float32)
lat = np.linspace(-80, 80, ydim, dtype=np.float32)
v_s = np.array([fieldset.V[0, 0., 70., x] for x in lon])
u_s = np.array([fieldset.U[0, 0... | 5,353,334 |
def svds(a, k=6, *, ncv=None, tol=0, which='LM', maxiter=None,
return_singular_vectors=True):
"""Finds the largest ``k`` singular values/vectors for a sparse matrix.
Args:
a (cupy.ndarray or cupyx.scipy.sparse.csr_matrix): A real or complex
array with dimension ``(m, n)``
k... | 5,353,335 |
def register_mongodb(app: Flask) -> Flask:
"""Instantiates database and initializes collections."""
config = app.config
# Instantiate PyMongo client
mongo = create_mongo_client(app=app, config=config)
# Add database
db = mongo.db[get_conf(config, "database", "name")]
# Add database collec... | 5,353,336 |
def epflux_boa(netcdf=False):
    """
    Compute EP flux from boa variables U, V and T (not yet implemented).

    When ``netcdf`` is True the output would additionally be saved to a
    netCDF file.
    """
    pass
def assert_is_compatible(schema, required_args, optional_args):
"""Raises a :exc:`~cosmic.exceptions.SpecError` if function argument spec
(as returned by :func:`get_args`) is incompatible with the given schema.
By incompatible, it is meant that there exists such a piece of data that
is valid according t... | 5,353,338 |
def look(table, limit=0, vrepr=None, index_header=None, style=None,
truncate=None, width=None):
"""
Format a portion of the table as text for inspection in an interactive
session. E.g.::
>>> import petl as etl
>>> table1 = [['foo', 'bar'],
... ['a', 1],
..... | 5,353,339 |
def refresh_devices(config, cache_path):
"""Refresh devices from configuration received"""
global DEBUG, m_devices, Device_Cache
if DEBUG: print("DEBUG: Refreshing device database")
print_progress("Refresh devices")
try:
m_devices = config['devices']
except:
print("ERROR: No devi... | 5,353,340 |
def test_visiting_the_site():
    """Placeholder test for visiting the site (no assertions yet)."""
def listElements(server, elements, filesToGet):
"""
Description:
Function to print elements list on a distant ssh server.
Selected elements, that are in filesToGet, are red underlined.
Parameters:
- server: the name of the server.
- elements: list of element to print out.
... | 5,353,342 |
def warn_vars_naming_style(messages, line, style):
""" Check whether varibales and function argumens fit the naming rule."""
naming_style_name = style.Get('CHECK_VAR_NAMING_STYLE')
if not naming_style_name:
return
def is_expr(uwl):
return (uwl.tokens
and _find_parent(uw... | 5,353,343 |
def _create_sync_table_from_resource_df(
resource_df: DataFrame,
identity_columns: List[str],
resource_name: str,
sync_db: sqlalchemy.engine.base.Engine,
):
"""
Take fetched data and push to a new temporary sync table. Includes
hash and tentative extractor CreateDate/LastModifiedDates.
... | 5,353,344 |
def validate_schema(path, data, schema):
"""
Warns and returns the number of errors relating to JSON Schema validation.
Uses the `jsonschema <https://python-jsonschema.readthedocs.io/>`__ module.
:param object schema: the metaschema against which to validate
:returns: the number of errors
:rty... | 5,353,345 |
def class_dict(base_module, node):
"""class_dict(base_module, node) -> dict
Returns the class dictionary for the module represented by node and
with base class base_module"""
class_dict_ = {}
def update_dict(name, callable_):
if class_dict_.has_key(name):
class_dict_[name] = cal... | 5,353,346 |
def assert_records_equal_nonvolatile(first, second, volatile_fields, indent=0):
"""Compare two test_record tuples, ignoring any volatile fields.
'Volatile' fields include any fields that are expected to differ between
successive runs of the same test, mainly timestamps. All other fields
are recursively compar... | 5,353,347 |
def get_playlist_decreasing_popularity():
    """Return playlists with their play counts, in decreasing popularity.

    Relies on the ``PlaylistPopularityPrefixed`` model already being
    ordered by popularity.
    """
    playlists = PlaylistPopularityPrefixed.objects.all()
    return [
        {"playlist_name": entry.playlist_name, "popularity": entry.played}
        for entry in playlists
    ]
def validate_numeric_scalar(var: Any) -> bool:
"""Evaluates whether an argument is a single numeric value.
Args:
var: the input argument to validate
Returns:
var: the value if it passes validation
Raises:
AssertionError: `var` was not numeric.
"""
assert isinstance(var... | 5,353,349 |
def fit_draw_func(data_for_plotting: Dict[base.FitType, histogram.Histogram1D], component: fit.FitComponent,
x: np.ndarray, ax: Axes) -> None:
""" Determine and draw the fit and data on a given axis.
Here, we will draw both the signal and the background dominated data, regardless of what was
... | 5,353,350 |
def irrf(valor=0):
"""
-> Função para cálcular o valor do IRRF.
:param valor: Valor base do salário para cálculo do IRRF.
:return: Retorna o valor do IRRF e alíquota utilizada.
"""
irrf = []
if valor < 1903.99:
irrf.append(0)
irrf.append(0)
elif va... | 5,353,351 |
def find_adcp_files_within_period(working_directory,max_gap=20.0,max_group_size=6):
"""
Sorts a directory of ADCPRdiWorkHorseData raw files into groups by
closeness in time, with groups being separated by more than
'max_gap_minutes'. This method first sorts the files by start time, and
then spli... | 5,353,352 |
def test_get_basic(client):
"""
Tests get endpoint (all crimes)
"""
client.delete("/streetlights")
insert_test_data(client)
rs = client.get("/streetlights")
collection = rs.json["result"]["streetlights"]
assert len(collection) == 5 | 5,353,353 |
def regular_transport_factory(host, port, env, config_file):
"""
Basic unencrypted Thrift transport factory function.
Returns instantiated Thrift transport for use with cql.Connection.
Params:
* host .........: hostname of Cassandra node.
* port .........: port number to connect to.
* env .... | 5,353,354 |
def frames_to_video(images, Fs, output_file_name, codec_spec='h264'):
"""
Given a list of image files and a sample rate, concatenate those images into
a video and write to [output_file_name].
"""
if len(images) == 0:
return
# Determine the width and height from the first image
... | 5,353,355 |
def smoothed_abs(x, eps=1e-8):
    """Smooth, numerically stable approximation of ``abs(x)``.

    Computes ``sqrt(x**2 + eps)``; the ``eps`` term keeps the gradient
    finite at ``x == 0``.
    """
    return jnp.sqrt(x * x + eps)
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
p... | 5,353,357 |
def setup_release(keep_days=0):
    """Set up a full release across the cluster.

    See :func:`_setup_release` for details.

    Example:
        fab <env> setup_release:keep_days=10  # release kept for 10 days
    """
    days = parse_int_or_exit(keep_days)
    _setup_release(days, full_cluster=True)
def increment(t1, seconds):
    """Return a new Time equal to `t1` advanced by `seconds` seconds."""
    assert valid_time(t1)
    total_seconds = time_to_int(t1) + seconds
    return int_to_time(total_seconds)
def _apply_write(cls):
"""Add write method if any formats have a registered writer for `cls`."""
skbio_io_write = globals()['write']
write_formats = list_write_formats(cls)
if write_formats:
if not hasattr(cls, 'default_write_format'):
raise NotImplementedError(
"Clas... | 5,353,360 |
def add_absname(file):
    """Prefix a file name with the directory containing this module."""
    module_dir = os.path.dirname(__file__)
    return os.path.join(module_dir, file)
def dense(input_shape, output_shape, output_activation='linear', name=None):
"""
Build a simple Dense model
Parameters
----------
input_shape: shape
Input shape
output_shape: int
Number of actions (Discrete only so far)
Returns
-------
model: Mode... | 5,353,362 |
def test_missing_enable_ext(monkeypatch: pytest.MonkeyPatch) -> None:
"""Test missing enable-ext option fails.
Check that a workflow that needs `--enable-ext` and
`--enable-dev` fails without those options and passes with them.
"""
monkeypatch.delenv("CWLTOOL_OPTIONS", raising=False)
assert mai... | 5,353,363 |
def dataset_labels(alldata, tag=None):
""" Return label for axis of dataset
Args:
ds (DataSet): dataset
tag (str): can be 'x', 'y' or 'z'
"""
if tag == 'x':
d = alldata.default_parameter_array()
return d.set_arrays[0].label
if tag == 'y':
d = alldata.default_... | 5,353,364 |
def spread(
template: Template,
data: Optional[Any],
flavor: Flavor,
postprocess: Optional[Callable] = None,
start_at: int = 0,
replace_missing_with: Optional[str] = None,
) -> Tuple[List[Union["pygsheets.Cell"]], int]:
"""Spread data into cells.
Parameters
----------
template
... | 5,353,365 |
def check_html(name):
"""
Given a name of graph to save or write, check if it is of valid syntax
:param: name: the name to check
:type name: str
"""
assert len(name.split(".")) == 2, "invalid file type for %s" % name
assert name.split(
".")[1] == "html", "%s is not a valid html file... | 5,353,366 |
def train_validate_test_split(DataFrame, ratios=(0.6,0.2,0.2)):
"""
Parameters
----------
DataFrame : pandas.DataFrame
DataFrame
ratios : tuple
E.g.
(train, validate, test) = (0.6, 0.25, 0.15)
(train, test) = (0.6, 0.4) -> validate = test
... | 5,353,367 |
def is_error(code: Union[Error, int]) -> bool:
    """Return True when `code` denotes a (fatal) error, not just a warning."""
    numeric = code.code if isinstance(code, Error) else code
    return numeric >= ERROR
def unpack_triple(item):
"""Extracts the indices and values from an object.
The argument item can either be an instance of SparseTriple or a
sequence of length three.
Example usage:
>>> st = SparseTriple()
>>> ind1, ind2, val = unpack_triple(st)
>>> quad_expr = [[], [], []]
>>> ind1, ... | 5,353,369 |
def class_report(data=None, label: str = None):
"""
This function calculates a class report of a given anomaly score.
:param data: dataset [DataFrame]
:param label: anomaly-score of an given feature [string]
:return: Classification report as console output
"""
print(classification_report(d... | 5,353,370 |
def test_skip_c() -> None:
""" Test with skip """
run_test(['-c', SKIP], 'tests/expected/skip.txt.c.out') | 5,353,371 |
def max_sequence(arr):
"""
The maximum sum subarray problem consists in finding the maximum sum of a contiguous subsequence in an array or
list of integers.
:param arr: an array or list of integers.
:return: the maximum value found within the subarray.
"""
best = 0
for x in range(len(arr... | 5,353,372 |
def get_by_username(username):
    """
    Retrieve a user from the database by their username.

    :param username: the username to look up
    :return: the matching User record, as returned by the database layer
    """
    return database.get(User, username, field="username")
def plot_averaged_forecast(actual, predicted, first_column_act, last_column_act,
first_column_pr, last_column_pr):
"""
Function plot averaged forecasts and actual time series
:param actual: time series with averaged actual values
:param predicted: time series with averaged pr... | 5,353,374 |
def evalPoint(u, v):
"""
Evaluates the surface point corresponding to normalized parameters (u, v)
"""
a, b, c, d = 0.5, 0.3, 0.5, 0.1
s = TWO_PI * u
t = (TWO_PI * (1 - v)) * 2
r = a + b * cos(1.5 * t)
x = r * cos(t)
y = r * sin(t)
z = c * sin(1.5 * t)
dv = PVector()
d... | 5,353,375 |
def decode_funcname2(subprogram_die, address):
""" Get the function name from an PC address"""
for DIE in subprogram_die:
try:
lowpc = DIE.attributes['DW_AT_low_pc'].value
# DWARF v4 in section 2.17 describes how to interpret the
# DW_AT_high_pc attribute based on th... | 5,353,376 |
def load_cubes(filespecs, callback=None):
"""
Loads cubes from a list of ABF filenames.
Args:
* filenames - list of ABF filenames to load
Kwargs:
* callback - a function that can be passed to :func:`iris.io.run_callback`
.. note::
The resultant cubes may not be in the same orde... | 5,353,377 |
def get_reddit_slug(permalink):
"""
Get the reddit slug from a submission permalink, with '_' replaced by '-'
Args:
permalink (str): reddit submission permalink
Returns:
str: the reddit slug for a submission
"""
return list(filter(None, permalink.split("/")))[-1].replace("_", "... | 5,353,378 |
def newton_halley(func, x0, fprime, fprime2, args=(), tol=1.48e-8,
maxiter=50, disp=True):
"""
Find a zero from Halley's method using the jitted version of
Scipy's.
`func`, `fprime`, `fprime2` must be jitted via Numba.
Parameters
----------
func : callable and jitted
... | 5,353,379 |
def staff_dash(request):
"""Route for displaying the staff dashboard of the site.
"""
# Empty context to populate:
context = {}
def get_account_name(path):
"""Method contains logic to extract the app name from a url path.
Method uses the django.urls.resolve method with basi... | 5,353,380 |
def read_v1_file(path: str = "CBETHUSD.csv") -> tuple:
"""
Read the data from the file path, reconstruct the format the the data
and return a 3d matrix.
"""
lst = []
res = []
with open(path) as data:
reader = csv.reader(data)
next(reader) # skip the header row
for ro... | 5,353,381 |
def check_attribute(name, paginator, expected, params):
    """
    Helper that checks a single paginator attribute and gives a nice
    error message upon test failure.

    `params` describes the paginator configuration under test; it is
    included in the failure message for easier debugging (the original
    accepted it but never used it, and the bare assert carried no
    message despite the docstring's promise).
    """
    got = getattr(paginator, name)
    assert expected == got, (
        "For %r, expected %r but got %r (params: %r)" % (name, expected, got, params)
    )
def get_adjacent_th(spec: torch.Tensor, filter_length: int = 5) -> torch.Tensor:
"""Zero-pad and unfold stft, i.e.,
add zeros to the beginning so that, using the multi-frame signal model,
there will be as many output frames as input frames.
Args:
spec (torch.Tensor): input spectrum (B, F, T, 2... | 5,353,383 |
def K2(eps):
""" Radar dielectric factor |K|**2
Parameters
----------
eps : complex
nd array of complex relative dielectric constants
Returns
-------
nd - float
Radar dielectric factor |K|**2 real
"""
K_complex = (eps-1.0)/(eps+2.0)
return (K_complex*K_complex.... | 5,353,384 |
def permutation_test_mi(x, y, B=100, random_state=None, **kwargs):
"""Permutation test for mutual information
Parameters
----------
x : 1d array-like
Array of n elements
y : 1d array-like
Array of n elements
n_classes : int
Number of classes
B : int
Number... | 5,353,385 |
def convert_time_units(value, value_unit="s", result_unit="s", case_sensitive=True):
"""
Convert `value` from `value_unit` to `result_unit`.
The possible time units are ``'s'``,``'ms'``, ``'us'``, ``'ns'``, ``'ps'``, ``'fs'``, ``'as'``.
If ``case_sensitive==True``, matching units is case sensitive.... | 5,353,386 |
def compute_distribution_clusters(columns: list, dataset_name: str, threshold: float, pool: Pool,
chunk_size: int = None, quantiles: int = 256):
"""
Algorithm 2 of the paper "Automatic Discovery of Attributes in Relational Databases" from M. Zhang et al. [1]. This
algorithm... | 5,353,387 |
def save_cpx_image(image, name):
    """Persist a complex image as two files: one per channel.

    ``image[0]`` holds the real channel and ``image[1]`` the imaginary
    channel; they are written as ``<name>_real.png`` and ``<name>_imag.png``.
    """
    for channel, suffix in zip(image[:2], ('real', 'imag')):
        save_image(channel, '{}_{}.png'.format(name, suffix))
def get_eventframe_sequence(event_deque, is_x_first, is_x_flipped,
is_y_flipped, shape, data_format, frame_width,
frame_gen_method):
"""
Given a single sequence of x-y-ts events, generate a sequence of binary
event frames.
"""
inp = []
wh... | 5,353,389 |
def creation_sequence_to_weights(creation_sequence):
"""
Returns a list of node weights which create the threshold
graph designated by the creation sequence. The weights
are scaled so that the threshold is 1.0. The order of the
nodes is the same as that in the creation sequence.
"""
# Turn... | 5,353,390 |
def parse_args():
"""
Parsing shell command arguments, and override appropriate params
from setting module
:return: None
"""
parser = argparse.ArgumentParser(version=VERSION)
parser.add_argument('-u', action='store', dest='url')
parser.add_argument('-f', action='store', dest='url_file')... | 5,353,391 |
def group():
    """Model generator (placeholder; no implementation yet)."""
def get_seg_features(string):
"""
Segment text with jieba
features are represented in bies format
s donates single word
"""
seg_feature = []
for word in jieba.cut(string):
if len(word) == 1:
seg_feature.append(0)
else:
tmp = [2] * len(word)
... | 5,353,393 |
def print_device_info(nodemap):
"""
This function prints the device information of the camera from the transport
layer; please see NodeMapInfo example for more in-depth comments on printing
device information from the nodemap.
:param nodemap: Transport layer device nodemap.
:type nodemap: ... | 5,353,394 |
def make_withdrawal(account):
"""Withdrawal Dialog."""
# @TODO: Use questionary to capture the withdrawal and set it equal to amount variable. Be sure that amount is a floating
# point number.
amount = questionary.text("How much would you like to withdraw").ask()
amount = float(amount)
# @TOD... | 5,353,395 |
def coerce_to_pendulum_date(x: PotentialDatetimeType,
assume_local: bool = False) -> Optional[Date]:
"""
Converts something to a :class:`pendulum.Date`.
Args:
x: something that may be coercible to a date
assume_local: if ``True``, assume local timezone; if ``Fals... | 5,353,396 |
def loss_fixed_depl_noquench(params, loss_data):
"""
MSE loss function for fitting individual stellar mass histories.
Only main sequence efficiency parameters. Quenching is deactivated.
Depletion time is fixed at tau=0Gyr, i.e. gas conversion is instantaenous.
"""
(
lgt,
dt,
... | 5,353,397 |
def plot_timeseries_comp(date1, value1, date2, value2, fname_list,
labelx='Time [UTC]', labely='Value',
label1='Sensor 1', label2='Sensor 2',
titl='Time Series Comparison', period1=0, period2=0,
ymin=None, ymax=None, dpi... | 5,353,398 |
def write_basissets(inp, basissets, folder):
    """Write the unified BASIS_SETS file containing the basissets in use."""
    _write_gdt(inp, basissets, folder, "BASIS_SET_FILE_NAME", "BASIS_SETS")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.