content stringlengths 22 815k | id int64 0 4.91M |
|---|---|
def nodal_distribution_factors_v2(topo: ndarray, volumes: ndarray):
    """Compute nodal distribution factors for a regular topology.

    Entry (i, j) of the result is the contribution of element i to
    node j. Thin wrapper that delegates to
    :func:`nodal_distribution_factors`.
    """
    return nodal_distribution_factors(topo, volumes)
def start(update: Update, context: CallbackContext):
    """Handle the /start command by greeting the user."""
    greeting = "Hi. Please create or choose task list from existing."
    update.message.reply_text(greeting)
def game_core_binary(number_to_guess):
"""Binary search approach.
Set the first predict value as the middle of interval, i.e. 50.
Then decrease or increase the predict number by step.
The step is calculated using the check interval divided by 2,
i.e. 25, 13 ... 1
The minimum step is always 1.
... | 1,502 |
def linear_trend(series, return_line=True):
"""
USAGE
-----
line = linear_trend(series, return_line=True)
OR
b, a, x = linear_trend(series, return_line=False)
Returns the linear fit (line = b*x + a) associated
with the 'series' array.
Adapted from pylab.detrend_linear.
"""
series = np.asanyarray(series)
... | 1,503 |
def gradcheck_wrapper_masked_operation(op, input, *args, **kwargs):
"""Gradcheck wrapper for masked operations.
When mask is specified, replaces masked-out elements with zeros.
Use for operations that produce non-finite masked-out elements,
for instance, for minimum and maximum reductions.
"""
... | 1,504 |
def classify(neural_net, image_file):
    """
    Return the given model's prediction for an image file.

    :param neural_net: a Keras-style model exposing ``predict``.
    :param image_file: path or file object readable by PIL.
    :return: the first scalar of the model's prediction.
    """
    img = Image.open(image_file)
    img.load()
    img_array = np.asarray(img)
    # NOTE(review): assumes a 100x100 RGB image — confirm against callers.
    img_array.shape = (1, 100, 100, 3)
    # Bug fix: previously called a global `model` instead of the
    # `neural_net` argument, silently ignoring the caller's model.
    prediction = neural_net.predict(img_array)[0][0]
    return prediction
def delete(job):
    """Delete a job on the remote scheduler via qdel.

    Args:
        job (dict): job description; must contain "jobid".

    Returns:
        str: first element of the SSH command output (stdout).

    Raises:
        exceptions.JobdeleteError: if the SSH call fails.
    """
    jobid = job["jobid"]
    try:
        shellout = shellwrappers.sendtossh(job, ["qdel " + jobid])
    except exceptions.SSHError as err:
        # Chain the original SSH failure so its traceback is preserved.
        raise exceptions.JobdeleteError("Unable to delete job.") from err
    return shellout[0]
def regexp_span_tokenize(s, regexp):
"""
Identify the tokens in the string, as defined by the token
delimiter regexp, and generate (start, end) offsets.
@param s: the string to be tokenized
@type s: C{str}
@param regexp: the token separator regexp
@type regexp: C{str}
@rtype: C{iter... | 1,507 |
def optimum_simrank(x_p, x_n, alpha):
"""Intermediary function to the one below."""
pos_pair_1 = itertools.combinations(x_p, 2)
pos_pair_2 = itertools.combinations(x_n, 2)
neg_pair = itertools.product(x_p, x_n)
def get_val_from_pair(x):
# Transforms each pair into one minus the minimum of i... | 1,508 |
def imports(operators=None, union=True):
""" Lazily imports and returns an enaml imports context.
Parameters
----------
operators : dict, optional
An optional dictionary of operators to push onto the operator
stack for the duration of the import context. If this is not
provided,... | 1,509 |
def create_capture_database(capture, configfile=None):
"""Create 1 database with 2 tables:
- DoC table for coverage data
- Annotation table with gene-target info.
"""
if configfile is None:
configfile = os.path.join(SCRIPTDIR, 'config.py')
config = get_config_dict(configfile)
anno... | 1,510 |
def apply_job_security(code):
"""Treat input `code` like Python 2 (implicit strings are byte literals).
The implementation is horribly inefficient but the goal is to be compatible
with what Mercurial does at runtime.
"""
buf = io.BytesIO(code.encode("utf8"))
tokens = tokenize.tokenize(buf.readl... | 1,511 |
def fetch(gpname: str):
    """Return a single gunpowder by name.

    Parameters
    ----------
    gpname: str
        Gunpowder name

    Returns
    -------
    gpowder: dict
        Gunpowder in dictionary form

    Raises
    ------
    KeyError
        If ``gpname`` is not a known gunpowder.
    """
    # Docstring previously opened with four quotes ('""""'), leaving a
    # stray '"' at the start of the rendered documentation; fixed.
    gpowders = _load_many()
    return gpowders[gpname]
def _kc_frequency_features(time_data, times, sfreq):
""" Calculate absolute power of delta and alpha band before (on a 3 seconds
windows) and after K-complexes"""
exp = [('before', -2.5, -0.5), ('after', 1, 3)]
res = {}
for m in exp:
kc_matrix_temp = time_data[:, np.bitwise_and(times > m[1]... | 1,513 |
def test_atomic_integer_total_digits_4_nistxml_sv_iv_atomic_integer_total_digits_5_5(mode, save_output, output_format):
"""
Type atomic/integer is restricted by facet totalDigits with value 18.
"""
assert_bindings(
schema="nistData/atomic/integer/Schema+Instance/NISTSchema-SV-IV-atomic-integer-t... | 1,514 |
async def CommandProfile(
ctx: SlashContext, user: Union[InteractionMember, UserImpl]
) -> None:
"""Handler for the /profile slash command."""
if hasattr(user, "user"):
try:
user.user = await ctx.rest.fetch_user(user.id)
except Exception as e:
logger.warning(
... | 1,515 |
def gentrends(x, window=1/3.0, charts=True):
"""
Returns a Pandas dataframe with support and resistance lines.
:param x: One-dimensional data set
:param window: How long the trendlines should be. If window < 1, then it
will be taken as a percentage of the size of the data
:param ... | 1,516 |
def test_clean_slug_exists(mock_super_clean):
"""
Cleaning an instance that already has a slug should not replace the
slug.
"""
name = "foo"
slug = "bar-baz"
inst = models.SlugModel(slug=slug)
setattr(inst, models.SlugModel.SLUG_SOURCE, name)
new_slug = "foo-bar"
with mock.patch... | 1,517 |
def gen_cand_keyword_scores(phrase_words, word_score):
"""
Computes the score for the input phrases.
:param phrase_words: phrases to score
:type phrase_words: list
:param word_score: calculated word scores
:type word_score: list
:return: dict *{phrase: score, ...}*
"""
keyword_candi... | 1,518 |
def analyze_network(directed=False, base_url=DEFAULT_BASE_URL):
"""Calculate various network statistics.
The results are added to the Node and Edge tables and the Results Panel.
The summary statistics in the Results Panel are also returned by the function
as a list of named values.
Args:
d... | 1,519 |
def create_user():
"""
Create new user
"""
# request.get_json(): extract the JSON from the request and return it as
# a Python structure.
data = request.get_json() or {}
# Validate mandatory fields
if 'username' not in data or 'email' not in data or \
'password' not in data:
... | 1,520 |
def hash_long_to_short(long_url):
"""
turn a long input url into a short url's url-safe 5 character hash
this is deterministic and the same long_url will always have the same hash
"""
encoded = long_url.encode("utf-8")
md5_hash = hashlib.md5(encoded).digest()
return base64.urlsafe_b64encode(... | 1,521 |
def transform_asset_time_fields_recursive(asset):
"""
Transforms the asset time fields recursively
"""
if isinstance(asset, list):
for sub_asset_object in asset:
transform_asset_time_fields_recursive(sub_asset_object)
if isinstance(asset, dict):
for k, v in asset.items():... | 1,522 |
def cutout_vstAtlas(ra, dec, bands=["u","g","r","i","z"], database="ATLASDR3",\
psfmags=None, imDir="/data/vst-atlas/", input_filename=[], saveFITS=False,\
width_as=20., smooth=False, cmap="binary", minmax="MAD", origin="lower", figTitle=True, \
return_val=False, saveD... | 1,523 |
def has_prefix(sub_s):
    """
    Test possibility of sub_s before doing recursion.

    :param sub_s: sub-string taken from the head of the input word.
    :return: (bool) True if any word in DATABASE starts with sub_s,
        otherwise False.
    """
    # Bug fix: the original loop fell off the end and returned None for
    # the negative case; any() makes the False result explicit.
    return any(word.startswith(sub_s) for word in DATABASE)
def transform_results(search_result, user, department_filters):
"""
Transform podcast and podcast episode, and userlist and learning path in aggregations
Add 'is_favorite' and 'lists' fields to the '_source' attributes for learning resources.
Args:
search_result (dict): The results from Elastic... | 1,525 |
def extract_rest_proxy_info(event):
    """Validate REST task proxy settings on the event's form.

    Attaches a form error when proxy usage is enabled but no proxy
    server has been configured.
    """
    data = event.data
    if data.get('use_proxy') and not data.get('proxy_server'):
        event.form.widgets.errors += (Invalid(_("Proxy access defined without proxy server!")), )
def kl_div_loss(inputs: Tensor, targets: Tensor) -> Tensor:
    """Element-wise Kullback-Leibler divergence between the softmax
    distributions of ``inputs`` and ``targets`` (no reduction)."""
    log_probs = F.log_softmax(inputs, dim=-1)
    target_probs = F.softmax(targets, dim=-1)
    return F.kl_div(log_probs, target_probs, reduction="none")
def get_schema_names(connection: psycopg2.extensions.connection) -> List[psycopg2.extras.RealDictRow]:
"""Function for getting the schema information from the given connection
:param psycopg2.extensions.connection connection: The connection
:return: List of rows using key-value pairs for the data
:rty... | 1,528 |
def parse(string):
    """Parse an input string into a list of specs.

    For creating a single spec, see the Spec() constructor.
    """
    parser = SpecParser()
    return parser.parse(string)
def get_provider_idx(provider_type):
    """Return the index associated with a provider type.

    Args:
        provider_type: key into the PROVIDERS_TYPE registry.

    Raises:
        ProviderError: if the provider type is not registered.
    """
    try:
        return PROVIDERS_TYPE[provider_type]['idx']
    except KeyError as error:
        # Chain the KeyError: the captured exception was previously unused.
        raise ProviderError(
            "Provider type (%s) is not supported yet." % (provider_type, )
        ) from error
def file_based_convert_examples_to_features(
examples, slot_label_list, intent_label_list, max_seq_length, tokenizer, output_file):
"""
将InputExamples转成tf_record,并写入文件
Convert a set of InputExample to a TFRecord file.
:param examples: [(text, CRF_label, class_label), ...]
:param slot_label_l... | 1,531 |
def create_readme(df):
"""Retrieve text from README.md and update it."""
readme = str
categories = pd.unique(df["category"])
categories.sort()
with open('README.md', 'r', encoding='utf-8') as read_me_file:
read_me = read_me_file.read()
splits = read_me.split('<!---->')
# I... | 1,532 |
async def get_leaderboard_info_by_id(
# ScoreSaber leaderboardId
leaderboardId: float
):
"""
GET /api/leaderboard/by-id/{leaderboardId}/info
"""
# request
request_url = f'{SERVER}/api/leaderboard/by-id/{leaderboardId}/info'
response_dict = await request.get(request_url)
return Leade... | 1,533 |
async def check(app_name: str = "all"):
    """
    List every migration and whether or not it has run.

    :param app_name:
        Name of the app to check; pass 'all' to check the migrations
        of every app.
    """
    manager = CheckMigrationManager(app_name=app_name)
    await manager.run()
def distance(lat1,lon1,lat2,lon2):
"""Input 2 points in Lat/Lon degrees.
Calculates the great circle distance between them in radians
"""
rlat1= radians(lat1)
rlon1= radians(lon1)
rlat2= radians(lat2)
rlon2= radians(lon2)
dlat = rlat1 - rlat2
dlon = rlon1 - rlon2
a = pow(si... | 1,535 |
def minimumSwaps(arr):
"""
O(nlogn)
"""
len_arr = len(arr)
arr_dict = {key+1:value for key, value in enumerate(arr)}
arr_checked = [False]*len_arr
total_count = 0
for key, value in arr_dict.items():
count = 0
while key != value and arr_checked[key-1] is False:
... | 1,536 |
def store_user_bot(user_id, intended_user, bot_id):
"""Store an uploaded bot in object storage."""
if user_id != intended_user:
raise api_util.user_mismatch_error(
message="Cannot upload bot for another user.")
if bot_id != 0:
raise util.APIError(
400, message="Sorry... | 1,537 |
def dispatch(methods, request, notification_errors=False):
"""Dispatch JSON-RPC requests to a list of methods::
r = dispatch([cat], {'jsonrpc': '2.0', 'method': 'cat', 'id': 1})
The first parameter can be either:
- A *list* of functions, each identifiable by its ``__name__`` attribute.
- Or a... | 1,538 |
async def test_purehotcoollink_component_setup_only_once(devices, login, hass):
"""Test if entities are created only once."""
config = _get_config()
await async_setup_component(hass, DYSON_DOMAIN, config)
await hass.async_block_till_done()
entity_ids = hass.states.async_entity_ids(DOMAIN)
asser... | 1,539 |
def parse_conv(weights_file, cfg_parser, section, layer_dict):
""" parse conv layer
Args:
weights_file (file object): file object of .weights file
cfg_parser (ConfigParser object): ConfigParser object of .cfg file for net
section (str): name of conv layer
... | 1,540 |
def generate_format_spec(num_vals, sep, dtypes, decimals=None):
"""
Generate a format specifier for generic input.
--------------------------------------------------------------
Input
num_vals : number of wild-cards
sep : separator string (could be '_', '-', '--' ...)
... | 1,541 |
def products_with_low_stock(threshold: Optional[int] = None):
"""Return queryset with stock lower than given threshold."""
if threshold is None:
threshold = settings.LOW_STOCK_THRESHOLD
stocks = (
Stock.objects.select_related("product_variant")
.values("product_variant__product_id", ... | 1,542 |
def robust_topological_sort(deps):
"""
A topological sorting algorithm which is robust enough to handle cyclic graphs.
First, we bucket nodes into strongly connected components (we use Tarjan's linear algorithm for that).
Then, we topologically sort these buckets grouping sibling buckets into sets.
... | 1,543 |
def deprecated(reason):
"""
This is a decorator which can be used to mark functions and classes
as deprecated. It will result in a warning being emitted
when the function is used.
From https://stackoverflow.com/a/40301488
"""
string_types = (type(b""), type(u""))
if isinstance(reason, ... | 1,544 |
def rotate_tensor(l: torch.Tensor, n: int = 1) -> torch.Tensor:
    """Rotate a tensor by ``n`` positions with wrap-around.

    The first ``n`` elements move to the end (equivalent to
    ``torch.roll(l, -n, dims=0)``). Fixes the docstring typo "Roate"
    and the misleading "to the right" description.

    Args:
        l (torch.Tensor): input tensor
        n (int, optional): positions to rotate. Defaults to 1.

    Returns:
        torch.Tensor: rotated tensor
    """
    return torch.cat((l[n:], l[:n]))
def test_apply_spatial1_onecolumn(c_or_python):
"""Test the code path for Python _apply_array_spatial1 with one column
"""
fqe.settings.use_accelerated_code = c_or_python
work = fqe_data.FqeData(2, 1, 3)
work.set_wfn(strategy='ones')
# dummy Hamiltonian with one nonzero column
h1 = numpy.asa... | 1,546 |
def test_multiple_header_rows():
"""column names that indicate coordinates
are found even if buried in the middle of multiple header rows."""
tables = etree.XML(
"""<extracted-tables-set>
<pmcid>123</pmcid>
<extracted-table>
<table-id />
<table-label />
<transformed-table>
<t... | 1,547 |
def generate_all_fish(
n_fish,
n_replica_fish,
channel,
interaction,
k_coh,
k_ar,
alpha,
lim_neighbors,
weights = [1],
neighbor_weights=None,
fish_max_speeds=None,
clock_freqs=None,
verbose=False,
names=None
):
"""Generate both replica and regular fish
Ar... | 1,548 |
def allclose(a, b):
    """Return True when a and b agree to near machine precision."""
    tol = 1e-14
    return np.allclose(a, b, rtol=tol, atol=tol)
def check_pwhash(pwhash, password):
"""Check a password against a given hash value. Since
many forums save md5 passwords with no salt and it's
technically impossible to convert this to an sha hash
with a salt we use this to be able to check for
plain passwords::
plain$$default
md5 pass... | 1,550 |
def make_markov_model(tweets):
    """Build a Markov chain text model from a sequence of tweets."""
    corpus = " ".join(tweets)
    return markovify.Text(corpus)
def slice_image(sitk_image, start=(0, 0, 0), end=(-1, -1, -1)):
""""Returns the `sitk_image` sliced from the `start` index (x,y,z) to the `end` index.
"""
size = sitk_image.GetSize()
assert len(start) == len(end) == len(size)
# replace -1 dim index placeholders with the size of that dimension
e... | 1,552 |
def MakeGlyphs(src, reverseNormals):
"""
Glyph the normals on the surface.
You may need to adjust the parameters for maskPts, arrow and glyph for a
nice appearance.
:param: src - the surface to glyph.
:param: reverseNormals - if True the normals on the surface are reversed.
:return: The gl... | 1,553 |
def get_tags_categorys(self):
    """02: return the (tags, categories) variables for the add-document view."""
    return Tag.all(), Category.all()
def main():
    """Entry point when this script is invoked from the command line."""
    parser = ArgparseUserOptions(
        description=parser_description,
        args_dict_list=[required_args_dict, optional_args_dict],
        epilog=__doc__,
    )
    return parser.parse_args(sys.argv[1:])
def single_mode_constant_rotation(**kwargs):
"""Return WaveformModes object a single nonzero mode, with phase proportional to time
The waveform output by this function will have just one nonzero mode. The behavior of that mode will be fairly
simple; it will be given by exp(i*omega*t). Note that omega can... | 1,556 |
def get_file(file_pattern: list, sub_type: str = None) -> list:
"""Get a subset from file patterns that belong to a sub-type.
If no sub-type is specified, return all file patterns.
Args:
file_pattern (list): The input file patterns
sub_type (str, optional): A string to search in file patter... | 1,557 |
def etopo_subset(llcrnrlon=None, urcrnrlon=None, llcrnrlat=None,
urcrnrlat=None, tfile='dap', smoo=False, subsample=False):
"""Get a etopo subset.
Should work on any netCDF with x, y, data
http://www.trondkristiansen.com/wp-content/uploads/downloads/
2011/07/contourICEMaps.py
Examp... | 1,558 |
def get_all_report_data(db):
    """Fetch every relevant report row for the pre-report page, newest first."""
    sql = r'SELECT * FROM report WHERE relevent=1 ORDER BY id DESC'
    return db_get(db, sql)
def find_center_projection(mat1, mat2, flip=True, chunk_height=None,
start_row=None, denoise=True, norm=False,
use_overlap=False):
"""
Find the center-of-rotation (COR) using projection images at 0-degree
and 180-degree based on a method in Ref. [1].
... | 1,560 |
def parse_date(str):
    """Parse a 'YYYY-MM-DD' string into a datetime.date.

    NOTE(review): the parameter shadows the builtin ``str``; renaming it
    would break keyword callers, so it is kept as-is.
    """
    fields = str.split('-')
    year, month, day = int(fields[0]), int(fields[1]), int(fields[2])
    return date(year, month, day)
def mark_as_widget(view):
"""
Marks @view as a widget so we can later inspect that attribute, for
example, when hiding panels in _vi_enter_normal_mode.
Used prominently by '/', '?' and ':'.
XXX: This doesn't always work as we expect. For example, changing
settings to a panel created insta... | 1,562 |
def module_name(ctx, f):
"""Given Haskell source file path, turn it into a dot-separated module name.
module_name(
ctx,
"some-workspace/some-package/src/Foo/Bar/Baz.hs",
) => "Foo.Bar.Baz"
Args:
ctx: Rule context.
f: Haskell source file.
Returns:
string: Haskell module name.
"""
r... | 1,563 |
def train(
train_dir,
batch_size_per_gpu,
num_train_epochs,
dataset_name,
dataset_parent_dir,
strategy=None):
"""TBD."""
# ----------------------------------------------------------------------------
# Print train configuration.
# --------------------------------------------------------... | 1,564 |
def store(key):
    """Return the configured default store for ``key``, creating and
    caching it on first use. The default is PickleStore.

    :return store: Store object
    """
    global __stores
    if __stores is None:
        __stores = {}
    try:
        return __stores[key]
    except KeyError:
        __stores[key] = __configuration[STORE](key)
        return __stores[key]
def test_cross_val_score_weighted():
"""Test ``cross_val_score_weighted``."""
sample_weights = np.array([1.0, 1.0, 0.0, 1.0, 1.0, 0.0])
cv_score_kwargs = {
'estimator': LinearRegression(),
'x_data': np.arange(6).reshape(6, 1),
'y_data': np.array([0, 1, 1000, 0, -1, -1000]),
'... | 1,566 |
def get_label_for_line(line, leg):
"""
Can't remember what I was using this for but seems useful to keep
"""
# leg = line.figure.legends[0]
# leg = line.axes.get_legend()
for h, t in zip(leg.legendHandles, leg.texts):
if h.get_label() == line.get_label():
return t.get_tex... | 1,567 |
def test_save_unfitted():
"""Save an unfitted HMMClassifier object."""
try:
with pytest.raises(RuntimeError) as e:
HMMClassifier().save('test.pkl')
assert str(e.value) == 'The classifier needs to be fitted before it can be saved'
finally:
if os.path.exists('test.pkl'):
... | 1,568 |
def validate_maximum(value, maximum):
"""Validate that ``value`` is at most ``maximum``
Normally called in :meth:`~mopidy.config.types.ConfigValue.deserialize`.
"""
if maximum is not None and value > maximum:
raise ValueError(
'{!r} must be smaller than {!r}.'.format(value, maximum)... | 1,569 |
def _node_parent_listener(target, value, oldvalue, initiator):
"""Listen for Node.parent being modified and update path"""
if value != oldvalue:
if value is not None:
if target._root != (value._root or value):
target._update_root(value._root or value)
target._upda... | 1,570 |
def flatten(sequence):
"""
Gets a first visit iterator for the given tree.
Args:
sequence: The iterable that is to be flattened
Returns: iterable
"""
for item in sequence:
if isinstance(item, (list, tuple)):
for subitem in flatten(item):
yield subitem... | 1,571 |
def vxm_data_generator(x_data, batch_size=32):
"""
Generator that takes in data of size [N, H, W], and yields data for
our custom vxm model. Note that we need to provide numpy data for each
input, and each output.
inputs: moving [bs, H, W, 1], fixed image [bs, H, W, 1]
outputs: moved image [bs... | 1,572 |
def test_jinja2_required_extensions() -> None:
    """Test ``jinja2.ext`` has attrs needed for language support."""
    # noinspection PyUnresolvedReferences
    for required_attr in ("autoescape", "with_"):
        assert hasattr(jinja2_ext, required_attr)
def _make_block_conf_header(block, append_fn):
"""Returns a list of config.h strings for a block (list) of items."""
# Collect the substrings in a list and later use join() instead of += to
# build the final .config contents. With older Python versions, this yields
# linear instead of quadratic complex... | 1,574 |
def save_model_architecture(model, project_name, keras_model_type, cat_vocab_dict,
model_options, chart_name="model_before"):
"""
This function saves the model architecture in a PNG file in the artifacts sub-folder of project_name folder
"""
if isinstance(project_name,str):
... | 1,575 |
def sequential_to_momentum_net(module: torch.nn.Sequential,
split_dim=1,
coupling_forward: typing.Optional[typing.List[typing.Optional[typing.Callable]]] = None,
coupling_inverse: typing.Optional[typing.List[typing.Optional[typ... | 1,576 |
def version(): # static void version()
    """
    Print the application name and version, then flag the app to exit.
    """
    # Trailing "\n" plus print's own newline leaves a blank line after the banner.
    print("%s %s\n" % (_appName_, _appVer_))
    global exitApp
    exitApp = True
def user_view(request, name):
"""Render the view page for users"""
# argument is the login name, not the uuid in Cassandra
user = User.find(name)
if not user:
return redirect("users:home")
ctx = {
"req_user": request.user,
"user_obj": user,
"groups": [Group.find(gna... | 1,578 |
def grelha_nr_colunas(g):
    """
    grelha_nr_colunas: grid --> positive integer
    Return the number of columns of grid ``g`` (the length of its
    first row).
    """
    first_row = g[0]
    return len(first_row)
def rmsd(array_a, array_b):
"""
Calculate the RMSD between two 1d arrays
Parameters
----------
array_a, array_b : 1d numpy arrays
The arrays to be compared
Returns
-------
rmsd : float
The Root Mean Square Deviation of the elements of the array
"""
diff = array_... | 1,580 |
def readFile(sFile, sMode = 'rb'):
    """
    Read and return the entire contents of a file.

    :param sFile: path of the file to read.
    :param sMode: open mode; defaults to binary read ('rb').
    :return: bytes for binary modes, str for text modes.
    """
    # Use a context manager so the handle is closed even if read() raises
    # (the original leaked the handle on error).
    with open(sFile, sMode) as oFile:
        return oFile.read()
def VI_cgivens_d( a, b):
"""
returns cos, sin, r
"""
c = vsip_cmplx_d(0.0,0.0)
s = vsip_cmplx_d(0.0,0.0)
r = vsip_cmplx_d(0.0,0.0)
am = vsip_cmag_d(a)
bm = vsip_cmag_d(b)
if am == 0.0:
r.r = b.r; r.i=b.i;
s.r = 1.0;
else:
scale = am + bm;
alpha ... | 1,582 |
def _no_op(data):
"""
An on_load_batch callback that does nothing.
""" | 1,583 |
def documint_request_factory(request):
"""
Create a function that issues a request to a Documint endpoint.
Status codes outside the 2xx range are treated as errors. If error
responses are JSON then `DocumintError` is raised, otherwise
`MalformedDocumintError` is raised.
If the status code indi... | 1,584 |
def roipac_header(file_path, params):
"""
Function to obtain a header for roipac interferogram file or converted
geotiff.
"""
rsc_file = os.path.join(params[cf.DEM_HEADER_FILE])
if rsc_file is not None:
projection = parse_header(rsc_file)[ifc.PYRATE_DATUM]
else:
raise RoipacE... | 1,585 |
def find_exe_in_path(exe, bypass_permissions_check=None, add_exe_to_path=None):
"""
Check that an executable exists in $PATH
"""
paths = os.environ["PATH"].split(os.pathsep)
for path in paths:
fullexe = os.path.join(path,exe)
if os.path.exists(fullexe):
if not bypass... | 1,586 |
def test_tb_pipeline():
"""
Test case to ensure that the Hi-C pipeline code works.
Running the pipeline with the test data from the command line:
.. code-block:: none
runcompss \\
--lang=python ... | 1,587 |
def skip_for_tf2(f):
"""Decorator that skips tests when using TensorFlow 2."""
def test_wrapper(*args, **kwargs):
"""Wraps the decorated function to determine whether to skip."""
# Extract test case instance from args.
self = args[0]
try:
# If tf.contrib doesn't exist, we are in TF 2.0.
... | 1,588 |
def clean_logfile(logfile_lines,to_remove):
"""Remove yaml fields from a list of lines.
Removes from a set of lines the yaml_fields contained in the to_remove list.
Arguments:
logfile_lines (list): list of the lines of the logfile. Generated from a file by e.g. :py:meth:`~io.IOBase.readlines`.
to_re... | 1,589 |
def callback_function(bus):
"""
Function we want to call from the background_thread function
This function will be called when an interrupt is triggered from
a state change on pin 1
"""
print("interrupt triggered")
if bus.read_pin(1) == 0:
print("pin 1 was set low")
else:
... | 1,590 |
def no_red_sum(tokens):
"""Using import json is cheating, let's parse it ourselves in a sinlge pass. Hope you like stacks."""
sums = [0]
stack = []
is_red = False
for token in tokens:
if token == 'red' and not is_red and stack[-1] == '{':
is_red = True
sums[-1] = 0
... | 1,591 |
def get_all_projects():
    """
    Return a list with every project, both open and closed.
    """
    projects = gazu.project.all_projects()
    return projects
def test_view_permissions_has_all(authed_request, test_view):
"""Should be True if view lists multiple action permissions and all are on User"""
test = HasViewSetActionPermissions()
authed_request.user.perms.append('list_permission')
authed_request.user.perms.append('list_permission2')
test_view.act... | 1,593 |
def install(opts):
"""
Install one or more resources.
"""
resources = _load(opts.resources, opts.output_dir)
if opts.all:
opts.resource_names = ALL
success = _install(resources, opts.resource_names, opts.mirror_url,
opts.destination, opts.skip_top_level)
if suc... | 1,594 |
def seq2seq_att(mems, lengths, state, att_net=None):
"""
:param mems: [B, T, D_mem] This are the memories.
I call memory for this variable because I think attention is just like read something and then
make alignments with your memories.
This memory he... | 1,595 |
def SaveImage(Im, fname, useCompression=True):
"""Save an image in any known format"""
# get file extension
ext = os.path.splitext(fname)[1].lower()
# dispatch based on file extension
if ext == '.npy':
SaveImageNPY(Im, fname)
elif ext == '.npz':
SaveImageNPZ(Im, fname, useCompre... | 1,596 |
def convert_for_webkit(new_path, filename, reference_support_info, host=Host()):
""" Converts a file's |contents| so it will function correctly in its |new_path| in Webkit.
Returns the list of modified properties and the modified text if the file was modifed, None otherwise."""
contents = host.filesystem.r... | 1,597 |
def test_atomic_positive_integer_min_exclusive_3_nistxml_sv_iv_atomic_positive_integer_min_exclusive_4_3(mode, save_output, output_format):
"""
Type atomic/positiveInteger is restricted by facet minExclusive with
value 506558727413711217.
"""
assert_bindings(
schema="nistData/atomic/positive... | 1,598 |
def create_partial_pickle(flnc_files, chunked_nfl_files, out_pickle):
"""
Parameters:
flnc_files -- full-length non-chimeric files in bins
chunked_nfl_files -- chunked non-chimeric files
out_pickle -- output pickle for saving PolishChunkTask objects
"""
n_bins = len(flnc_files)
n_n... | 1,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.