code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _process_loaded_object(self, path):
"""process the :paramref:`path`.
:param str path: the path to load an svg from
"""
file_name = os.path.basename(path)
name = os.path.splitext(file_name)[0]
with open(path) as file:
string = file.read()
self.... | process the :paramref:`path`.
:param str path: the path to load an svg from |
def get_context_data(self, **kwargs):
    """Extend the inherited template context with this view's metadata.

    Adds ``title``, ``submit_value`` and ``cancel_url`` taken from the
    view instance, on top of whatever the parent class provides.
    """
    context = super().get_context_data(**kwargs)
    context['title'] = self.title
    context['submit_value'] = self.submit_value
    context['cancel_url'] = self.cancel_url
    return context
def clean_out_dir(directory):
    """
    Delete all the files and subdirectories in a directory.

    The directory itself is preserved; *directory* may be a ``path``
    instance or a plain string (which is wrapped in a ``path`` first).
    """
    directory = directory if isinstance(directory, path) else path(directory)
    for entry in directory.files():
        entry.remove()
    for subdir in directory.dirs():
        subdir.rmtree()
def dump_copy(self, path, relativePath, name=None,
description=None,
replace=False, verbose=False):
"""
Copy an exisitng system file to the repository.
attribute in the Repository with utc timestamp.
:Parameters:
#. path (str):... | Copy an exisitng system file to the repository.
attribute in the Repository with utc timestamp.
:Parameters:
#. path (str): The full path of the file to copy into the repository.
#. relativePath (str): The relative to the repository path of the directory where the file should be... |
def new_pattern(self, id_, name, rows=None):
"""Create a new knitting pattern.
If rows is :obj:`None` it is replaced with the
:meth:`new_row_collection`.
"""
if rows is None:
rows = self.new_row_collection()
return self._spec.new_pattern(id_, name, rows, self... | Create a new knitting pattern.
If rows is :obj:`None` it is replaced with the
:meth:`new_row_collection`. |
def get_file(self, sharename, fileid):
"""
Get a specific file. Does not require authentication.
Input:
* A sharename
* A fileid - must be an integer
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.get... | Get a specific file. Does not require authentication.
Input:
* A sharename
* A fileid - must be an integer
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.get_file("4ddfds", 0) |
def plot(image, overlay=None, blend=False,
alpha=1, cmap='Greys_r', overlay_cmap='jet', overlay_alpha=0.9,
cbar=False, cbar_length=0.8, cbar_dx=0., cbar_vertical=True,
axis=0, nslices=12, slices=None, ncol=None, slice_buffer=None, black_bg=True,
bg_thresh_quant=0.01, bg_val_quant=0.99, domain_image_ma... | Plot an ANTsImage.
By default, images will be reoriented to 'LAI' orientation before plotting.
So, if axis == 0, the images will be ordered from the
left side of the brain to the right side of the brain. If axis == 1,
the images will be ordered from the anterior (front) of the brain to
the poster... |
def user_cache_dir():
r"""Return the per-user cache dir (full path).
- Linux, *BSD, SunOS: ~/.cache/glances
- macOS: ~/Library/Caches/glances
- Windows: {%LOCALAPPDATA%,%APPDATA%}\glances\cache
"""
if WINDOWS:
path = os.path.join(os.environ.get('LOCALAPPDATA') or os.environ.get('APPDATA... | r"""Return the per-user cache dir (full path).
- Linux, *BSD, SunOS: ~/.cache/glances
- macOS: ~/Library/Caches/glances
- Windows: {%LOCALAPPDATA%,%APPDATA%}\glances\cache |
def wait_for_service_tasks_state(
service_name,
expected_task_count,
expected_task_states,
timeout_sec=120
):
""" Returns once the service has at least N tasks in one of the specified state(s)
:param service_name: the service name
:type service_name: str
:par... | Returns once the service has at least N tasks in one of the specified state(s)
:param service_name: the service name
:type service_name: str
:param expected_task_count: the expected number of tasks in the specified state(s)
:type expected_task_count: int
:param expected_task_sta... |
def display_candidates(self, candidates, pdf_file=None):
"""
Displays the bounding boxes corresponding to candidates on an image of the pdf
boxes is a list of 5-tuples (page, top, left, bottom, right)
"""
if not pdf_file:
pdf_file = os.path.join(
self.... | Displays the bounding boxes corresponding to candidates on an image of the pdf
boxes is a list of 5-tuples (page, top, left, bottom, right) |
def CheckProg(context, prog_name):
    """Check whether *prog_name* exists on the PATH.

    Returns the full path for the application, or None when it is not
    found.
    """
    found_path = SCons.Conftest.CheckProg(context, prog_name)
    # Mark the result as already displayed so SCons does not repeat it.
    context.did_show_result = 1
    return found_path
def readinto(self, b):
"""
Read bytes into a pre-allocated, writable bytes-like object b,
and return the number of bytes read.
Args:
b (bytes-like object): buffer.
Returns:
int: number of bytes read
"""
if not self._readable:
... | Read bytes into a pre-allocated, writable bytes-like object b,
and return the number of bytes read.
Args:
b (bytes-like object): buffer.
Returns:
int: number of bytes read |
def error(self, message=None):
    """
    Report *message* via `ArgumentParser.error` when a parser is
    attached; otherwise log the message and exit with status 2.
    """
    parser = self.__parser__  # pylint: disable-msg=E1101
    if parser:
        parser.error(message)
    else:
        self.logger.error(message)
        sys.exit(2)
def parse_section_entry_points(self, section_options):
    """Parses `entry_points` configuration file section.

    The parsed mapping is stored under the ``entry_points`` key.

    :param dict section_options:
    """
    self['entry_points'] = self._parse_section_to_dict(
        section_options, self._parse_list)
def _submit(self, pathfile, filedata, filename):
'''
Submit either a file from disk, or a in-memory file to the solver service, and
return the request ID associated with the new captcha task.
'''
if pathfile and os.path.exists(pathfile):
files = {'file': open(pathfile, 'rb')}
elif filedata:
assert fil... | Submit either a file from disk, or a in-memory file to the solver service, and
return the request ID associated with the new captcha task. |
def make_autogen_str():
r"""
Returns:
str:
CommandLine:
python -m utool.util_ipynb --exec-make_autogen_str --show
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_ipynb import * # NOQA
>>> import utool as ut
>>> result = make_autogen_str()
>>>... | r"""
Returns:
str:
CommandLine:
python -m utool.util_ipynb --exec-make_autogen_str --show
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_ipynb import * # NOQA
>>> import utool as ut
>>> result = make_autogen_str()
>>> print(result) |
def sync_from_spec(redis, schema):
"""
Takes an input experiment spec and creates/modifies/archives the existing
experiments to match the spec.
If there's an experiment in the spec that currently doesn't exist, it will
be created along with the associated choices.
If there's an experiment in t... | Takes an input experiment spec and creates/modifies/archives the existing
experiments to match the spec.
If there's an experiment in the spec that currently doesn't exist, it will
be created along with the associated choices.
If there's an experiment in the spec that currently exists, and the set of
... |
def load_sgems_exp_var(filename):
""" read an SGEM experimental variogram into a sequence of
pandas.DataFrames
Parameters
----------
filename : (str)
an SGEMS experimental variogram XML file
Returns
-------
dfs : list
a list of pandas.DataFrames of x, y, pairs for each
... | read an SGEM experimental variogram into a sequence of
pandas.DataFrames
Parameters
----------
filename : (str)
an SGEMS experimental variogram XML file
Returns
-------
dfs : list
a list of pandas.DataFrames of x, y, pairs for each
division in the experimental vario... |
async def fetch_messages(self, selected: SelectedMailbox,
sequence_set: SequenceSet,
attributes: FrozenSet[FetchAttribute]) \
-> Tuple[Iterable[Tuple[int, MessageInterface]], SelectedMailbox]:
"""Get a list of loaded message objects correspon... | Get a list of loaded message objects corresponding to given sequence
set.
Args:
selected: The selected mailbox session.
sequence_set: Sequence set of message sequences or UIDs.
attributes: Fetch attributes for the messages.
Raises:
:class:`~pymap... |
def slices(src_path):
    ''' Return slices as a flat list '''
    # Flatten the per-page slice lists into a single sequence.
    return [s for page in list_slices(src_path) for s in page.slices]
def addVariantAnnotationSet(self, variantAnnotationSet):
    """
    Adds the specified variantAnnotationSet to this dataset.

    The set is registered under its own id in both the id map and the
    ordered id list.
    """
    setId = variantAnnotationSet.getId()
    self._variantAnnotationSetIds.append(setId)
    self._variantAnnotationSetIdMap[setId] = variantAnnotationSet
def add_external_reference(self,ext_ref):
"""
Adds an external reference to the role
@param ext_ref: the external reference object
@type ext_ref: L{CexternalReference}
"""
#check if the externalreferences sublayer exist for the role, and create it in case
node_ext... | Adds an external reference to the role
@param ext_ref: the external reference object
@type ext_ref: L{CexternalReference} |
def expects_call(self):
"""The fake must be called.
.. doctest::
:hide:
>>> import fudge
>>> fudge.clear_expectations()
>>> fudge.clear_calls()
This is useful for when you stub out a function
as opposed to a class. For example::
... | The fake must be called.
.. doctest::
:hide:
>>> import fudge
>>> fudge.clear_expectations()
>>> fudge.clear_calls()
This is useful for when you stub out a function
as opposed to a class. For example::
>>> import fudge
... |
def _execute(self, query, model, adapter, raw=False):
"""
We have to override this because in some situation
(such as with Filebackend, or any dummy backend)
we have to parse / adapt results *before* when can execute the query
"""
values = self.load(model, adapter)
... | We have to override this because in some situation
(such as with Filebackend, or any dummy backend)
we have to parse / adapt results *before* when can execute the query |
def delete(self, record_key):
''' a method to delete a record from S3
:param record_key: string with key of record
:return: string reporting outcome
'''
title = '%s.delete' % self.__class__.__name__
# validate inputs
input_fields = {
'r... | a method to delete a record from S3
:param record_key: string with key of record
:return: string reporting outcome |
def _find_supported(self, features, mechanism_classes):
"""
Find the first mechansim class which supports a mechanism announced in
the given stream features.
:param features: Current XMPP stream features
:type features: :class:`~.nonza.StreamFeatures`
:param mechanism_cl... | Find the first mechansim class which supports a mechanism announced in
the given stream features.
:param features: Current XMPP stream features
:type features: :class:`~.nonza.StreamFeatures`
:param mechanism_classes: SASL mechanism classes to use
:type mechanism_classes: iterab... |
def send_message(self, output):
"""
Send a message to the socket
"""
file_system_event = None
if self.my_action_input:
file_system_event = self.my_action_input.file_system_event or None
output_action = ActionInput(file_system_event,
... | Send a message to the socket |
def process_fastq_minimal(fastq, **kwargs):
"""Swiftly extract minimal features (length and timestamp) from a rich fastq file"""
infastq = handle_compressed_input(fastq)
try:
df = pd.DataFrame(
data=[rec for rec in fq_minimal(infastq) if rec],
columns=["timestamp", "lengths"]... | Swiftly extract minimal features (length and timestamp) from a rich fastq file |
def build_documentation_lines(self):
"""Build a parameter documentation string that can appended to the
docstring of a function that uses this :class:`~.Filters` instance
to build filters.
"""
return [
line_string for key in sorted(self.keys)
for line_stri... | Build a parameter documentation string that can appended to the
docstring of a function that uses this :class:`~.Filters` instance
to build filters. |
def manage_itstat(self):
    """Compute, record, and display iteration statistics."""
    stats = self.iteration_stats()
    self.itstat.append(stats)
    # Render the stats using the instance's configured format string.
    self.display_status(self.fmtstr, stats)
def fill_datetime(self):
"""Returns when the slot was filled.
Returns:
A datetime.datetime.
Raises:
SlotNotFilledError if the value hasn't been filled yet.
"""
if not self.filled:
raise SlotNotFilledError('Slot with name "%s", key "%s" not yet filled.'
... | Returns when the slot was filled.
Returns:
A datetime.datetime.
Raises:
SlotNotFilledError if the value hasn't been filled yet. |
def templates(self, name=None, params=None):
"""
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-templates.html>`_
:arg name: A pattern that returned template names must match
:arg format: a short version of the Accept header, e.g. json, yaml
:arg h: Comma-... | `<https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-templates.html>`_
:arg name: A pattern that returned template names must match
:arg format: a short version of the Accept header, e.g. json, yaml
:arg h: Comma-separated list of column names to display
:arg help: Retu... |
def _read_para_overlay_ttl(self, code, cbit, clen, *, desc, length, version):
"""Read HIP OVERLAY_TTL parameter.
Structure of HIP OVERLAY_TTL parameter [RFC 6078]:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5... | Read HIP OVERLAY_TTL parameter.
Structure of HIP OVERLAY_TTL parameter [RFC 6078]:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-... |
def _process_status(self, status):
    """ Store the screen id from the latest status update and signal waiters. """
    screen_id = status.get(ATTR_SCREEN_ID)
    self._screen_id = screen_id
    self.status_update_event.set()
def c32address(version, hash160hex):
"""
>>> c32address(22, 'a46ff88886c2ef9762d970b4d2c63678835bd39d')
'SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7'
>>> c32address(0, '0000000000000000000000000000000000000000')
'S0000000000000000000002AA028H'
>>> c32address(31, '0000000000000000000000000000000000... | >>> c32address(22, 'a46ff88886c2ef9762d970b4d2c63678835bd39d')
'SP2J6ZY48GV1EZ5V2V5RB9MP66SW86PYKKNRV9EJ7'
>>> c32address(0, '0000000000000000000000000000000000000000')
'S0000000000000000000002AA028H'
>>> c32address(31, '0000000000000000000000000000000000000001')
'SZ00000000000000000005HZ3DVN'
>... |
def sign_blob(
self,
name,
payload,
delegates=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Signs a blob using a service account's system-managed private key.
... | Signs a blob using a service account's system-managed private key.
Example:
>>> from google.cloud import iam_credentials_v1
>>>
>>> client = iam_credentials_v1.IAMCredentialsClient()
>>>
>>> name = client.service_account_path('[PROJECT]', '[SERVICE_AC... |
def p_casecontent_condition_single(self, p):
    # NOTE: the docstring below is a PLY (yacc) grammar rule, not
    # documentation -- editing it changes the parser's grammar.
    'casecontent_condition : casecontent_condition COMMA expression'
    # Append the newly parsed expression to the tuple of case conditions.
    p[0] = p[1] + (p[3],)
    # Propagate the line number of the first symbol to the result.
    p.set_lineno(0, p.lineno(1))
def _extract_shape(idx, x, j, cur_center):
"""
>>> _extract_shape(np.array([0,1,2]), np.array([[1,2,3], [4,5,6]]), 1, np.array([0,3,4]))
array([-1., 0., 1.])
>>> _extract_shape(np.array([0,1,2]), np.array([[-1,2,3], [4,-5,6]]), 1, np.array([0,3,4]))
array([-0.96836405, 1.02888681, -0.06052275])
... | >>> _extract_shape(np.array([0,1,2]), np.array([[1,2,3], [4,5,6]]), 1, np.array([0,3,4]))
array([-1., 0., 1.])
>>> _extract_shape(np.array([0,1,2]), np.array([[-1,2,3], [4,-5,6]]), 1, np.array([0,3,4]))
array([-0.96836405, 1.02888681, -0.06052275])
>>> _extract_shape(np.array([1,0,1,0]), np.array([[1... |
def get_library_config(name):
"""Get distutils-compatible extension extras for the given library.
This requires ``pkg-config``.
"""
try:
proc = Popen(['pkg-config', '--cflags', '--libs', name], stdout=PIPE, stderr=PIPE)
except OSError:
print('pkg-config is required for building PyA... | Get distutils-compatible extension extras for the given library.
This requires ``pkg-config``. |
def _add_parameter(self, parameter):
'''
Force adds a `Parameter` object to the instance.
'''
if isinstance(parameter, MethodParameter):
# create a bound instance of the MethodParameter
parameter = parameter.bind(alloy=self)
self._parameters[parameter.name... | Force adds a `Parameter` object to the instance. |
def footprint(sobject):
"""
Get the I{virtual footprint} of the object.
This is really a count of the attributes in the branch with a significant
value.
@param sobject: A suds object.
@type sobject: L{Object}
@return: The branch footprint.
@rtype: int
"""
n = 0
for a in sobje... | Get the I{virtual footprint} of the object.
This is really a count of the attributes in the branch with a significant
value.
@param sobject: A suds object.
@type sobject: L{Object}
@return: The branch footprint.
@rtype: int |
def find_keys(self, regex, bucket_name=None):
"""Finds a list of S3 keys matching the passed regex
Given a regular expression, this method searches the S3 bucket
for matching keys, and returns an array of strings for matched
keys, an empty array if non are found.
:param regex: ... | Finds a list of S3 keys matching the passed regex
Given a regular expression, this method searches the S3 bucket
for matching keys, and returns an array of strings for matched
keys, an empty array if non are found.
:param regex: (str) Regular expression to use is the key search
... |
def namedb_get_name_DID_info(cur, name, block_height):
"""
Given a name and a DB cursor, find out its DID info at the given block.
Returns {'name_type': ..., 'address': ..., 'index': ...} on success
Return None if there is no such name
"""
# get the latest creator addresses for this name, as wel... | Given a name and a DB cursor, find out its DID info at the given block.
Returns {'name_type': ..., 'address': ..., 'index': ...} on success
Return None if there is no such name |
def try_lock(lock):
"""Attempts to acquire a lock, and auto releases if acquired (on exit)."""
# NOTE(harlowja): the keyword argument for 'blocking' does not work
# in py2.x and only is fixed in py3.x (this adjustment is documented
# and/or debated in http://bugs.python.org/issue10789); so we'll just
... | Attempts to acquire a lock, and auto releases if acquired (on exit). |
def queue_actions(self, source, actions, event_args=None):
    """Queue a list of actions for processing from *source*.

    The event arguments are attached to *source* while the actions are
    triggered and cleared again afterwards.  Triggers an aura refresh
    as part of processing.
    """
    source.event_args = event_args
    result = self.trigger_actions(source, actions)
    source.event_args = None
    return result
def save(self, filename=None):
"""
Save the point histories to sqlite3 database.
Save the device object properties to a pickle file so the device can be reloaded.
"""
if filename:
if ".db" in filename:
filename = filename.split(".")[0]
se... | Save the point histories to sqlite3 database.
Save the device object properties to a pickle file so the device can be reloaded. |
def summarize_provenance(self):
"""Utility function to summarize provenance files for cached items used by a Cohort.
At the moment, most PROVENANCE files contain details about packages used to
generate files. However, this function is generic & so it summarizes the contents
of those fil... | Utility function to summarize provenance files for cached items used by a Cohort.
At the moment, most PROVENANCE files contain details about packages used to
generate files. However, this function is generic & so it summarizes the contents
of those files irrespective of their contents.
... |
def source_list(source, source_hash, saltenv):
'''
Check the source list and return the source to use
CLI Example:
.. code-block:: bash
salt '*' file.source_list salt://http/httpd.conf '{hash_type: 'md5', 'hsum': <md5sum>}' base
'''
contextkey = '{0}_|-{1}_|-{2}'.format(source, source... | Check the source list and return the source to use
CLI Example:
.. code-block:: bash
salt '*' file.source_list salt://http/httpd.conf '{hash_type: 'md5', 'hsum': <md5sum>}' base |
def relabel(self, qubits: Qubits) -> 'Gate':
    """Return a copy of this Gate with new qubits"""
    # Shallow-copy the gate, then rebind its state vector to the
    # requested qubit labels.
    clone = copy(self)
    clone.vec = clone.vec.relabel(qubits)
    return clone
def get(self, sid):
"""
Constructs a OriginationUrlContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.trunking.v1.trunk.origination_url.OriginationUrlContext
:rtype: twilio.rest.trunking.v1.trunk.origination_url.OriginationUrlContext
... | Constructs a OriginationUrlContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.trunking.v1.trunk.origination_url.OriginationUrlContext
:rtype: twilio.rest.trunking.v1.trunk.origination_url.OriginationUrlContext |
def new_job(self, task, inputdata, callback, launcher_name="Unknown", debug=False, ssh_callback=None):
""" Add a new job. Every callback will be called once and only once.
:type task: Task
:param inputdata: input from the student
:type inputdata: Storage or dict
:param callback:... | Add a new job. Every callback will be called once and only once.
:type task: Task
:param inputdata: input from the student
:type inputdata: Storage or dict
:param callback: a function that will be called asynchronously in the client's process, with the results.
it's signatur... |
def load_items(self, items):
"""Loads any number of items in chunks, handling continuation tokens.
:param items: Unpacked in chunks into "RequestItems" for :func:`boto3.DynamoDB.Client.batch_get_item`.
"""
loaded_items = {}
requests = collections.deque(create_batch_get_chunks(it... | Loads any number of items in chunks, handling continuation tokens.
:param items: Unpacked in chunks into "RequestItems" for :func:`boto3.DynamoDB.Client.batch_get_item`. |
def v_type_extension(ctx, stmt):
"""verify that the extension matches the extension definition"""
(modulename, identifier) = stmt.keyword
revision = stmt.i_extension_revision
module = modulename_to_module(stmt.i_module, modulename, revision)
if module is None:
return
if identifier not in... | verify that the extension matches the extension definition |
def _init_records(self, record_types):
"""Initalize all records for this form."""
for record_type in record_types:
# This conditional was inserted on 7/11/14. It may prove problematic:
if str(record_type) not in self._my_map['recordTypeIds']:
record_initialized = ... | Initalize all records for this form. |
def _push_processor(self, proc, index=None):
"""
Pushes a processor onto the processor stack. Processors are
objects with proc_request(), proc_response(), and/or
proc_exception() methods, which can intercept requests,
responses, and exceptions. When a method invokes the send()
... | Pushes a processor onto the processor stack. Processors are
objects with proc_request(), proc_response(), and/or
proc_exception() methods, which can intercept requests,
responses, and exceptions. When a method invokes the send()
method on a request, the proc_request() method on each
... |
def log(self, n=None, **kwargs):
"""
Run the repository log command
Returns:
str: output of log command (``git log -n <n> <--kwarg=value>``)
"""
kwargs['format'] = kwargs.pop('template', self.template)
cmd = ['git', 'log']
if n:
cmd.append... | Run the repository log command
Returns:
str: output of log command (``git log -n <n> <--kwarg=value>``) |
async def create(self, token):
"""Creates a new token with a given policy
Parameters:
token (Object): Token specification
Returns:
Object: token ID
The create endpoint is used to make a new token.
A token has a name, a type, and a set of ACL rules.
... | Creates a new token with a given policy
Parameters:
token (Object): Token specification
Returns:
Object: token ID
The create endpoint is used to make a new token.
A token has a name, a type, and a set of ACL rules.
The request body may take the form::
... |
def get_short_reads(vals):
(args,txome,seed,chunk) = vals
#fast forward some ammount
"""Emit the short reads first"""
txe = TranscriptomeEmitter(txome,TranscriptomeEmitter.Options(seed=seed))
if args.weights:
weights = {}
if args.weights[-3:]=='.gz': inf = gzip.open(args.weights)
else: ... | Emit the short reads first |
def parse_lines(lines: [str], units: Units, use_na: bool = True) -> [dict]: # type: ignore
"""
Returns a list of parsed line dictionaries
"""
parsed_lines = []
prob = ''
while lines:
raw_line = lines[0].strip()
line = core.sanitize_line(raw_line)
# Remove prob from the b... | Returns a list of parsed line dictionaries |
def apply_modification(self):
"""Modifications on the right side need to be committed"""
self.__changing_model = True
if self.adding_model: self.model.add(self.adding_model)
elif self.editing_model and self.editing_iter:
# notifies the currencies model
path = sel... | Modifications on the right side need to be committed |
def title_line(text):
"""Returns a string that represents the
text as a title blurb
"""
columns = shutil.get_terminal_size()[0]
start = columns // 2 - len(text) // 2
output = '='*columns + '\n\n' + \
' ' * start + str(text) + "\n\n" + \
'='*columns + '\n'
return outpu... | Returns a string that represents the
text as a title blurb |
def density_contour(self, *args, **kwargs):
"""
Estimates point density of the given linear orientation measurements
(Interpreted as poles, lines, rakes, or "raw" longitudes and latitudes
based on the `measurement` keyword argument.) and plots contour lines of
the resulting densi... | Estimates point density of the given linear orientation measurements
(Interpreted as poles, lines, rakes, or "raw" longitudes and latitudes
based on the `measurement` keyword argument.) and plots contour lines of
the resulting density distribution.
Parameters
----------
... |
def invert_index(source_dir, index_url=INDEX_URL, init=False):
"""
Build the invert index from give source_dir
Output a Shove object built on the store_path
Input:
source_dir: a directory on the filesystem
index_url: the store_path for the Shove object
init: clear the old index a... | Build the invert index from give source_dir
Output a Shove object built on the store_path
Input:
source_dir: a directory on the filesystem
index_url: the store_path for the Shove object
init: clear the old index and rebuild from scratch
Output:
index: a Shove object |
def trapz2(f, x=None, y=None, dx=1.0, dy=1.0):
    """Double integrate.

    Integrates *f* first along its last axis (sample points *y*, spacing
    *dy*), then along the remaining axis (sample points *x*, spacing *dx*).
    """
    inner = numpy.trapz(f, x=y, dx=dy)
    return numpy.trapz(inner, x=x, dx=dx)
def get_vector(self, max_choice=3):
"""Return pseudo-choice vectors."""
vec = {}
for dim in ['forbidden', 'required', 'permitted']:
if self.meta[dim] is None:
continue
dim_vec = map(lambda x: (x, max_choice), self.meta[dim])
vec[dim] = dict(dim... | Return pseudo-choice vectors. |
def column_keymap(self):
""" Returns keymap and keystates used in column mode """
keystates = set()
shortcuts = self.cp.items('column_keymap')
keymap_dict = dict(shortcuts)
for combo, action in shortcuts:
# add all possible prefixes to keystates
combo_as... | Returns keymap and keystates used in column mode |
def _get_magnitude_term(self, C, mag):
"""
Returns the magnitude scaling term.
"""
lny = C['C1'] + (C['C3'] * ((8.5 - mag) ** 2.))
if mag > 6.3:
return lny + (-C['H'] * C['C5']) * (mag - 6.3)
else:
return lny + C['C2'] * (mag - 6.3) | Returns the magnitude scaling term. |
def SETPE(cpu, dest):
    """
    Sets byte if parity even.

    :param cpu: current CPU.
    :param dest: destination operand.
    """
    # Write 1 when the parity flag is set, 0 otherwise, widened to the
    # operand's bit size.
    value = Operators.ITEBV(dest.size, cpu.PF, 1, 0)
    dest.write(value)
def assign(self, expr):
    """Bind *expr* to a fresh variable name and return a load reference to it."""
    name = self.variable()
    target = ast.Name(name, ast.Store())
    self.statements.append(ast.Assign([target], expr))
    return ast.Name(name, ast.Load())
def _sort_converters(cls, app_ready=False):
'''Sorts the converter functions'''
# app_ready is True when called from DMP's AppConfig.ready()
# we can't sort before then because models aren't ready
cls._sorting_enabled = cls._sorting_enabled or app_ready
if cls._sorting_enabled:
... | Sorts the converter functions |
def _register_server_authenticator(klass, name):
"""Add a client authenticator class to `SERVER_MECHANISMS_D`,
`SERVER_MECHANISMS` and, optionally, to `SECURE_SERVER_MECHANISMS`
"""
# pylint: disable-msg=W0212
SERVER_MECHANISMS_D[name] = klass
items = sorted(SERVER_MECHANISMS_D.items(), key = _k... | Add a client authenticator class to `SERVER_MECHANISMS_D`,
`SERVER_MECHANISMS` and, optionally, to `SECURE_SERVER_MECHANISMS` |
def create_contact(self, *args, **kwargs):
    """Creates a contact

    Keyword arguments override the default contact fields before the
    request is sent.
    """
    url = 'contacts'
    payload = {
        'view_all_tickets': False,
        'description': 'Freshdesk Contact',
    }
    payload.update(kwargs)
    response = self._api._post(url, data=json.dumps(payload))
    return Contact(**response)
async def _retrieve_messages_around_strategy(self, retrieve):
"""Retrieve messages using around parameter."""
if self.around:
around = self.around.id if self.around else None
data = await self.logs_from(self.channel.id, retrieve, around=around)
self.around = None
... | Retrieve messages using around parameter. |
def append(self, element):
    """
    Append an element onto the array.

    >>> array = Array()
    >>> array.append('test')
    """
    # Imported locally to avoid a circular import at module load time
    # (presumably -- mirrors the original placement; confirm if moved).
    from refract.refraction import refract
    refracted = refract(element)
    self.content.append(refracted)
def _get_ptext_to_endchars(value, endchars):
"""Scan printables/quoted-pairs until endchars and return unquoted ptext.
This function turns a run of qcontent, ccontent-without-comments, or
dtext-with-quoted-printables into a single string by unquoting any
quoted printables. It returns the string, the r... | Scan printables/quoted-pairs until endchars and return unquoted ptext.
This function turns a run of qcontent, ccontent-without-comments, or
dtext-with-quoted-printables into a single string by unquoting any
quoted printables. It returns the string, the remaining value, and
a flag that is True iff ther... |
def _build_request(self, type, commands):
'''
Build NX-API JSON request.
'''
request = {}
headers = {
'content-type': 'application/json',
}
if self.nxargs['connect_over_uds']:
user = self.nxargs['cookie']
headers['cookie'] = 'nx... | Build NX-API JSON request. |
def variants(self, case_id, skip=0, count=1000, filters=None):
"""Return all variants in the VCF.
This function will apply the given filter and return the 'count' first
variants. If skip the first 'skip' variants will not be regarded.
Args:
case_id (str): Path to a ... | Return all variants in the VCF.
This function will apply the given filter and return the 'count' first
variants. If skip the first 'skip' variants will not be regarded.
Args:
case_id (str): Path to a vcf file (for this adapter)
skip (int): Skip first variant... |
def tag_manifest_into_registry(self, session, worker_digest):
"""
Tags the manifest identified by worker_digest into session.registry with all the
configured tags found in workflow.tag_conf.
"""
self.log.info("%s: Tagging manifest", session.registry)
digest = worker_dige... | Tags the manifest identified by worker_digest into session.registry with all the
configured tags found in workflow.tag_conf. |
def push(self, repository=None, tag=None):
"""
Push image to registry. Raise exception when push fail.
:param repository: str, see constructor
:param tag: str, see constructor
:return: None
"""
image = self
if repository or tag:
image = self.... | Push image to registry. Raise exception when push fail.
:param repository: str, see constructor
:param tag: str, see constructor
:return: None |
def _to_addr(worksheet, row, col, row_fixed=False, col_fixed=False):
"""converts a (0,0) based coordinate to an excel address"""
addr = ""
A = ord('A')
col += 1
while col > 0:
addr = chr(A + ((col - 1) % 26)) + addr
col = (col - 1) // 26
prefix = ("'%s'!" % worksheet) if workshe... | converts a (0,0) based coordinate to an excel address |
def connect_async(self, connection_id, connection_string, callback):
"""Connect to a device by its connection_string
This function looks for the device on AWS IOT using the preconfigured
topic prefix and looking for:
<prefix>/devices/connection_string
It then attempts to lock t... | Connect to a device by its connection_string
This function looks for the device on AWS IOT using the preconfigured
topic prefix and looking for:
<prefix>/devices/connection_string
It then attempts to lock that device for exclusive access and
returns a callback if successful.
... |
def event_update(
self,
event_id,
name=None,
season=None,
start_time=None,
event_group_id=None,
status=None,
account=None,
**kwargs
):
""" Update an event. This needs to be **proposed**.
:param str event_id: Id of the event... | Update an event. This needs to be **proposed**.
:param str event_id: Id of the event to update
:param list name: Internationalized names, e.g. ``[['de', 'Foo'],
['en', 'bar']]``
:param list season: Internationalized season, e.g. ``[['de',
'Foo'], ['en... |
def convert_snapshot(self, shift, instruction):
"""Return converted `Snapshot`.
Args:
shift(int): Offset time.
instruction (Snapshot): snapshot instruction.
Returns:
dict: Dictionary of required parameters.
"""
command_dict = {
'na... | Return converted `Snapshot`.
Args:
shift(int): Offset time.
instruction (Snapshot): snapshot instruction.
Returns:
dict: Dictionary of required parameters. |
def build_stop_times(pfeed, routes, shapes, stops, trips, buffer=cs.BUFFER):
"""
Given a ProtoFeed and its corresponding routes (DataFrame),
shapes (DataFrame), stops (DataFrame), trips (DataFrame),
return DataFrame representing ``stop_times.txt``.
Includes the optional ``shape_dist_traveled`` colum... | Given a ProtoFeed and its corresponding routes (DataFrame),
shapes (DataFrame), stops (DataFrame), trips (DataFrame),
return DataFrame representing ``stop_times.txt``.
Includes the optional ``shape_dist_traveled`` column.
Don't make stop times for trips with no nearby stops. |
def expect_constructor(target):
"""
Set an expectation on a ``ClassDouble`` constructor
:param ClassDouble target: The ClassDouble to set the expectation on.
:return: an ``Expectation`` for the __new__ method.
:raise: ``ConstructorDoubleError`` if target is not a ClassDouble.
"""
if not is... | Set an expectation on a ``ClassDouble`` constructor
:param ClassDouble target: The ClassDouble to set the expectation on.
:return: an ``Expectation`` for the __new__ method.
:raise: ``ConstructorDoubleError`` if target is not a ClassDouble. |
def speech_speaker(self):
"""Retrieves the speaker of the audio or video file associated with the element.
The source is inherited from ancestor elements if none is specified. For this reason, always use this method rather than access the ``src`` attribute directly.
Returns:
str or... | Retrieves the speaker of the audio or video file associated with the element.
The source is inherited from ancestor elements if none is specified. For this reason, always use this method rather than access the ``src`` attribute directly.
Returns:
str or None if not found |
def execute_script(code_block, example_globals, image_path, fig_count,
src_file, gallery_conf):
"""Executes the code block of the example file"""
time_elapsed = 0
stdout = ''
# We need to execute the code
print('plotting code blocks in %s' % src_file)
plt.close('all')
cw... | Executes the code block of the example file |
def police_priority_map_exceed_map_pri3_exceed(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
police_priority_map = ET.SubElement(config, "police-priority-map", xmlns="urn:brocade.com:mgmt:brocade-policer")
name_key = ET.SubElement(police_priority_map, ... | Auto Generated Code |
def get_diff_idxs(array, rtol, atol):
"""
Given an array with (C, N, L) values, being the first the reference value,
compute the relative differences and discard the one below the tolerance.
:returns: indices where there are sensible differences.
"""
C, N, L = array.shape
diff_idxs = set() ... | Given an array with (C, N, L) values, being the first the reference value,
compute the relative differences and discard the one below the tolerance.
:returns: indices where there are sensible differences. |
def _dens(self,R,z,phi=0.,t=0.):
"""
NAME:
_dens
PURPOSE:
evaluate the density for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the densi... | NAME:
_dens
PURPOSE:
evaluate the density for this potential
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
the density
HISTORY:
2018-08-06 - Written -... |
def tatoeba(language, word, minlength = 10, maxlength = 100):
''' Returns a list of suitable textsamples for a given word using Tatoeba.org. '''
word, sentences = unicode(word), []
page = requests.get('http://tatoeba.org/deu/sentences/search?query=%s&from=%s&to=und' % (word, lltk.locale.iso639_1to3(language)))
tre... | Returns a list of suitable textsamples for a given word using Tatoeba.org. |
def get_map(self, url, auth_map=None):
"""Envia uma requisição GET.
:param url: URL para enviar a requisição HTTP.
:param auth_map: Dicionário com as informações para autenticação na networkAPI.
:return: Retorna uma tupla contendo:
(< código de resposta http >, < corpo da r... | Envia uma requisição GET.
:param url: URL para enviar a requisição HTTP.
:param auth_map: Dicionário com as informações para autenticação na networkAPI.
:return: Retorna uma tupla contendo:
(< código de resposta http >, < corpo da resposta >).
:raise ConnectionError: Falha... |
def robots(request):
"""Return a simple "don't index me" robots.txt file."""
resp = request.response
resp.status = '200 OK'
resp.content_type = 'text/plain'
resp.body = """
User-Agent: *
Disallow: /
"""
return resp | Return a simple "don't index me" robots.txt file. |
def get_email(self, token):
"""Fetches email address from email API endpoint"""
resp = requests.get(self.emails_url,
params={'access_token': token.token})
emails = resp.json().get('values', [])
email = ''
try:
email = emails[0].get('email')... | Fetches email address from email API endpoint |
def _create_function(name, doc=""):
"""Create a PySpark function by its name"""
def _(col):
sc = SparkContext._active_spark_context
jc = getattr(sc._jvm.functions, name)(col._jc if isinstance(col, Column) else col)
return Column(jc)
_.__name__ = name
_.__doc__ = doc
return _ | Create a PySpark function by its name |
def _create_menu(self, items):
"""
Create a menu from the given node.
:param list items: list of menu items
:returns: a new Gtk.Menu object holding all items of the node
"""
menu = Gtk.Menu()
self._create_menu_items(menu, items)
return menu | Create a menu from the given node.
:param list items: list of menu items
:returns: a new Gtk.Menu object holding all items of the node |
def file_md5(file_name):
'''
Generate an MD5 hash of the specified file.
@file_name - The file to hash.
Returns an MD5 hex digest string.
'''
md5 = hashlib.md5()
with open(file_name, 'rb') as f:
for chunk in iter(lambda: f.read(128 * md5.block_size), b''):
md5.update(c... | Generate an MD5 hash of the specified file.
@file_name - The file to hash.
Returns an MD5 hex digest string. |
def get_force(self, component_info=None, data=None, component_position=None):
"""Get force data."""
components = []
append_components = components.append
for _ in range(component_info.plate_count):
component_position, plate = QRTPacket._get_exact(
RTForcePlate... | Get force data. |
def list(self, filter_name=None, filter_ids=None, filter_labels=None, page=None):
"""
This API endpoint returns a paginated list of the Servers
associated with your New Relic account. Servers can be filtered
by their name or by a list of server IDs.
:type filter_name: str
... | This API endpoint returns a paginated list of the Servers
associated with your New Relic account. Servers can be filtered
by their name or by a list of server IDs.
:type filter_name: str
:param filter_name: Filter by server name
:type filter_ids: list of ints
:param fil... |
def DetermineRunner(bbdir):
'''Checks if the given directory is a worker or a master and returns the
appropriate run function.'''
tacfile = os.path.join(bbdir, 'buildbot.tac')
if not os.path.exists(tacfile):
# No tac-file - use master runner by default.
import buildbot.scripts.runner
... | Checks if the given directory is a worker or a master and returns the
appropriate run function. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.