code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def _make_entities_from_ids(entity_cls, entity_objs_and_ids, server_config): """Given an iterable of entities and/or IDs, return a list of entities. :param entity_cls: An :class:`Entity` subclass. :param entity_obj_or_id: An iterable of :class:`nailgun.entity_mixins.Entity` objects and/or entity ID...
Given an iterable of entities and/or IDs, return a list of entities. :param entity_cls: An :class:`Entity` subclass. :param entity_obj_or_id: An iterable of :class:`nailgun.entity_mixins.Entity` objects and/or entity IDs. All of the entities in this iterable should be of type ``entity_cls``. ...
def find(self, name): """Returns the extension pack with the specified name if found. in name of type str The name of the extension pack to locate. return return_data of type :class:`IExtPack` The extension pack if found. raises :class:`VBoxErrorObjectNotFound`...
Returns the extension pack with the specified name if found. in name of type str The name of the extension pack to locate. return return_data of type :class:`IExtPack` The extension pack if found. raises :class:`VBoxErrorObjectNotFound` No extension pack ma...
def _compute_mean(self, C, mag, rhypo, hypo_depth, mean, idx): """ Compute mean value according to equations 10 and 11 page 226. """ mean[idx] = (C['C1'] + C['C2'] * mag + C['C3'] * np.log(rhypo[idx] + C['C4'] * np.exp(C['C5'] * mag)) + C['C6'] * hypo_depth)
Compute mean value according to equations 10 and 11 page 226.
def delete(self, client=None): """Deletes a task from Task Queue. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype...
Deletes a task from Task Queue. :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the task's taskqueue. :rtype: :class:`Task` :returns: The task...
def RegisterProtoDescriptors(db, *additional_descriptors): """Registers all API-releated descriptors in a given symbol DB.""" db.RegisterFileDescriptor(artifact_pb2.DESCRIPTOR) db.RegisterFileDescriptor(client_pb2.DESCRIPTOR) db.RegisterFileDescriptor(config_pb2.DESCRIPTOR) db.RegisterFileDescriptor(cron_pb2....
Registers all API-releated descriptors in a given symbol DB.
def get_condarc_channels(self, normalize=False, conda_url='https://conda.anaconda.org', channels=None): """Return all the channel urls defined in .condarc. If no condarc file is found, use the default channels. ...
Return all the channel urls defined in .condarc. If no condarc file is found, use the default channels. the `default_channel_alias` key is ignored and only the anaconda client `url` key is used.
def layout(self, dimensions=None, **kwargs): """Group by supplied dimension(s) and lay out groups Groups data by supplied dimension(s) laying the groups along the dimension(s) out in a NdLayout. Args: dimensions: Dimension(s) to group by Returns: NdLayo...
Group by supplied dimension(s) and lay out groups Groups data by supplied dimension(s) laying the groups along the dimension(s) out in a NdLayout. Args: dimensions: Dimension(s) to group by Returns: NdLayout with supplied dimensions
def cli(env, package_keyname): """List Datacenters a package can be ordered in. Use the location Key Name to place orders """ manager = ordering.OrderingManager(env.client) table = formatting.Table(COLUMNS) locations = manager.package_locations(package_keyname) for region in locations: ...
List Datacenters a package can be ordered in. Use the location Key Name to place orders
def _check_cargs(self, cargs): """Raise exception if clbit is not in this circuit or bad format.""" if not all(isinstance(i, tuple) and isinstance(i[0], ClassicalRegister) and isinstance(i[1], int) for i in cargs): raise QiskitError("carg not (ClassicalR...
Raise exception if clbit is not in this circuit or bad format.
def combobox_set_model_from_list(cb, items): """Setup a ComboBox or ComboBoxEntry based on a list of strings.""" cb.clear() model = gtk.ListStore(str) for i in items: model.append([i]) cb.set_model(model) if type(cb) == gtk.ComboBoxEntry: cb.set_text_column(0) elif type(cb) =...
Setup a ComboBox or ComboBoxEntry based on a list of strings.
def fastaIterator(fn, useMutableString=False, verbose=False): """ A generator function which yields fastaSequence objects from a fasta-format file or stream. :param fn: a file-like stream or a string; if this is a string, it's treated as a filename, else it's treated it as a file-like ...
A generator function which yields fastaSequence objects from a fasta-format file or stream. :param fn: a file-like stream or a string; if this is a string, it's treated as a filename, else it's treated it as a file-like object, which must have a readline() method. :param useMu...
def substitute_variables(cls, configuration, value, ref): """ Substitute variables in `value` from `configuration` where any path reference is relative to `ref`. Parameters ---------- configuration : dict configuration (required to resolve intra-document refe...
Substitute variables in `value` from `configuration` where any path reference is relative to `ref`. Parameters ---------- configuration : dict configuration (required to resolve intra-document references) value : value to resolve substitutions for ...
def plot_best_worst_fits(assignments_df, data, modality_col='Modality', score='$\log_2 K$'): """Violinplots of the highest and lowest scoring of each modality""" ncols = 2 nrows = len(assignments_df.groupby(modality_col).groups.keys()) fig, axes = plt.subplots(nrows=nrows, ncol...
Violinplots of the highest and lowest scoring of each modality
def build_opener(self): """ Builds url opener, initializing proxy. @return: OpenerDirector """ http_handler = urllib2.HTTPHandler() # debuglevel=self.transport.debug if util.empty(self.transport.proxy_url): return urllib2.build_opener(http_handler) p...
Builds url opener, initializing proxy. @return: OpenerDirector
def int_filter(text): """Extract integer from text. **中文文档** 摘除文本内的整数。 """ res = list() for char in text: if char.isdigit(): res.append(char) return int("".join(res))
Extract integer from text. **中文文档** 摘除文本内的整数。
def regularrun( shell, prompt_template="default", aliases=None, envvars=None, extra_commands=None, speed=1, test_mode=False, commentecho=False, ): """Allow user to run their own live commands until CTRL-Z is pressed again. """ loop_again = True command_string = regulartyp...
Allow user to run their own live commands until CTRL-Z is pressed again.
def delete_device(name, safety_on=True): ''' Deletes a device from Vistara based on DNS name or partial name. By default, delete_device will only perform the delete if a single host is returned. Set safety_on=False to delete all matches (up to default API search page size) CLI Example: .. code...
Deletes a device from Vistara based on DNS name or partial name. By default, delete_device will only perform the delete if a single host is returned. Set safety_on=False to delete all matches (up to default API search page size) CLI Example: .. code-block:: bash salt-run vistara.delete_device...
def get_folder(service_instance, datacenter, placement, base_vm_name=None): ''' Returns a Folder Object service_instance Service instance object datacenter Name of the datacenter placement Placement dictionary base_vm_name Existing virtual machine name (for cl...
Returns a Folder Object service_instance Service instance object datacenter Name of the datacenter placement Placement dictionary base_vm_name Existing virtual machine name (for cloning)
def random_jpath(depth = 3): """ Generate random JPath with given node depth. """ chunks = [] while depth > 0: length = random.randint(5, 15) ident = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase) for _ in range(length)) if random.choice((True, False)...
Generate random JPath with given node depth.
def reinit(self): """ Re-initialize the socket connection close current socket (if open) and start a fresh connection raise ConnectionError on error """ log.debug("Reinitializing socket connection for %s:%d" % (self.host, self.port)) if self._sock: ...
Re-initialize the socket connection close current socket (if open) and start a fresh connection raise ConnectionError on error
def _setup_freqs(self): """Updating frequency borders from channel values """ if self.header[b'foff'] > 0: self.f_start = self.f_begin + self.chan_start_idx*abs(self.header[b'foff']) self.f_stop = self.f_begin + self.chan_stop_idx*abs(self.header[b'foff']) else: ...
Updating frequency borders from channel values
def make_optimize_tensor(self, model, session=None, var_list=None, **kwargs): """ Make Tensorflow optimization tensor. This method builds optimization tensor and initializes all necessary variables created by optimizer. :param model: GPflow model. :param session:...
Make Tensorflow optimization tensor. This method builds optimization tensor and initializes all necessary variables created by optimizer. :param model: GPflow model. :param session: Tensorflow session. :param var_list: List of variables for training. :par...
def get_service_definitions(self, service_type=None): """GetServiceDefinitions. [Preview API] :param str service_type: :rtype: [ServiceDefinition] """ route_values = {} if service_type is not None: route_values['serviceType'] = self._serialize.url('ser...
GetServiceDefinitions. [Preview API] :param str service_type: :rtype: [ServiceDefinition]
def parse_line(self, line, lineno): """Check a single line for an error. Keeps track of the linenumber""" # TaskCluster logs are a bit wonky. # # TaskCluster logs begin with output coming from TaskCluster itself, # before it has transitioned control of the task to the configured...
Check a single line for an error. Keeps track of the linenumber
def parse_commandline(argv): """ Returns the arguments parsed from *argv* as a namespace. """ ap = ArgumentParser( prog='wdiffhtml', description=DESCRIPTION, epilog=EPILOG, ) ap.add_argument( '--version', action='version', version='wdiffhtml v{}'.format(version), help="shows version and...
Returns the arguments parsed from *argv* as a namespace.
def on_post(self): """Extracts the request, feeds the module, and returns the response.""" request = self.environ['wsgi.input'] try: return self.process_request(request) except ClientError as exc: return self.on_client_error(exc) except BadGateway as exc: ...
Extracts the request, feeds the module, and returns the response.
def random_word(length,dictionary = False):#may return offensive words if dictionary = True ''' Creates random lowercase words from dictionary or by alternating vowels and consonants The second method chooses from 85**length words. The dictionary method chooses from 3000--12000 words for 3<=length<...
Creates random lowercase words from dictionary or by alternating vowels and consonants The second method chooses from 85**length words. The dictionary method chooses from 3000--12000 words for 3<=length<=12 (though this of course depends on the available dictionary) :param length: word length ...
def mkdir_command(endpoint_plus_path): """ Executor for `globus mkdir` """ endpoint_id, path = endpoint_plus_path client = get_client() autoactivate(client, endpoint_id, if_expires_in=60) res = client.operation_mkdir(endpoint_id, path=path) formatted_print(res, text_format=FORMAT_TEXT_...
Executor for `globus mkdir`
def info(self, *msg): """ Prints a message with an info prefix """ label = colors.blue("INFO") self._msg(label, *msg)
Prints a message with an info prefix
def optimize(population, toolbox, ngen, archive=None, stats=None, verbose=False, history=None): """ Optimize a population of individuals. :param population: :param toolbox: :param mut_prob: :param ngen: :param archive: :param stats: :param verbose: :param history: :return: ...
Optimize a population of individuals. :param population: :param toolbox: :param mut_prob: :param ngen: :param archive: :param stats: :param verbose: :param history: :return:
def unimapping(arg, level): """ Mapping object to unicode string. :type arg: collections.Mapping :param arg: mapping object :type level: int :param level: deep level :rtype: unicode :return: mapping object as unicode string """ if not isinstance(arg, collections.Mapping): ...
Mapping object to unicode string. :type arg: collections.Mapping :param arg: mapping object :type level: int :param level: deep level :rtype: unicode :return: mapping object as unicode string
def store(self, text, tier): """ Writes text to the underlying Store mapped at tier. If the store doesn't exists, yet, it creates it :param text: the text to write :param tier: the tier used to identify the store :return: """ store = self._stores.get(tier, None) ...
Writes text to the underlying Store mapped at tier. If the store doesn't exists, yet, it creates it :param text: the text to write :param tier: the tier used to identify the store :return:
def check_file_for_tabs(cls, filename, verbose=True): """identifies if the file contains tabs and returns True if it does. It also prints the location of the lines and columns. If verbose is set to False, the location is not printed. :param verbose: if true prints issues :param ...
identifies if the file contains tabs and returns True if it does. It also prints the location of the lines and columns. If verbose is set to False, the location is not printed. :param verbose: if true prints issues :param filename: the filename :type filename: str :rtype...
def remove_repositories(repositories, default_repositories): """ Remove no default repositories """ repos = [] for repo in repositories: if repo in default_repositories: repos.append(repo) return repos
Remove no default repositories
def combine_mv_and_lv(mv, lv): """Combine MV and LV grid topology in PyPSA format """ combined = { c: pd.concat([mv[c], lv[c]], axis=0) for c in list(lv.keys()) } combined['Transformer'] = mv['Transformer'] return combined
Combine MV and LV grid topology in PyPSA format
def is_deletion(self): """ Does this variant represent the deletion of nucleotides from the reference genome? """ # A deletion would appear in a VCF like CT>C, so that the # reference allele starts with the alternate nucleotides. # This is true even in the normali...
Does this variant represent the deletion of nucleotides from the reference genome?
def continue_abort(self, root_pipeline_key, cursor=None, max_to_notify=_MAX_ABORTS_TO_BEGIN): """Sends the abort signal to all children for a root pipeline. Args: root_pipeline_key: db.Key of the root pipeline to abort. cursor: The quer...
Sends the abort signal to all children for a root pipeline. Args: root_pipeline_key: db.Key of the root pipeline to abort. cursor: The query cursor for enumerating _PipelineRecords when inserting tasks to cause child pipelines to terminate. max_to_notify: Used for testing.
def from_array(array): """ Deserialize a new LabeledPrice from a given dictionary. :return: new LabeledPrice instance. :rtype: LabeledPrice """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_nam...
Deserialize a new LabeledPrice from a given dictionary. :return: new LabeledPrice instance. :rtype: LabeledPrice
def guinierplot(*args, **kwargs): """Make a Guinier plot. This is simply a wrapper around plotsascurve().""" ret=plotsascurve(*args, **kwargs) plt.xscale('power',exponent=2) plt.yscale('log') return ret
Make a Guinier plot. This is simply a wrapper around plotsascurve().
def handle_exists(self, spec, checkable): '''The implementation of this one is weird. By the time the {'$exists': True} spec gets to the dispatched handler, the key presumably exists. So we just parrot the assertion the spec makes. If it asserts the key exists, we return True. I...
The implementation of this one is weird. By the time the {'$exists': True} spec gets to the dispatched handler, the key presumably exists. So we just parrot the assertion the spec makes. If it asserts the key exists, we return True. If it asserts the key doesn't exist, we return...
def find(self, which, param): '''Get a parameter from a layer in the network. Parameters ---------- which : int or str The layer that owns the parameter to return. If this is an integer, then 0 refers to the input layer, 1 refers to the first hidden ...
Get a parameter from a layer in the network. Parameters ---------- which : int or str The layer that owns the parameter to return. If this is an integer, then 0 refers to the input layer, 1 refers to the first hidden layer, 2 to the second, and so on. ...
def user_filter(config, message, fasnick=None, *args, **kw): """ A particular user Use this rule to include messages that are associated with a specific user. """ fasnick = kw.get('fasnick', fasnick) if fasnick: return fasnick in fmn.rules.utils.msg2usernames(message, **config)
A particular user Use this rule to include messages that are associated with a specific user.
def remove_core_element(self, model): """Remove respective core element of handed global variable name :param str model: String that is the key/gv_name of core element which should be removed :return: """ gv_name = model if self.global_variable_is_editable(gv_name, "Dele...
Remove respective core element of handed global variable name :param str model: String that is the key/gv_name of core element which should be removed :return:
def get_tree(cls, *condition, **kwargs): """ parent is root parent value, default is None current is current value condition is extra condition for select root records mode is search method, value is 'wide' or 'deep' """ parent_field = kwargs.pop('parent_field', '...
parent is root parent value, default is None current is current value condition is extra condition for select root records mode is search method, value is 'wide' or 'deep'
def get_is_value(tag): """ Getters for data that also work with implicit transfersyntax :param tag: the tag to read """ # data is int formatted as string so convert te string first and cast to int if tag.VR == 'OB' or tag.VR == 'UN': value = int(tag.value.decode("ascii").replace(" ", ""...
Getters for data that also work with implicit transfersyntax :param tag: the tag to read
def get_obs_route(value): """ obs-route = obs-domain-list ":" obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain]) Returns an obs-route token with the appropriate sub-tokens (that is, there is no obs-domain-list in the parse tree). """ obs_route = ObsRoute() whi...
obs-route = obs-domain-list ":" obs-domain-list = *(CFWS / ",") "@" domain *("," [CFWS] ["@" domain]) Returns an obs-route token with the appropriate sub-tokens (that is, there is no obs-domain-list in the parse tree).
def _recv(self): """ Receives and returns a message from Scratch """ prefix = self._read(self.prefix_len) msg = self._read(self._extract_len(prefix)) return prefix + msg
Receives and returns a message from Scratch
def extract_notebook_metatab(nb_path: Path): """Extract the metatab lines from a notebook and return a Metapack doc """ from metatab.rowgenerators import TextRowGenerator import nbformat with nb_path.open() as f: nb = nbformat.read(f, as_version=4) lines = '\n'.join(['Declare: metatab-lat...
Extract the metatab lines from a notebook and return a Metapack doc
def read_from_cache(self, domains=None): """ Returns: dict: Dict[str, DataFrame] """ logger.info(f'Reading data from cache ({self.EXTRACTION_CACHE_PATH})') if domains is not None and isinstance(domains, list): dfs = {domain: self.read_entry(domain) for dom...
Returns: dict: Dict[str, DataFrame]
def _python_rpath(self): """The relative path (from environment root) to python.""" # Windows virtualenv installation installs pip to the [Ss]cripts # folder. Here's a simple check to support: if sys.platform == 'win32': return os.path.join('Scripts', 'python.exe') re...
The relative path (from environment root) to python.
def compute_checksum(self, payload_offset: Optional[int]=None): '''Compute and add the checksum data to the record fields. This function also sets the content length. ''' if not self.block_file: self.fields['Content-Length'] = '0' return block_hasher = h...
Compute and add the checksum data to the record fields. This function also sets the content length.
def _recipients_from_cloud(self, recipients, field=None): """ Transform a recipient from cloud data to object data """ recipients_data = [] for recipient in recipients: recipients_data.append( self._recipient_from_cloud(recipient, field=field)) return Recipien...
Transform a recipient from cloud data to object data
def _get_event_id(object_type: str) -> str: """Return an event key for the event on the object type. This must be a unique event id for the object. Args: object_type (str): Type of object Returns: str, event id """ key = _keys.event_counter(object_type) DB.watch(key, pipe...
Return an event key for the event on the object type. This must be a unique event id for the object. Args: object_type (str): Type of object Returns: str, event id
def ParseOptions(cls, options, configuration_object): """Parses and validates options. Args: options (argparse.Namespace): parser options. configuration_object (CLITool): object to be configured by the argument helper. Raises: BadConfigObject: when the configuration object is o...
Parses and validates options. Args: options (argparse.Namespace): parser options. configuration_object (CLITool): object to be configured by the argument helper. Raises: BadConfigObject: when the configuration object is of the wrong type. BadConfigOption: when a configuration...
def get_parser(self): """ Returns :class:`monolith.cli.Parser` instance for this *ExecutionManager*. """ parser = self.parser_cls(prog=self.prog_name, usage=self.get_usage(), stream=self.stderr) subparsers = parser.add_subparsers( title='subcommand...
Returns :class:`monolith.cli.Parser` instance for this *ExecutionManager*.
def from_conversation_event(conversation, conv_event, prev_conv_event, datetimefmt, watermark_users=None): """Return MessageWidget representing a ConversationEvent. Returns None if the ConversationEvent does not have a widget representation. """ u...
Return MessageWidget representing a ConversationEvent. Returns None if the ConversationEvent does not have a widget representation.
def create_ip_arp_reply(srchw, dsthw, srcip, targetip): ''' Create an ARP reply (just change what needs to be changed from a request) ''' pkt = create_ip_arp_request(srchw, srcip, targetip) pkt[0].dst = dsthw pkt[1].operation = ArpOperation.Reply pkt[1].targethwaddr = dsthw return pk...
Create an ARP reply (just change what needs to be changed from a request)
def layers(self): """ similar as parent images, except that it uses /history API endpoint :return: """ # sample output: # { # "Created": 1457116802, # "Id": "sha256:507cb13a216097710f0d234668bf64a4c92949c573ba15eba13d05aad392fe04", # "S...
similar as parent images, except that it uses /history API endpoint :return:
def parse_relations( belstr: str, char_locs: CharLocs, parsed: Parsed, errors: Errors ) -> Tuple[Parsed, Errors]: """Parse relations from BEL string Args: belstr: BEL string as one single string (not list of chars) char_locs: paren, comma and quote char locations parsed: data struct...
Parse relations from BEL string Args: belstr: BEL string as one single string (not list of chars) char_locs: paren, comma and quote char locations parsed: data structure for parsed functions, relations, nested errors: error messages Returns: (parsed, errors):
def set_channel_created(self, channel_link, channel_id): """ set_channel_created: records progress after creating channel on Kolibri Studio Args: channel_link (str): link to uploaded channel channel_id (str): id of channel that has been uploaded Returns: N...
set_channel_created: records progress after creating channel on Kolibri Studio Args: channel_link (str): link to uploaded channel channel_id (str): id of channel that has been uploaded Returns: None
def apply_sfr_seg_parameters(seg_pars=True, reach_pars=False): """apply the SFR segement multiplier parameters. Expected to be run in the same dir as the model exists Parameters ---------- reach_pars : bool if reach paramters need to be applied Returns ------- sfr ...
apply the SFR segement multiplier parameters. Expected to be run in the same dir as the model exists Parameters ---------- reach_pars : bool if reach paramters need to be applied Returns ------- sfr : flopy.modflow.ModflowSfr instance Note ---- expects...
def resetAndRejoin(self, timeout): """reset and join back Thread Network with a given timeout delay Args: timeout: a timeout interval before rejoin Thread Network Returns: True: successful to reset and rejoin Thread Network False: fail to reset and rejoin th...
reset and join back Thread Network with a given timeout delay Args: timeout: a timeout interval before rejoin Thread Network Returns: True: successful to reset and rejoin Thread Network False: fail to reset and rejoin the Thread Network
def generateSplines(self): """#TODO: docstring """ _ = returnSplineList(self.dependentVar, self.independentVar, subsetPercentage=self.splineSubsetPercentage, cycles=self.splineCycles, minKnotPoints=self.spline...
#TODO: docstring
def _equalizeHistogram(img): ''' histogram equalisation not bounded to int() or an image depth of 8 bit works also with negative numbers ''' # to float if int: intType = None if 'f' not in img.dtype.str: TO_FLOAT_TYPES = {np.dtype('uint8'): np.float16, ...
histogram equalisation not bounded to int() or an image depth of 8 bit works also with negative numbers
def check_purge_status(self, purge_id): """Get the status and times of a recently completed purge.""" content = self._fetch("/purge?id=%s" % purge_id) return map(lambda x: FastlyPurgeStatus(self, x), content)
Get the status and times of a recently completed purge.
def iptag_clear(self, iptag, x, y): """Clear an IPTag. Parameters ---------- iptag : int Index of the IPTag to clear. """ self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.clear) << 16 | iptag)
Clear an IPTag. Parameters ---------- iptag : int Index of the IPTag to clear.
def mix(self, color1, color2, weight=50, *args): """This algorithm factors in both the user-provided weight and the difference between the alpha values of the two colors to decide how to perform the weighted average of the two RGB values. It works by first normalizing both parameters to...
This algorithm factors in both the user-provided weight and the difference between the alpha values of the two colors to decide how to perform the weighted average of the two RGB values. It works by first normalizing both parameters to be within [-1, 1], where 1 indicates "only use colo...
def _create_archive_table(self, table_name): ''' Dynamo implementation of BaseDataManager create_archive_table waiter object is implemented to ensure table creation before moving on this will slow down table creation. However, since we are only creating table once this should no...
Dynamo implementation of BaseDataManager create_archive_table waiter object is implemented to ensure table creation before moving on this will slow down table creation. However, since we are only creating table once this should no impact users. Parameters ---------- tab...
def serialize_gen( obj_pyxb, encoding='utf-8', pretty=False, strip_prolog=False, xslt_url=None ): """Serialize PyXB object to XML. Args: obj_pyxb: PyXB object PyXB object to serialize. encoding: str Encoding to use for XML doc bytes pretty: bool True: Use pretty ...
Serialize PyXB object to XML. Args: obj_pyxb: PyXB object PyXB object to serialize. encoding: str Encoding to use for XML doc bytes pretty: bool True: Use pretty print formatting for human readability. strip_prolog: True: remove any XML prolog (e.g., ``<?x...
def toLily(self): ''' Method which converts the object instance, its attributes and children to a string of lilypond code :return: str of lilypond code ''' lilystring = "" if not self.autoBeam: lilystring += "\\autoBeamOff" children = self.SortedChi...
Method which converts the object instance, its attributes and children to a string of lilypond code :return: str of lilypond code
def basename_without_extension(self): """ Get the ``os.path.basename`` of the local file, if any, with extension removed. """ ret = self.basename.rsplit('.', 1)[0] if ret.endswith('.tar'): ret = ret[0:len(ret)-4] return ret
Get the ``os.path.basename`` of the local file, if any, with extension removed.
def insert_paulis(self, indices=None, paulis=None, pauli_labels=None): """ Insert or append pauli to the targeted indices. If indices is None, it means append at the end. Args: indices (list[int]): the qubit indices to be inserted paulis (Pauli): the to-be-inser...
Insert or append pauli to the targeted indices. If indices is None, it means append at the end. Args: indices (list[int]): the qubit indices to be inserted paulis (Pauli): the to-be-inserted or appended pauli pauli_labels (list[str]): the to-be-inserted or appended ...
def sparse_to_unmasked_sparse(self): """The 1D index mappings between the masked sparse-grid and unmasked sparse grid.""" return mapping_util.sparse_to_unmasked_sparse_from_mask_and_pixel_centres( total_sparse_pixels=self.total_sparse_pixels, mask=self.regular_grid.mask, unmasked...
The 1D index mappings between the masked sparse-grid and unmasked sparse grid.
def rotate(a, th): """Return cartesian vectors, after rotation by specified angles about each degree of freedom. Parameters ---------- a: array, shape (n, d) Input d-dimensional cartesian vectors, left unchanged. th: array, shape (n, m) Angles by which to rotate about each m rot...
Return cartesian vectors, after rotation by specified angles about each degree of freedom. Parameters ---------- a: array, shape (n, d) Input d-dimensional cartesian vectors, left unchanged. th: array, shape (n, m) Angles by which to rotate about each m rotational degree of freedom ...
def find_converting_reactions(model, pair): """ Find all reactions which convert a given metabolite pair. Parameters ---------- model : cobra.Model The metabolic model under investigation. pair: tuple or list A pair of metabolite identifiers without compartment suffix. Retu...
Find all reactions which convert a given metabolite pair. Parameters ---------- model : cobra.Model The metabolic model under investigation. pair: tuple or list A pair of metabolite identifiers without compartment suffix. Returns ------- frozenset The set of reactio...
def combine_tax_scales(node): """ Combine all the MarginalRateTaxScales in the node into a single MarginalRateTaxScale. """ combined_tax_scales = None for child_name in node: child = node[child_name] if not isinstance(child, AbstractTaxScale): log.info('Skipping {} w...
Combine all the MarginalRateTaxScales in the node into a single MarginalRateTaxScale.
def _distort_color(image, color_ordering=0, scope=None): """Distort the color of a Tensor image. Each color distortion is non-commutative and thus ordering of the color ops matters. Ideally we would randomly permute the ordering of the color ops. Rather then adding that level of complication, we select a disti...
Distort the color of a Tensor image. Each color distortion is non-commutative and thus ordering of the color ops matters. Ideally we would randomly permute the ordering of the color ops. Rather then adding that level of complication, we select a distinct ordering of color ops for each preprocessing thread. ...
def from_text(cls, text, mapping='mapping'): """ Create a Profile instance from the Unicode graphemes found in `text`. Parameters ---------- text mapping Returns ------- A Profile instance. """ graphemes = Counter(grapheme_patter...
Create a Profile instance from the Unicode graphemes found in `text`. Parameters ---------- text mapping Returns ------- A Profile instance.
def index_agreement(s, o): """ index of agreement input: s: simulated o: observed output: ia: index of agreement """ # s,o = filter_nan(s,o) ia = 1 - (np.sum((o-s)**2)) /\ (np.sum((np.abs(s-np.mean(o))+np.abs(o-np.mean(o)))**2)) return...
index of agreement input: s: simulated o: observed output: ia: index of agreement
def random(cls, num_qubits, seed=None): """Return a random Pauli on number of qubits. Args: num_qubits (int): the number of qubits seed (int): Optional. To set a random seed. Returns: Pauli: the random pauli """ if seed is not None: ...
Return a random Pauli on number of qubits. Args: num_qubits (int): the number of qubits seed (int): Optional. To set a random seed. Returns: Pauli: the random pauli
def _get_directives_and_roles_from_sphinx(): """Return a tuple of Sphinx directive and roles.""" if SPHINX_INSTALLED: sphinx_directives = list(sphinx.domains.std.StandardDomain.directives) sphinx_roles = list(sphinx.domains.std.StandardDomain.roles) for domain in [sphinx.domains.c.CDoma...
Return a tuple of Sphinx directive and roles.
def download_image(self, img_url): """ Downloads a single image. Downloads img_url using self.page_url as base. Also, raises the appropriate exception if required. """ img_request = None try: img_request = requests.request( 'get', img_...
Downloads a single image. Downloads img_url using self.page_url as base. Also, raises the appropriate exception if required.
def load_gffutils_db(f):
    """Open an existing gffutils feature database.

    Parameters
    ----------
    f : str
        Path to the database file on disk.

    Returns
    -------
    db : gffutils.FeatureDB
        gffutils feature database handle (attribute order preserved).
    """
    import gffutils

    # keep_order=True preserves the original attribute ordering of features.
    return gffutils.FeatureDB(f, keep_order=True)
Load database for gffutils. Parameters ---------- f : str Path to database. Returns ------- db : gffutils.FeatureDB gffutils feature database.
def get_compatible_generator_action(self, filename): """ Return the **first** compatible :class:`GeneratorAction` for a given filename or ``None`` if none is found. Args: filename (str): The filename of the template to process. """ # find first compatible generator a...
Return the **first** compatible :class:`GeneratorAction` for a given filename or ``None`` if none is found. Args: filename (str): The filename of the template to process.
def rotate_content(day=None): """ this method gets the parameters that are needed for rotate_latest and rotate_featured_in_homepage methods, and calls them both""" # getting the content rotation settings from site settings for main in Main.objects.all(): site = main.sites_rooted_here.all().firs...
this method gets the parameters that are needed for rotate_latest and rotate_featured_in_homepage methods, and calls them both
def read_from_LSQ(self, LSQ_file): """ Clears all current interpretations and replaces them with interpretations read from LSQ file. Parameters ---------- LSQ_file : path to LSQ file to read in """ cont = self.user_warning( "LSQ import only wo...
Clears all current interpretations and replaces them with interpretations read from LSQ file. Parameters ---------- LSQ_file : path to LSQ file to read in
def concatenate_not_none(l, axis=0): """Construct a numpy array by stacking not-None arrays in a list Parameters ---------- data : list of arrays The list of arrays to be concatenated. Arrays have same shape in all but one dimension or are None, in which case they are ignored. axi...
Construct a numpy array by stacking not-None arrays in a list Parameters ---------- data : list of arrays The list of arrays to be concatenated. Arrays have same shape in all but one dimension or are None, in which case they are ignored. axis : int, default = 0 Axis for the co...
def get_gtf_db(gtf, in_memory=False): """ create a gffutils DB """ db_file = gtf + '.db' if gtf.endswith('.gz'): db_file = gtf[:-3] + '.db' if file_exists(db_file): return gffutils.FeatureDB(db_file) db_file = ':memory:' if in_memory else db_file if in_memory or not file_...
create a gffutils DB
def remove(self, flag, extra): """Remove Slackware binary packages """ self.flag = flag self.extra = extra self.dep_path = self.meta.log_path + "dep/" dependencies, rmv_list = [], [] self.removed = self._view_removed() if not self.removed: prin...
Remove Slackware binary packages
def get_uuid(type=4):
    """Return the 32-character hex digest of a freshly generated UUID.

    ``type`` selects which stdlib generator is used: 4 (the default)
    maps to ``uuid.uuid4``, 1 to ``uuid.uuid1``, and so on — the value
    is simply appended to the name ``'uuid'`` and looked up on the
    ``uuid`` module.
    """
    import uuid

    generator = getattr(uuid, 'uuid' + str(type))
    return generator().hex
Get uuid value
def encode(self, uuid, pad_length=22):
    """Encode a UUID as a string over the configured alphabet (LSB first).

    The integer value of ``uuid`` is converted via ``self._num_to_string``;
    if the most-significant bits are zero the result may come out shorter
    than ``pad_length``.
    """
    number = uuid.int
    return self._num_to_string(number, pad_to_length=pad_length)
Encodes a UUID into a string (LSB first) according to the alphabet If leftmost (MSB) bits 0, string might be shorter
def create_configuration(self, node, ports): """Create RAID configuration on the bare metal. This method creates the desired RAID configuration as read from node['target_raid_config']. :param node: A dictionary of the node object :param ports: A list of dictionaries containing ...
Create RAID configuration on the bare metal. This method creates the desired RAID configuration as read from node['target_raid_config']. :param node: A dictionary of the node object :param ports: A list of dictionaries containing information of ports for the node :r...
def get_energies(atoms_list): """ Potential energy for a list of atoms objects""" if len(atoms_list) == 1: return atoms_list[0].get_potential_energy() elif len(atoms_list) > 1: energies = [] for atoms in atoms_list: energies.append(atoms.get_potential_energy()) re...
Potential energy for a list of atoms objects
def get_thin_rect_vertices(ox, oy, dx, dy, r): """Given the starting point, ending point, and width, return a list of vertex coordinates at the corners of the line segment (really a thin rectangle). """ if ox < dx: leftx = ox rightx = dx xco = 1 elif ox > dx: lef...
Given the starting point, ending point, and width, return a list of vertex coordinates at the corners of the line segment (really a thin rectangle).
def get_arguments(self): """Returns the additional options for the grid (such as the queue, memory requirements, ...).""" # In python 2, the command line is unicode, which needs to be converted to string before pickling; # In python 3, the command line is bytes, which can be pickled directly args = load...
Returns the additional options for the grid (such as the queue, memory requirements, ...).
def set_common_fields(self, warc_type: str, content_type: str): '''Set the required fields for the record.''' self.fields[self.WARC_TYPE] = warc_type self.fields[self.CONTENT_TYPE] = content_type self.fields[self.WARC_DATE] = wpull.util.datetime_str() self.fields[self.WARC_RECORD...
Set the required fields for the record.
def load_modes(node): """Load all observing modes""" if isinstance(node, list): values = [load_mode(child) for child in node] keys = [mode.key for mode in values] return dict(zip(keys,values)) elif isinstance(node, dict): values = {key: load_mode(child) for key, child in node...
Load all observing modes
def load_modules(self): """Should instance interfaces and set them to interface, following `modules`""" if self.INTERFACES_MODULE is None: raise NotImplementedError("A module containing interfaces modules " "should be setup in INTERFACES_MODULE !") ...
Should instance interfaces and set them to interface, following `modules`
def update_asset(self, asset_form=None): """Updates an existing asset. :param asset_form: the form containing the elements to be updated :type asset_form: ``osid.repository.AssetForm`` :raise: ``IllegalState`` -- ``asset_form`` already used in anupdate transaction :raise: ``Inva...
Updates an existing asset. :param asset_form: the form containing the elements to be updated :type asset_form: ``osid.repository.AssetForm`` :raise: ``IllegalState`` -- ``asset_form`` already used in anupdate transaction :raise: ``InvalidArgument`` -- the form contains an invalid value ...
def downsample(self, factor): """ Compute a downsampled version of the skeleton by striding while preserving endpoints. factor: stride length for downsampling the saved skeleton paths. Returns: downsampled PrecomputedSkeleton """ if int(factor) != factor or factor < 1: raise ValueEr...
Compute a downsampled version of the skeleton by striding while preserving endpoints. factor: stride length for downsampling the saved skeleton paths. Returns: downsampled PrecomputedSkeleton