def detectTierTablet(self):
    return self.detectIpad() \
        or self.detectAndroidTablet() \
        or self.detectBlackBerryTablet() \
        or self.detectFirefoxOSTablet() \
        or self.detectUbuntuTablet() \
        or self.detectWebOSTablet()
Return detection of any device in the Tablet Tier. This is the quick way to detect for a tier of devices: it detects the new generation of HTML5-capable, larger-screen tablets. Includes iPad, Android (e.g., Xoom), BB Playbook, WebOS, etc.
def __get_line_profile_data(self):
    if self.line_profiler is None:
        return {}
    return self.line_profiler.file_dict[self.pyfile.path][0].line_dict
Method to procure line profiles. @return: Line profiles if the file has been profiled, else an empty dictionary.
def _check_graph(self, graph):
    if graph.num_vertices != self.size:
        raise TypeError("The number of vertices in the graph does not "
                        "match the length of the atomic numbers array.")
    if (self.numbers != graph.numbers).any():
        raise TypeError("The atomic numbers in the graph do not match the "
                        "atomic numbers in the molecule.")
the atomic numbers must match
def right(self):
    return self.source.directory[self.right_sibling_id] \
        if self.right_sibling_id != NOSTREAM else None
Entry is right sibling of current directory entry
def mkdir(self, path):
    if not os.path.exists(path):
        os.makedirs(path)
create a directory if it does not exist.
def read_config(self, correlation_id, parameters):
    value = self._read_object(correlation_id, parameters)
    return ConfigParams.from_value(value)
Reads configuration and parameterizes it with given values.

:param correlation_id: (optional) transaction id to trace execution through call chain.
:param parameters: values to parameterize the configuration, or null to skip parameterization.
:return: ConfigParams configuration.
def tile_bbox(self, tile_indices):
    (z, x, y) = tile_indices
    topleft = (x * self.tilesize, (y + 1) * self.tilesize)
    bottomright = ((x + 1) * self.tilesize, y * self.tilesize)
    nw = self.unproject_pixels(topleft, z)
    se = self.unproject_pixels(bottomright, z)
    return nw + se
Returns the WGS84 bbox of the specified tile
def spacing(self):
    libfn = utils.get_lib_fn('getSpacing%s' % self._libsuffix)
    return libfn(self.pointer)
Get image spacing

Returns
-------
tuple
def upload_package(context):
    if not context.dry_run and build_distributions(context):
        upload_args = 'twine upload '
        upload_args += ' '.join(Path('dist').files())
        if context.pypi:
            upload_args += ' -r %s' % context.pypi
        upload_result = shell.dry_run(upload_args, context.dry_run)
        if not context.dry_run and not upload_result:
            raise Exception('Error uploading: %s' % upload_result)
        else:
            log.info('Successfully uploaded %s:%s',
                     context.module_name, context.new_version)
    else:
        log.info('Dry run, skipping package upload')
Uploads your project packages to pypi with twine.
def printdir(self):
    print "%-46s %19s %12s" % ("File Name", "Modified ", "Size")
    for zinfo in self.filelist:
        date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
        print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)
Print a table of contents for the zip file.
def _update_record(self, record_id, name, address, ttl):
    data = json.dumps({'record': {'name': name, 'content': address, 'ttl': ttl}})
    headers = {'Content-Type': 'application/json'}
    request = self._session.put(self._baseurl + '/%d' % record_id,
                                data=data, headers=headers)
    if not request.ok:
        raise RuntimeError('Failed to update record: %s - %s' %
                           (self._format_hostname(name), request.json()))
    record = request.json()
    if 'record' not in record or 'id' not in record['record']:
        raise RuntimeError('Invalid record JSON format: %s - %s' %
                           (self._format_hostname(name), request.json()))
    return record['record']
Updates an existing record.
def record(self, partition, num_bytes, num_records):
    self.unrecorded_partitions.remove(partition)
    self.total_bytes += num_bytes
    self.total_records += num_records
    if not self.unrecorded_partitions:
        self.sensors.bytes_fetched.record(self.total_bytes)
        self.sensors.records_fetched.record(self.total_records)
After each partition is parsed, we update the current metric totals with the total bytes and number of records parsed. After all partitions have reported, we write the metric.
async def on_raw_317(self, message):
    target, nickname, idle_time = message.params[:3]
    info = {
        'idle': int(idle_time),
    }
    if nickname in self._pending['whois']:
        self._whois_info[nickname].update(info)
WHOIS idle time.
def groups(self, query=None, exclude=None, maxResults=9999):
    params = {}
    groups = []
    if query is not None:
        params['query'] = query
    if exclude is not None:
        params['exclude'] = exclude
    if maxResults is not None:
        params['maxResults'] = maxResults
    for group in self._get_json('groups/picker', params=params)['groups']:
        groups.append(group['name'])
    return sorted(groups)
Return a list of groups matching the specified criteria.

:param query: filter groups by name with this string
:type query: Optional[str]
:param exclude: filter out groups by name with this string
:type exclude: Optional[Any]
:param maxResults: maximum results to return. (Default: 9999)
:type maxResults: int
:rtype: List[str]
def create_todo_item(self, list_id, content, party_id=None, notify=False):
    path = '/todos/create_item/%u' % list_id
    req = ET.Element('request')
    ET.SubElement(req, 'content').text = str(content)
    if party_id is not None:
        ET.SubElement(req, 'responsible-party').text = str(party_id)
    ET.SubElement(req, 'notify').text = str(bool(notify)).lower()
    return self._request(path, req)
This call lets you add an item to an existing list. The item is added to the bottom of the list. If a person is responsible for the item, give their id as the party_id value. If a company is responsible, prefix their company id with a 'c' and use that as the party_id value. If the item has a person as the responsible party, you can use the notify key to indicate whether an email should be sent to that person to tell them about the assignment.
def _list_keys(self):
    req = self.request(self.uri + '/keys')
    keys = req.get().json()
    if keys:
        self._keys = {}
        for key in keys:
            self._keys[key['id']] = Key(key, self)
    else:
        self._keys = {}
Retrieves a list of all added Keys and populates the self._keys dict with Key instances

:returns: A list of Key instances
def _fetch_all_as_dict(self, cursor):
    desc = cursor.description
    return [
        dict(zip([col[0] for col in desc], row))
        for row in cursor.fetchall()
    ]
Iterates over the result set and converts each row to a dictionary

:return: A list of dictionaries where each row is a dictionary
:rtype: list of dict
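A minimal usage sketch of the same row-to-dict idea against a DB-API cursor (sqlite3 here; the table and values are made up, not from the source):

import sqlite3

conn = sqlite3.connect(':memory:')
cur = conn.cursor()
cur.execute('CREATE TABLE users (id INTEGER, name TEXT)')
cur.execute("INSERT INTO users VALUES (1, 'ada')")
cur.execute('SELECT id, name FROM users')
# Equivalent to what the helper above returns: [{'id': 1, 'name': 'ada'}]
rows = [dict(zip([col[0] for col in cur.description], row)) for row in cur.fetchall()]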
def combine(self, expert_out, multiply_by_gates=True):
    stitched = common_layers.convert_gradient_to_tensor(
        tf.concat(expert_out, 0))
    if multiply_by_gates:
        stitched *= tf.expand_dims(self._nonzero_gates, 1)
    combined = tf.unsorted_segment_sum(stitched, self._batch_index,
                                       tf.shape(self._gates)[0])
    return combined
Sum together the expert output, weighted by the gates.

The slice corresponding to a particular batch element `b` is computed as the sum over all experts `i` of the expert output, weighted by the corresponding gate values. If `multiply_by_gates` is set to False, the gate values are ignored.

Args:
    expert_out: a list of `num_experts` `Tensor`s, each with shape `[expert_batch_size_i, <extra_output_dims>]`.
    multiply_by_gates: a boolean

Returns:
    a `Tensor` with shape `[batch_size, <extra_output_dims>]`.
def annotate_intervals(target_file, data):
    out_file = "%s-gcannotated.tsv" % utils.splitext_plus(target_file)[0]
    if not utils.file_uptodate(out_file, target_file):
        with file_transaction(data, out_file) as tx_out_file:
            params = ["-T", "AnnotateIntervals",
                      "-R", dd.get_ref_file(data),
                      "-L", target_file,
                      "--interval-merging-rule", "OVERLAPPING_ONLY",
                      "-O", tx_out_file]
            _run_with_memory_scaling(params, tx_out_file, data)
    return out_file
Provide GC annotated intervals for error correction during panels and denoising. TODO: include mappability and segmentation duplication inputs
def make_request_validator(request):
    verb = request.values.get('verb', '', type=str)
    resumption_token = request.values.get('resumptionToken', None)
    schema = Verbs if resumption_token is None else ResumptionVerbs
    return getattr(schema, verb, OAISchema)(partial=False)
Validate arguments in incoming request.
def _radial_distance(shape):
    if len(shape) != 2:
        raise ValueError('shape must have only 2 elements')
    position = (np.asarray(shape) - 1) / 2.
    x = np.arange(shape[1]) - position[1]
    y = np.arange(shape[0]) - position[0]
    xx, yy = np.meshgrid(x, y)
    return np.sqrt(xx**2 + yy**2)
Return an array where each value is the Euclidean distance from the array center.

Parameters
----------
shape : tuple of int
    The size of the output array along each axis.

Returns
-------
result : `~numpy.ndarray`
    An array containing the Euclidean radial distances from the array center.
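As a quick illustration (not from the source), a 3x3 shape places the center at (1, 1) and the same computation gives:

import numpy as np

shape = (3, 3)
position = (np.asarray(shape) - 1) / 2.
x = np.arange(shape[1]) - position[1]
y = np.arange(shape[0]) - position[0]
xx, yy = np.meshgrid(x, y)
dist = np.sqrt(xx**2 + yy**2)
# dist ~= [[1.414, 1.0, 1.414],
#          [1.0,   0.0, 1.0  ],
#          [1.414, 1.0, 1.414]]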
async def fetch(self, limit: int = None) -> Sequence[StorageRecord]:
    LOGGER.debug('StorageRecordSearch.fetch >>> limit: %s', limit)
    if not self.opened:
        LOGGER.debug('StorageRecordSearch.fetch <!< Storage record search is closed')
        raise BadSearch('Storage record search is closed')
    if not self._wallet.opened:
        LOGGER.debug('StorageRecordSearch.fetch <!< Wallet %s is closed', self._wallet.name)
        raise WalletState('Wallet {} is closed'.format(self._wallet.name))
    records = json.loads(await non_secrets.fetch_wallet_search_next_records(
        self._wallet.handle,
        self.handle,
        limit or Wallet.DEFAULT_CHUNK))['records'] or []
    rv = [StorageRecord(typ=rec['type'], value=rec['value'], tags=rec['tags'], ident=rec['id'])
          for rec in records]
    LOGGER.debug('StorageRecordSearch.fetch <<< %s', rv)
    return rv
Fetch next batch of search results.

Raise BadSearch if search is closed, WalletState if wallet is closed.

:param limit: maximum number of records to return (default value Wallet.DEFAULT_CHUNK)
:return: next batch of records found
def _check_metrics(cls, schema, metrics):
    for name, value in metrics.items():
        metric = schema.get(name)
        if not metric:
            message = "Unexpected metric '{}' returned".format(name)
            raise Exception(message)
        cls._check_metric(schema, metric, name, value)
Ensure that returned metrics are properly exposed
def purge_queues(self, queues):
    for name, vhost in queues:
        vhost = quote(vhost, '')
        name = quote(name, '')
        path = Client.urls['purge_queue'] % (vhost, name)
        self._call(path, 'DELETE')
    return True
Purge all messages from one or more queues.

:param list queues: A list of ('qname', 'vhost') tuples.
:returns: True on success
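A hypothetical call, assuming an initialized client and two existing queues (names are illustrative):

client.purge_queues([('orders', '/'), ('emails', '/staging')])  # returns True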
def hash_data(data, hashlen=None, alphabet=None):
    if alphabet is None:
        alphabet = ALPHABET_27
    if hashlen is None:
        hashlen = HASH_LEN2
    if isinstance(data, stringlike) and len(data) == 0:
        text = (alphabet[0] * hashlen)
    else:
        hasher = hashlib.sha512()
        _update_hasher(hasher, data)
        text = hasher.hexdigest()
        hashstr2 = convert_hexstr_to_bigbase(text, alphabet, bigbase=len(alphabet))
        text = hashstr2[:hashlen]
    return text
r""" Get a unique hash depending on the state of the data. Args: data (object): any sort of loosely organized data hashlen (None): (default = None) alphabet (None): (default = None) Returns: str: text - hash string CommandLine: python -m utool.util_hash hash_data Example: >>> # ENABLE_DOCTEST >>> from utool.util_hash import * # NOQA >>> import utool as ut >>> counter = [0] >>> failed = [] >>> def check_hash(input_, want=None): >>> count = counter[0] = counter[0] + 1 >>> got = ut.hash_data(input_) >>> print('({}) {}'.format(count, got)) >>> if want is not None and not got.startswith(want): >>> failed.append((got, input_, count, want)) >>> check_hash('1', 'wuvrng') >>> check_hash(['1'], 'dekbfpby') >>> check_hash(tuple(['1']), 'dekbfpby') >>> check_hash(b'12', 'marreflbv') >>> check_hash([b'1', b'2'], 'nwfs') >>> check_hash(['1', '2', '3'], 'arfrp') >>> check_hash(['1', np.array([1,2,3]), '3'], 'uyqwcq') >>> check_hash('123', 'ehkgxk') >>> check_hash(zip([1, 2, 3], [4, 5, 6]), 'mjcpwa') >>> import numpy as np >>> rng = np.random.RandomState(0) >>> check_hash(rng.rand(100000), 'bdwosuey') >>> for got, input_, count, want in failed: >>> print('failed {} on {}'.format(count, input_)) >>> print('got={}, want={}'.format(got, want)) >>> assert not failed
async def user_info(self, params=None, **kwargs):
    params = params or {}
    params['fields'] = 'id,email,first_name,last_name,name,link,locale,' \
                       'gender,location'
    return await super(FacebookClient, self).user_info(params=params, **kwargs)
Facebook required fields-param.
def load_sources(self, sources):
    self.clear()
    for s in sources:
        if isinstance(s, dict):
            s = Model.create_from_dict(s)
        self.load_source(s, build_index=False)
    self._build_src_index()
Delete all sources in the ROI and load the input source list.
def delete_nic(self, instance_id, port_id):
    self.client.servers.interface_detach(instance_id, port_id)
    return True
Delete a Network Interface Controller
def issuperset(self, items): return all(_compat.map(self._seen.__contains__, items))
Return whether this collection contains all items.

>>> Unique(['spam', 'eggs']).issuperset(['spam', 'spam', 'spam'])
True
def where_before_entry(query, ref):
    return orm.select(
        e for e in query
        if e.local_date < ref.local_date
        or (e.local_date == ref.local_date and e.id < ref.id)
    )
Generate a where clause for prior entries

ref -- The entry of reference
def Describe(self):
    result = ["\nUsername: %s" % self.urn.Basename()]
    labels = [l.name for l in self.GetLabels()]
    result.append("Labels: %s" % ",".join(labels))
    if self.Get(self.Schema.PASSWORD) is None:
        result.append("Password: not set")
    else:
        result.append("Password: set")
    return "\n".join(result)
Return a description of this user.
def _uninstall_signal_handlers(self):
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    signal.signal(signal.SIGTERM, signal.SIG_DFL)
Restores default signal handlers.
def _insert_row(self, i, index):
    if i == len(self._index):
        self._add_row(index)
    else:
        self._index.insert(i, index)
        for c in range(len(self._columns)):
            self._data[c].insert(i, None)
Insert a new row in the DataFrame.

:param i: index location to insert
:param index: index value to insert into the index list
:return: nothing
def _parse_subnet(self, subnet_dict):
    if not subnet_dict:
        return
    alloc_pool = subnet_dict.get('allocation_pools')
    cidr = subnet_dict.get('cidr')
    subnet = cidr.split('/')[0]
    start = alloc_pool[0].get('start')
    end = alloc_pool[0].get('end')
    gateway = subnet_dict.get('gateway_ip')
    sec_gateway = subnet_dict.get('secondary_gw')
    return {'subnet': subnet, 'start': start, 'end': end,
            'gateway': gateway, 'sec_gateway': sec_gateway}
Return the subnet, start, end, gateway of a subnet.
def relation_factory(relation_name):
    role, interface = hookenv.relation_to_role_and_interface(relation_name)
    if not (role and interface):
        hookenv.log('Unable to determine role and interface for relation '
                    '{}'.format(relation_name), hookenv.ERROR)
        return None
    return _find_relation_factory(_relation_module(role, interface))
Get the RelationFactory for the given relation name. Looks for a RelationFactory in the first file matching: ``$CHARM_DIR/hooks/relations/{interface}/{provides,requires,peer}.py``
def mapValues(self, f):
    map_values_fn = lambda kv: (kv[0], f(kv[1]))
    return self.map(map_values_fn, preservesPartitioning=True)
Pass each value in the key-value pair RDD through a map function without changing the keys; this also retains the original RDD's partitioning.

>>> x = sc.parallelize([("a", ["apple", "banana", "lemon"]), ("b", ["grapes"])])
>>> def f(x): return len(x)
>>> x.mapValues(f).collect()
[('a', 3), ('b', 1)]
def get_linked_metadata(obj, name=None, context=None, site=None, language=None):
    Metadata = _get_metadata_model(name)
    InstanceMetadata = Metadata._meta.get_model('modelinstance')
    ModelMetadata = Metadata._meta.get_model('model')
    content_type = ContentType.objects.get_for_model(obj)
    instances = []
    if InstanceMetadata is not None:
        try:
            instance_md = InstanceMetadata.objects.get(_content_type=content_type, _object_id=obj.pk)
        except InstanceMetadata.DoesNotExist:
            instance_md = InstanceMetadata(_content_object=obj)
        instances.append(instance_md)
    if ModelMetadata is not None:
        try:
            model_md = ModelMetadata.objects.get(_content_type=content_type)
        except ModelMetadata.DoesNotExist:
            model_md = ModelMetadata(_content_type=content_type)
        instances.append(model_md)
    return FormattedMetadata(Metadata, instances, '', site, language)
Gets metadata linked from the given object.
def FilterFnTable(fn_table, symbol):
    new_table = list()
    for entry in fn_table:
        if entry[0] != symbol:
            new_table.append(entry)
    return new_table
Remove a specific symbol from a fn_table.
def map_legend_attributes(self):
    LOGGER.debug('InaSAFE Map getMapLegendAttributes called')
    legend_attribute_list = [
        'legend_notes',
        'legend_units',
        'legend_title']
    legend_attribute_dict = {}
    for legend_attribute in legend_attribute_list:
        try:
            legend_attribute_dict[legend_attribute] = \
                self._keyword_io.read_keywords(self.impact, legend_attribute)
        except KeywordNotFoundError:
            pass
        except Exception:
            pass
    return legend_attribute_dict
Get the map legend attribute from the layer keywords if possible.

:returns: None on error, otherwise the attributes (notes and units).
:rtype: None, str
def write_to_path(self, path=None):
    if path is None:
        path = self.path
    f = GitFile(path, 'wb')
    try:
        self.write_to_file(f)
    finally:
        f.close()
Write configuration to a file on disk.
def get_languages(self):
    languages = ['python']
    all_options = CONF.options(self.CONF_SECTION)
    for option in all_options:
        if option in [l.lower() for l in LSP_LANGUAGES]:
            languages.append(option)
    return languages
Get the list of languages we need to start servers and create clients for.
def make_error_redirect(self, authorization_error=None):
    if not self.redirect_uri:
        return HttpResponseRedirect(self.missing_redirect_uri)
    authorization_error = (authorization_error
                           or AccessDenied('user denied the request'))
    response_params = get_error_details(authorization_error)
    if self.state is not None:
        response_params['state'] = self.state
    return HttpResponseRedirect(
        update_parameters(self.redirect_uri, response_params))
Return a Django ``HttpResponseRedirect`` describing the request failure.

If the :py:meth:`validate` method raises an error, the authorization endpoint should return the result of calling this method like so:

    >>> auth_code_generator = (
    >>>     AuthorizationCodeGenerator('/oauth2/missing_redirect_uri/'))
    >>> try:
    >>>     auth_code_generator.validate(request)
    >>> except AuthorizationError as authorization_error:
    >>>     return auth_code_generator.make_error_redirect(authorization_error)

If there is no known Client ``redirect_uri`` (because it is malformed, or the Client is invalid, or if the supplied ``redirect_uri`` does not match the registered value, or some other request failure) then the response will redirect to the ``missing_redirect_uri`` passed to the :py:meth:`__init__` method.

Also used to signify user denial; call this method without passing in the optional ``authorization_error`` argument to return a generic :py:class:`AccessDenied` message.

    >>> if not user_accepted_request:
    >>>     return auth_code_generator.make_error_redirect()
def get_image_uri(region_name, repo_name, repo_version=1):
    repo = '{}:{}'.format(repo_name, repo_version)
    return '{}/{}'.format(registry(region_name, repo_name), repo)
Return algorithm image URI for the given AWS region, repository name, and repository version
def _pick_lead_item(items):
    paired = vcfutils.get_paired(items)
    if paired:
        return paired.tumor_data
    else:
        return list(items)[0]
Choose lead item for a set of samples. Picks tumors for tumor/normal pairs and first sample for batch groups.
def get_hierarchy_form(self, *args, **kwargs):
    if isinstance(args[-1], list) or 'hierarchy_record_types' in kwargs:
        return self.get_hierarchy_form_for_create(*args, **kwargs)
    else:
        return self.get_hierarchy_form_for_update(*args, **kwargs)
Pass through to provider HierarchyAdminSession.get_hierarchy_form_for_update
def cache_set(key, value, timeout=None, refreshed=False):
    if timeout is None:
        timeout = settings.CACHE_MIDDLEWARE_SECONDS
    refresh_time = timeout + time()
    real_timeout = timeout + settings.CACHE_SET_DELAY_SECONDS
    packed = (value, refresh_time, refreshed)
    return cache.set(_hashed_key(key), packed, real_timeout)
Wrapper for ``cache.set``. Stores the cache entry packed with the desired cache expiry time. When the entry is retrieved from cache, the packed expiry time is also checked, and if past, the stale cache entry is stored again with an expiry that has ``CACHE_SET_DELAY_SECONDS`` added to it. In this case the entry is not returned, so that a cache miss occurs and the entry should be set by the caller, but all other callers will still get the stale entry, so no real cache misses ever occur.
def columns_formatter(cls, colname):
    def wrapper(func):
        cls.columns_formatters[colname] = func
        return func
    return wrapper
Decorator to mark a function as columns formatter.
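A hypothetical usage sketch of the decorator (the view class and column name below are invented for illustration):

@MyAdminView.columns_formatter('created_at')
def format_created_at(value):
    # Registered in MyAdminView.columns_formatters under 'created_at'
    return value.strftime('%Y-%m-%d')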
def vector(self) -> typing.Tuple[typing.Tuple[float, float], typing.Tuple[float, float]]: ...
Return the vector property in relative coordinates. Vector will be a tuple of tuples ((y_start, x_start), (y_end, x_end)).
def inject_settings(mixed: Union[str, Settings],
                    context: MutableMapping[str, Any],
                    fail_silently: bool = False) -> None:
    if isinstance(mixed, str):
        try:
            mixed = import_module(mixed)
        except Exception:
            if fail_silently:
                return
            raise
    for key, value in iter_settings(mixed):
        context[key] = value
Inject settings values to given context.

:param mixed: Settings can be a string (that it will be read from Python path), Python module or dict-like instance.
:param context: Context to assign settings key values. It should support dict-like item assignment.
:param fail_silently: When enabled and reading settings from Python path ignore errors if given Python path couldn't be loaded.
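A minimal usage sketch with a dict-like settings source (keys are made up, and it assumes iter_settings yields the upper-case entries):

settings = {'DEBUG': True, 'DATABASE_URL': 'sqlite://'}
context = {}
inject_settings(settings, context)
# context now carries the DEBUG and DATABASE_URL values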
def validate_signed_elements(self, signed_elements):
    if len(signed_elements) > 2:
        return False
    response_tag = '{%s}Response' % OneLogin_Saml2_Constants.NS_SAMLP
    assertion_tag = '{%s}Assertion' % OneLogin_Saml2_Constants.NS_SAML
    if (response_tag in signed_elements and signed_elements.count(response_tag) > 1) or \
       (assertion_tag in signed_elements and signed_elements.count(assertion_tag) > 1) or \
       (response_tag not in signed_elements and assertion_tag not in signed_elements):
        return False
    if response_tag in signed_elements:
        expected_signature_nodes = OneLogin_Saml2_Utils.query(self.document, OneLogin_Saml2_Utils.RESPONSE_SIGNATURE_XPATH)
        if len(expected_signature_nodes) != 1:
            raise OneLogin_Saml2_ValidationError(
                'Unexpected number of Response signatures found. SAML Response rejected.',
                OneLogin_Saml2_ValidationError.WRONG_NUMBER_OF_SIGNATURES_IN_RESPONSE
            )
    if assertion_tag in signed_elements:
        expected_signature_nodes = self.__query(OneLogin_Saml2_Utils.ASSERTION_SIGNATURE_XPATH)
        if len(expected_signature_nodes) != 1:
            raise OneLogin_Saml2_ValidationError(
                'Unexpected number of Assertion signatures found. SAML Response rejected.',
                OneLogin_Saml2_ValidationError.WRONG_NUMBER_OF_SIGNATURES_IN_ASSERTION
            )
    return True
Verifies that the document has the expected signed nodes.

:param signed_elements: The signed elements to be checked
:type signed_elements: list

:param raise_exceptions: Whether to return false on failure or raise an exception
:type raise_exceptions: Boolean
def print_object_attributes(thing, heading=None, file=None):
    if heading:
        print('==', heading, '==', file=file)
    print('\n'.join(object_attributes(thing)), file=file)
Print the attribute names in thing vertically
def execute(self, limit='default', params=None, **kwargs):
    from ibis.client import execute
    return execute(self, limit=limit, params=params, **kwargs)
If this expression is based on physical tables in a database backend, execute it against that backend.

Parameters
----------
limit : integer or None, default 'default'
    Pass an integer to effect a specific row limit. limit=None means "no limit". The default is whatever is in ibis.options.

Returns
-------
result : expression-dependent
    Result of compiling expression and executing in backend
def call(args, stdout=PIPE, stderr=PIPE):
    p = Popen(args, stdout=stdout, stderr=stderr)
    out, err = p.communicate()
    try:
        return out.decode(sys.stdout.encoding), err.decode(sys.stdout.encoding)
    except Exception:
        return out, err
Calls the given arguments in a separate process and returns the contents of standard out.
def computePCs(plink_path, k, bfile, ffile):
    try:
        output = subprocess.check_output('%s --version --noweb' % plink_path, shell=True)
        use_plink = float(output.split(' ')[1][1:-3]) >= 1.9
    except:
        use_plink = False
    assert bfile != None, 'Path to bed-file is missing.'
    assert os.path.exists(bfile + '.bed'), '%s.bed is missing.' % bfile
    assert os.path.exists(bfile + '.bim'), '%s.bim is missing.' % bfile
    assert os.path.exists(bfile + '.fam'), '%s.fam is missing.' % bfile
    out_dir = os.path.split(ffile)[0]
    if out_dir != '' and (not os.path.exists(out_dir)):
        os.makedirs(out_dir)
    if use_plink:
        computePCsPlink(plink_path, k, out_dir, bfile, ffile)
    else:
        computePCsPython(out_dir, k, bfile, ffile)
compute the first k principal components

Input:
    k          : number of principal components
    plink_path : plink path
    bfile      : binary bed file (bfile.bed, bfile.bim and bfile.fam are required)
    ffile      : name of output file
def rescan_file(self, filename, sha256hash, apikey):
    url = self.base_url + "file/rescan"
    params = {
        'apikey': apikey,
        'resource': sha256hash
    }
    rate_limit_clear = self.rate_limit()
    if rate_limit_clear:
        response = requests.post(url, params=params)
        if response.status_code == self.HTTP_OK:
            self.logger.info("sent: %s, HTTP: %d, content: %s",
                             os.path.basename(filename), response.status_code, response.text)
        elif response.status_code == self.HTTP_RATE_EXCEEDED:
            time.sleep(20)
        else:
            self.logger.error("sent: %s, HTTP: %d",
                              os.path.basename(filename), response.status_code)
        return response
just send the hash, check the date
def create_logger(log_file, name='logger', cmd=True):
    import logging
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s | %(name)s | %(levelname)s | %(message)s',
                                  datefmt='%Y-%m-%d %H:%M:%S')
    fh = logging.FileHandler(log_file)
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    if cmd:
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)
        ch.setFormatter(formatter)
        logger.addHandler(ch)
    return logger
define a logger for your program

parameters
----------
log_file    file name of log
name        name of logger

example
-------
logger = create_logger('example.log', name='logger')
logger.info('This is an example!')
logger.warning('This is a warn!')
def greedy_max_inden_setcover(candidate_sets_dict, items, max_covers=None):
    uncovered_set = set(items)
    rejected_keys = set()
    accepted_keys = set()
    covered_items_list = []
    while True:
        if max_covers is not None and len(covered_items_list) >= max_covers:
            break
        maxkey = None
        maxlen = -1
        for key, candidate_items in six.iteritems(candidate_sets_dict):
            if key in rejected_keys or key in accepted_keys:
                continue
            lenval = len(candidate_items)
            if uncovered_set.issuperset(candidate_items):
                if lenval > maxlen:
                    maxkey = key
                    maxlen = lenval
            else:
                rejected_keys.add(key)
        if maxkey is None:
            break
        maxval = candidate_sets_dict[maxkey]
        accepted_keys.add(maxkey)
        covered_items_list.append(list(maxval))
        uncovered_set.difference_update(maxval)
    uncovered_items = list(uncovered_set)
    covertup = uncovered_items, covered_items_list, accepted_keys
    return covertup
greedy algorithm for maximum independent set cover

Covers items with sets from candidate sets. Could be made faster.

CommandLine:
    python -m utool.util_alg --test-greedy_max_inden_setcover

Example0:
    >>> # ENABLE_DOCTEST
    >>> from utool.util_alg import *  # NOQA
    >>> import utool as ut
    >>> candidate_sets_dict = {'a': [5, 3], 'b': [2, 3, 5],
    ...                        'c': [4, 8], 'd': [7, 6, 2, 1]}
    >>> items = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    >>> max_covers = None
    >>> tup = greedy_max_inden_setcover(candidate_sets_dict, items, max_covers)
    >>> (uncovered_items, covered_items_list, accepted_keys) = tup
    >>> result = ut.repr4((uncovered_items, sorted(list(accepted_keys))), nl=False)
    >>> print(result)
    ([0, 9], ['a', 'c', 'd'])

Example1:
    >>> # ENABLE_DOCTEST
    >>> from utool.util_alg import *  # NOQA
    >>> import utool as ut
    >>> candidate_sets_dict = {'a': [5, 3], 'b': [2, 3, 5],
    ...                        'c': [4, 8], 'd': [7, 6, 2, 1]}
    >>> items = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    >>> max_covers = 1
    >>> tup = greedy_max_inden_setcover(candidate_sets_dict, items, max_covers)
    >>> (uncovered_items, covered_items_list, accepted_keys) = tup
    >>> result = ut.repr4((uncovered_items, sorted(list(accepted_keys))), nl=False)
    >>> print(result)
    ([0, 3, 4, 5, 8, 9], ['d'])
def update_state_machine_tab_label(self, state_machine_m):
    sm_id = state_machine_m.state_machine.state_machine_id
    if sm_id in self.tabs:
        sm = state_machine_m.state_machine
        if not self.tabs[sm_id]['marked_dirty'] == sm.marked_dirty or \
                not self.tabs[sm_id]['file_system_path'] == sm.file_system_path or \
                not self.tabs[sm_id]['root_state_name'] == sm.root_state.name:
            label = self.view["notebook"].get_tab_label(self.tabs[sm_id]["page"]).get_child().get_children()[0]
            set_tab_label_texts(label, state_machine_m, unsaved_changes=sm.marked_dirty)
            self.tabs[sm_id]['file_system_path'] = sm.file_system_path
            self.tabs[sm_id]['marked_dirty'] = sm.marked_dirty
            self.tabs[sm_id]['root_state_name'] = sm.root_state.name
    else:
        logger.warning("State machine '{0}' tab label can not be updated, there is no tab.".format(sm_id))
Updates tab label if needed because system path, root state name or marked_dirty flag changed

:param StateMachineModel state_machine_m: State machine model that has changed
:return:
def focus_down(pymux):
    " Move focus down. "
    _move_focus(pymux,
                lambda wp: wp.xpos,
                lambda wp: wp.ypos + wp.height + 2)
Move focus down.
def widget_from_single_value(o):
    if isinstance(o, string_types):
        return Text(value=unicode_type(o))
    elif isinstance(o, bool):
        return Checkbox(value=o)
    elif isinstance(o, Integral):
        min, max, value = _get_min_max_value(None, None, o)
        return IntSlider(value=o, min=min, max=max)
    elif isinstance(o, Real):
        min, max, value = _get_min_max_value(None, None, o)
        return FloatSlider(value=o, min=min, max=max)
    else:
        return None
Make widgets from single values, which can be used as parameter defaults.
def churn(self):
    canceled = self.canceled().count()
    active = self.active().count()
    return decimal.Decimal(str(canceled)) / decimal.Decimal(str(active))
Return number of canceled Subscriptions divided by active Subscriptions.
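A quick illustration of the arithmetic with made-up counts (not from the source):

import decimal

# 5 canceled, 100 active subscriptions
decimal.Decimal('5') / decimal.Decimal('100')  # Decimal('0.05')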
def get_group_for_col(self, table_name, col_name):
    df = self.dm[table_name]
    try:
        group_name = df.loc[col_name, 'group']
    except KeyError:
        return ''
    return group_name
Check data model to find group name for a given column header

Parameters
----------
table_name : str
col_name : str

Returns
-------
group_name : str
def _set_hyperparameters(self, parameters):
    for name, value in parameters.iteritems():
        try:
            getattr(self, name)
        except AttributeError:
            raise ValueError(
                'Each parameter in parameters must be an attribute. '
                '{} is not.'.format(name))
        setattr(self, name, value)
Set internal optimization parameters.
def create_backend(self, service_id, version_number, name, address,
                   use_ssl=False, port=80, connect_timeout=1000,
                   first_byte_timeout=15000, between_bytes_timeout=10000,
                   error_threshold=0, max_conn=20, weight=100,
                   auto_loadbalance=False, shield=None,
                   request_condition=None, healthcheck=None, comment=None):
    body = self._formdata({
        "name": name,
        "address": address,
        "use_ssl": use_ssl,
        "port": port,
        "connect_timeout": connect_timeout,
        "first_byte_timeout": first_byte_timeout,
        "between_bytes_timeout": between_bytes_timeout,
        "error_threshold": error_threshold,
        "max_conn": max_conn,
        "weight": weight,
        "auto_loadbalance": auto_loadbalance,
        "shield": shield,
        "request_condition": request_condition,
        "healthcheck": healthcheck,
        "comment": comment,
    }, FastlyBackend.FIELDS)
    content = self._fetch("/service/%s/version/%d/backend" % (service_id, version_number),
                          method="POST", body=body)
    return FastlyBackend(self, content)
Create a backend for a particular service and version.
def prox_soft_plus(X, step, thresh=0): return prox_plus(prox_soft(X, step, thresh=thresh), step)
Soft thresholding with projection onto non-negative numbers
def exclude_chars(text, exclusion=None):
    exclusion = [] if exclusion is None else exclusion
    regexp = r"|".join([select_regexp_char(x) for x in exclusion]) or r''
    return re.sub(regexp, '', text)
Clean text string of symbols in exclusion list.
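A hypothetical call (assuming select_regexp_char simply escapes each character for use in the regex):

exclude_chars('a-b.c', exclusion=['-', '.'])  # -> 'abc'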
def handle_unset_command(self, line: str, position: int, tokens: ParseResults) -> ParseResults:
    key = tokens['key']
    self.validate_unset_command(line, position, key)
    del self.annotations[key]
    return tokens
Handle an ``UNSET X`` statement or raise an exception if it is not already set.

:raises: MissingAnnotationKeyWarning
def _fetch(self):
    if 'uri' in self._meta_data:
        error = "There was an attempt to assign a new uri to this " \
                "resource, the _meta_data['uri'] is %s and it should " \
                "not be changed." % (self._meta_data['uri'])
        raise URICreationCollision(error)
    _create_uri = self._meta_data['container']._meta_data['uri']
    session = self._meta_data['bigip']._meta_data['icr_session']
    response = session.post(_create_uri, json={})
    return self._produce_instance(response)
Wrapped by `fetch`; override that in subclasses to customize.
def paint_agent_trail(self, y, x, val):
    for j in range(1, self.cell_height - 1):
        for i in range(1, self.cell_width - 1):
            self.img.put(self.agent_color(val),
                         (x * self.cell_width + i, y * self.cell_height + j))
paint an agent trail as ONE pixel to allow for multiple agent trails to be seen in the same cell
def standardize_input_data(data):
    if type(data) == bytes:
        data = data.decode('utf-8')
    if type(data) == list:
        data = [
            el.decode('utf-8') if type(el) == bytes else el
            for el in data
        ]
    return data
Ensure utf-8 encoded strings are passed to the indico API
def is_successful(self, retry=False):
    if not self.is_terminated(retry=retry):
        return False
    retry_num = options.retry_times
    while retry_num > 0:
        try:
            statuses = self.get_task_statuses()
            return all(task.status == Instance.Task.TaskStatus.SUCCESS
                       for task in statuses.values())
        except (errors.InternalServerError, errors.RequestTimeTooSkewed):
            retry_num -= 1
            if not retry or retry_num <= 0:
                raise
If the instance runs successfully.

:return: True if successful else False
:rtype: bool
def load(fp, class_=None, **kwargs): return loado(json.load(fp, **kwargs), class_=class_)
Convert content in a JSON-encoded text file to a Physical Information Object or a list of such objects.

:param fp: File-like object supporting .read() method to deserialize from.
:param class_: Subclass of :class:`.Pio` to produce, if not unambiguous
:param kwargs: Any options available to json.load().
:return: Single object derived from :class:`.Pio` or a list of such objects.
def _related_field_data(field, obj):
    data = _basic_field_data(field, obj)
    relation_info = {
        Field.REL_DB_TABLE: field.rel.to._meta.db_table,
        Field.REL_APP: field.rel.to._meta.app_label,
        Field.REL_MODEL: field.rel.to.__name__
    }
    data[Field.TYPE] = FieldType.REL
    data[Field.REL] = relation_info
    return data
Returns relation ``field`` as a dict. Dict contains related pk info and some meta information for reconstructing objects.
def make_ioc(name=None, description='Automatically generated IOC', author='IOC_api',
             links=None, keywords=None, iocid=None):
    root = ioc_et.make_ioc_root(iocid)
    root.append(ioc_et.make_metadata_node(name, description, author, links, keywords))
    metadata_node = root.find('metadata')
    top_level_indicator = make_indicator_node('OR')
    parameters_node = (ioc_et.make_parameters_node())
    root.append(ioc_et.make_criteria_node(top_level_indicator))
    root.append(parameters_node)
    ioc_et.set_root_lastmodified(root)
    return root, metadata_node, top_level_indicator, parameters_node
This generates all parts of an IOC, but without any definition. This is a helper function used by __init__.

:param name: string, Name of the ioc
:param description: string, description of the ioc
:param author: string, author name/email address
:param links: list of tuples. Each tuple should be in the form (rel, href, value).
:param keywords: string. This is normally a space delimited string of values that may be used as keywords
:param iocid: GUID for the IOC. This should not be specified under normal circumstances.
:return: a tuple containing three elementTree Element objects. The first element, the root, contains the entire IOC itself. The second element, the top level OR indicator, allows the user to add additional IndicatorItem or Indicator nodes to the IOC easily. The third element, the parameters node, allows the user to quickly parse the parameters.
def escape_unicode_string(u):
    def replacer(matchobj):
        if ord(matchobj.group(1)) == 127:
            return "\\x7f"
        if ord(matchobj.group(1)) == 92:
            return "\\\\"
        return REPLACEMENT_TABLE[ord(matchobj.group(1))]
    return re.sub("([\\000-\\037\\134\\177])", replacer, u)
Escapes the nonprintable chars 0-31 and 127, and backslash; preferably with a friendly equivalent such as '\n' if available, but otherwise with a Python-style backslashed hex escape.
def escape_shell_arg(shell_arg):
    if not isinstance(shell_arg, six.text_type):
        msg = "ERROR: escape_shell_arg() expected string argument but " \
              "got '%s' of type '%s'." % (repr(shell_arg), type(shell_arg))
        raise TypeError(msg)
    return "'%s'" % shell_arg.replace("'", r"'\''")
Escape shell argument shell_arg by placing it within single-quotes. Any single quotes found within the shell argument string will be escaped.

@param shell_arg: The shell argument to be escaped.
@type shell_arg: string
@return: The single-quote-escaped value of the shell argument.
@rtype: string
@raise TypeError: if shell_arg is not a string.
@see: U{http://mail.python.org/pipermail/python-list/2005-October/346957.html}
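An illustrative call, with the result shown as a comment:

escape_shell_arg("it's here")  # -> 'it'\''s here' (safe as a single shell token)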
def get_customjs(self, references, plot_id=None):
    if plot_id is None:
        plot_id = self.plot.id or 'PLACEHOLDER_PLOT_ID'
    self_callback = self.js_callback.format(comm_id=self.comm.id,
                                            timeout=self.timeout,
                                            debounce=self.debounce,
                                            plot_id=plot_id)
    attributes = self.attributes_js(self.attributes)
    conditions = ["%s" % cond for cond in self.skip]
    conditional = ''
    if conditions:
        conditional = 'if (%s) { return };\n' % (' || '.join(conditions))
    data = "var data = {};\n"
    code = conditional + data + attributes + self.code + self_callback
    return CustomJS(args=references, code=code)
Creates a CustomJS callback that will send the requested attributes back to python.
def merge_urls_data_to(to, food={}):
    if not to:
        to.update(food)
    for url, data in food.items():
        if url not in to:
            to[url] = data
        else:
            to[url] = to[url].merge_with(data)
Merge urls data
def __make_footprint(input, size, footprint):
    "Creates a standard footprint element ala scipy.ndimage."
    if footprint is None:
        if size is None:
            raise RuntimeError("no footprint or filter size provided")
        sizes = _ni_support._normalize_sequence(size, input.ndim)
        footprint = numpy.ones(sizes, dtype=bool)
    else:
        footprint = numpy.asarray(footprint, dtype=bool)
    return footprint
Creates a standard footprint element ala scipy.ndimage.
def next_img(self, loop=True):
    channel = self.get_current_channel()
    if channel is None:
        self.show_error("Please create a channel.", raisetab=True)
        return
    channel.next_image()
    return True
Go to the next image in the channel.
def _onWhat(self, name, line, pos, absPosition): self.__lastImport.what.append(ImportWhat(name, line, pos, absPosition))
Memorizes an imported item
def make_fasta_url(ensembl_release, species, sequence_type, server=ENSEMBL_FTP_SERVER):
    ensembl_release, species, reference_name = normalize_release_properties(
        ensembl_release, species)
    subdir = _species_subdir(
        ensembl_release,
        species=species,
        filetype="fasta",
        server=server)
    server_subdir = urllib_parse.urljoin(server, subdir)
    server_sequence_subdir = join(server_subdir, sequence_type)
    filename = make_fasta_filename(
        ensembl_release=ensembl_release,
        species=species,
        sequence_type=sequence_type)
    return join(server_sequence_subdir, filename)
Construct URL to FASTA file with cDNA transcript or protein sequences

Parameter examples:
    ensembl_release = 75
    species = "Homo_sapiens"
    sequence_type = "cdna" (other option: "pep")
def delete_thing_shadow(self, **kwargs):
    thing_name = self._get_required_parameter('thingName', **kwargs)
    payload = b''
    return self._shadow_op('delete', thing_name, payload)
r""" Deletes the thing shadow for the specified thing. :Keyword Arguments: * *thingName* (``string``) -- [REQUIRED] The name of the thing. :returns: (``dict``) -- The output from the DeleteThingShadow operation * *payload* (``bytes``) -- The state information, in JSON format.
def national(self):
    if self._national is None:
        self._national = NationalList(
            self._version,
            account_sid=self._solution['account_sid'],
            country_code=self._solution['country_code'],
        )
    return self._national
Access the national

:returns: twilio.rest.api.v2010.account.available_phone_number.national.NationalList
:rtype: twilio.rest.api.v2010.account.available_phone_number.national.NationalList
def client_getname(self, encoding=_NOTSET): return self.execute(b'CLIENT', b'GETNAME', encoding=encoding)
Get the current connection name.
def contributions(self, request, **kwargs):
    if Contribution not in get_models():
        return Response([])
    if request.method == "POST":
        serializer = ContributionSerializer(data=get_request_data(request), many=True)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        serializer.save()
        return Response(serializer.data)
    else:
        content_pk = kwargs.get('pk', None)
        if content_pk is None:
            return Response([], status=status.HTTP_404_NOT_FOUND)
        queryset = Contribution.search_objects.search().filter(
            es_filter.Term(**{'content.id': content_pk})
        )
        serializer = ContributionSerializer(queryset[:queryset.count()].sort('id'), many=True)
        return Response(serializer.data)
gets or adds contributions

:param request: a WSGI request object
:param kwargs: keyword arguments (optional)
:return: `rest_framework.response.Response`
def update(self):
    if not self._track_changes:
        return True
    data = self.to_api_data(restrict_keys=self._track_changes)
    response = self.session.patch(self.build_url(''), data=data)
    if not response:
        return False
    data = response.json()
    for field in self._track_changes:
        setattr(self, snakecase(field), data.get(field))
    self._track_changes.clear()
    return True
Update this range
def _ParseRecordExtraField(self, byte_stream, file_offset):
    extra_field_map = self._GetDataTypeMap('asl_record_extra_field')
    try:
        record_extra_field = self._ReadStructureFromByteStream(
            byte_stream, file_offset, extra_field_map)
    except (ValueError, errors.ParseError) as exception:
        raise errors.ParseError((
            'Unable to parse record extra field at offset: 0x{0:08x} with error: '
            '{1!s}').format(file_offset, exception))
    return record_extra_field
Parses a record extra field.

Args:
    byte_stream (bytes): byte stream.
    file_offset (int): offset of the record extra field relative to the start of the file.

Returns:
    asl_record_extra_field: record extra field.

Raises:
    ParseError: if the record extra field cannot be parsed.
def submodules(self):
    p = lambda o: isinstance(o, Module) and self._docfilter(o)
    return sorted(filter(p, self.doc.values()))
Returns all documented sub-modules in the module sorted alphabetically as a list of `pydoc.Module`.
def inet_to_str(inet):
    try:
        return socket.inet_ntop(socket.AF_INET, inet)
    except ValueError:
        return socket.inet_ntop(socket.AF_INET6, inet)
Convert inet object to a string

Args:
    inet (inet struct): inet network address

Returns:
    str: Printable/readable IP address
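A small illustration using packed addresses from socket.inet_pton (documentation addresses, not from the source):

import socket

packed_v4 = socket.inet_pton(socket.AF_INET, '192.0.2.1')
packed_v6 = socket.inet_pton(socket.AF_INET6, '2001:db8::1')
inet_to_str(packed_v4)  # '192.0.2.1'
inet_to_str(packed_v6)  # '2001:db8::1' (the IPv4 attempt raises ValueError and falls back)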
def install_dependencies(self):
    if self._skip_virtualenv:
        LOG.info('Skip Virtualenv set ... nothing to do')
        return
    has_reqs = _isfile(self._requirements_file) or self._requirements
    if self._virtualenv is None and has_reqs:
        LOG.info('Building new virtualenv and installing requirements')
        self._build_new_virtualenv()
        self._install_requirements()
    elif self._virtualenv is None and not has_reqs:
        LOG.info('No requirements found, so no virtualenv will be made')
        self._pkg_venv = False
    else:
        raise Exception('Cannot determine what to do about virtualenv')
Creates a virtualenv and installs requirements
def _master_control_program(self):
    return mcp.MasterControlProgram(self.config,
                                    consumer=self.args.consumer,
                                    profile=self.args.profile,
                                    quantity=self.args.quantity)
Return an instance of the MasterControlProgram.

:rtype: rejected.mcp.MasterControlProgram
def create_switch(type, settings, pin):
    switch = None
    if type == "A":
        group, device = settings.split(",")
        switch = pi_switch.RCSwitchA(group, device)
    elif type == "B":
        addr, channel = settings.split(",")
        addr = int(addr)
        channel = int(channel)
        switch = pi_switch.RCSwitchB(addr, channel)
    elif type == "C":
        family, group, device = settings.split(",")
        group = int(group)
        device = int(device)
        switch = pi_switch.RCSwitchC(family, group, device)
    elif type == "D":
        group, device = settings.split(",")
        device = int(device)
        switch = pi_switch.RCSwitchD(group, device)
    else:
        print "Type %s is not supported!" % type
        sys.exit()
    switch.enableTransmit(pin)
    return switch
Create a switch.

Args:
    type (str): type of the switch [A,B,C,D]
    settings (str): a comma separated list
    pin (int): wiringPi pin

Returns:
    switch
def on_import1(self, event): pmag_menu_dialogs.MoveFileIntoWD(self.parent, self.parent.WD)
initialize window to import an arbitrary file into the working directory
def init_all_receivers():
    receivers = discover()
    init_receivers = []
    for receiver in receivers:
        init_receiver = DenonAVR(receiver["host"])
        init_receivers.append(init_receiver)
    return init_receivers
Initialize all discovered Denon AVR receivers in LAN zone. Returns a list of created Denon AVR instances. By default SSDP broadcasts are sent up to 3 times with a 2 seconds timeout.
def from_argparse(cls, opts):
    return cls(opts.ethinca_pn_order,
               opts.filter_cutoff,
               opts.ethinca_frequency_step,
               fLow=None,
               full_ethinca=opts.calculate_ethinca_metric,
               time_ethinca=opts.calculate_time_metric_components)
Initialize an instance of the ethincaParameters class from an argparse.OptionParser instance. This assumes that insert_ethinca_metric_options and verify_ethinca_metric_options have already been called before initializing the class.
def _by_columns(self, columns): return columns if self.isstr(columns) else self._backtick_columns(columns)
Allow select.group and select.order accepting string and list
def _operator_norms(L):
    L_norms = []
    for Li in L:
        if np.isscalar(Li):
            L_norms.append(float(Li))
        elif isinstance(Li, Operator):
            L_norms.append(Li.norm(estimate=True))
        else:
            raise TypeError('invalid entry {!r} in `L`'.format(Li))
    return L_norms
Get operator norms if needed.

Parameters
----------
L : sequence of `Operator` or float
    The operators or the norms of the operators that are used in the `douglas_rachford_pd` method. For `Operator` entries, the norm is computed with ``Operator.norm(estimate=True)``.
def _first_word_not_cmd(self, first_word: str, command: str, args: tuple, kwargs: dict) -> None:
    if self.service_interface.is_service(first_word):
        self._logger.debug(' first word is a service')
        kwargs = self.service_interface.get_metadata(first_word, kwargs)
        self._logger.debug(' service transform kwargs: %s', kwargs)
    elif self.author_interface.is_author(first_word):
        self._logger.debug(' first word is an author')
        kwargs = self.author_interface.get_metadata(first_word, kwargs)
        self._logger.debug(' author transform kwargs: %s', kwargs)
    if not kwargs.get('remote'):
        kwargs['remote_command'] = command
        command = 'REMOTE'
        self.messaging.send_command(command, *args, **kwargs)
        return
    else:
        self.messaging.send_command(command, *args, **kwargs)
Check to see if this is an author or service. This method does high-level control handling.
def run(cmd_str, cwd='.', verbose=False):
    warnings.warn("run() has moved to pyemu.os_utils", PyemuWarning)
    pyemu.os_utils.run(cmd_str=cmd_str, cwd=cwd, verbose=verbose)
an OS agnostic function to execute command

Parameters
----------
cmd_str : str
    the str to execute with os.system()
cwd : str
    the directory to execute the command in
verbose : bool
    flag to echo to stdout complete cmd str

Note
----
uses platform to detect OS and adds .exe or ./ as appropriate for Windows

if os.system returns non-zero, raises exception

Example
-------
``>>>import pyemu``
``>>>pyemu.helpers.run("pestpp pest.pst")``