Dataset columns: content (string, lengths 22 to 815k), id (int64, 0 to 4.91M)
def gather_parent_cnvs(vcf, fa, mo):
    """
    Create BedTools corresponding to parent CNVs for coverage-based inheritance
    """
    cnv_format = '{0}\t{1}\t{2}\t{3}\t{4}\n'
    fa_cnvs = ''
    mo_cnvs = ''
    for record in vcf:
        # Do not include variants from sex chromosomes
        if record.chrom in sex_chroms:
            continue
        # Process biallelic CNVs
        if record.info['SVTYPE'] in 'DEL DUP'.split() \
        and 'MULTIALLELIC' not in record.filter:
            # Father
            fa_ac = get_AC(get_GT(record, fa))
            if fa_ac != 'NA':
                if int(fa_ac) > 0:
                    new_cnv = cnv_format.format(record.chrom, str(record.pos),
                                                str(record.stop),
                                                record.info['SVTYPE'], fa_ac)
                    fa_cnvs = fa_cnvs + new_cnv
            # Mother
            mo_ac = get_AC(get_GT(record, mo))
            if mo_ac != 'NA':
                if int(mo_ac) > 0:
                    new_cnv = cnv_format.format(record.chrom, str(record.pos),
                                                str(record.stop),
                                                record.info['SVTYPE'], mo_ac)
                    mo_cnvs = mo_cnvs + new_cnv
        # Process multiallelic CNVs
        if record.info['SVTYPE'] == 'MCNV' and 'MULTIALLELIC' in record.filter:
            # Father
            fa_ac = get_GT(record, fa).split('/')[1]
            if fa_ac != 'None':
                fa_ac = int(fa_ac)
                if fa_ac < 2:
                    new_cnv = cnv_format.format(record.chrom, str(record.pos),
                                                str(record.stop), 'DEL',
                                                str(2 - fa_ac))
                    fa_cnvs = fa_cnvs + new_cnv
                elif fa_ac > 2:
                    new_cnv = cnv_format.format(record.chrom, str(record.pos),
                                                str(record.stop), 'DUP',
                                                str(fa_ac - 2))
                    fa_cnvs = fa_cnvs + new_cnv
            # Mother
            mo_ac = get_GT(record, mo).split('/')[1]
            if mo_ac != 'None':
                mo_ac = int(mo_ac)
                if mo_ac < 2:
                    new_cnv = cnv_format.format(record.chrom, str(record.pos),
                                                str(record.stop), 'DEL',
                                                str(2 - mo_ac))
                    mo_cnvs = mo_cnvs + new_cnv
                elif mo_ac > 2:
                    new_cnv = cnv_format.format(record.chrom, str(record.pos),
                                                str(record.stop), 'DUP',
                                                str(mo_ac - 2))
                    mo_cnvs = mo_cnvs + new_cnv
    fa_cnvs = pbt.BedTool(fa_cnvs, from_string=True)
    mo_cnvs = pbt.BedTool(mo_cnvs, from_string=True)
    return fa_cnvs, mo_cnvs
6,600
def test_get_light_information(light_control):
    """Test get light information API."""
    light_control._request.return_value = response_getLightInformation
    light_control.get_light_information()
    light_control._request.assert_called_with(
        "post",
        "/axis-cgi/lightcontrol.cgi",
        json={
            "method": "getLightInformation",
            "apiVersion": "1.1",
            "context": "Axis library",
        },
    )
6,601
def create_images():
    """
    Create new images

    Internal Parameters:
        image (FileStorage): Image

    Returns:
        success (boolean)
        image (list)
    """
    # vars
    image_file = request.files.get('image')
    validate_image_data({"image": image_file})
    image_url_set = create_img_set(image_file)
    # create image
    image = Image(**{
        "user_id": auth_user_id(),
        "url": json.dumps(image_url_set)
    })
    try:
        image.insert()
        # return the result
        return jsonify({
            'success': True,
            'image': image.format()
        })
    except Exception:
        abort(400)
6,602
def docx2python(
    docx_filename: Union[str, Path],
    image_folder: Optional[str] = None,
    html: bool = False,
    paragraph_styles: bool = False,
    extract_image: bool = None,
) -> DocxContent:
    """
    Unzip a docx file and extract contents.

    :param docx_filename: path to a docx file
    :param image_folder: optionally specify an image folder
        (images in docx will be copied to this folder)
    :param html: bool, extract some formatting as html
    :param paragraph_styles: prepend the paragraph's style (if any, else "") to each
        paragraph. This will only be useful with ``*_runs`` attributes.
    :param extract_image: bool, extract images from document (default True)
    :return: DocxContent object
    """
    if extract_image is not None:
        warn(
            "'extract_image' is no longer a valid argument for docx2python. If an "
            "image_folder is given as an argument to docx2python, images will be "
            "written to that folder. A folder can be provided later with "
            "``docx2python(filename).write_images(image_folder)``. Image files are "
            "available as before with the ``docx2text(filename).images`` attribute."
        )
    docx_context = DocxReader(docx_filename, html, paragraph_styles)
    docx_content = DocxContent(docx_context, locals())
    if image_folder:
        _ = docx_content.images
    return docx_content
6,603
def test_check_datasets_raises_with_unsorted_interactions():
    """Passed datasets that have sublattice interactions not in sorted order should raise."""
    with pytest.raises(DatasetError):
        check_dataset(dataset_single_unsorted_interaction)
6,604
def parcel_analysis(con_imgs, parcel_img,
                    msk_img=None, vcon_imgs=None,
                    design_matrix=None, cvect=None,
                    fwhm=8, smooth_method='default',
                    res_path=None):
    """
    Helper function for Bayesian parcel-based analysis.

    Given a sequence of independent images registered to a common
    space (for instance, a set of contrast images from a first-level
    fMRI analysis), perform a second-level analysis assuming constant
    effects throughout parcels defined from a given label image in
    reference space. Specifically, a model of the following form is
    assumed:

    Y = X * beta + variability,

    where Y denotes the input image sequence, X is a design matrix,
    and beta are parcel-wise parameter vectors. The algorithm computes
    the Bayesian posterior probability of cvect'*beta, where cvect is
    a given contrast vector, in each parcel using an expectation
    propagation scheme.

    Parameters
    ----------
    con_imgs: sequence of nipy-like images
      Images input to the group analysis.
    parcel_img: nipy-like image
      Label image where each label codes for a parcel.
    msk_img: nipy-like image, optional
      Binary mask to restrict analysis. By default, analysis is
      carried out on all parcels with nonzero value.
    vcon_imgs: sequence of nipy-like images, optional
      First-level variance estimates corresponding to `con_imgs`. This
      is useful if the input images are "noisy". By default,
      first-level variances are assumed to be zero.
    design_matrix: array, optional
      If None, a one-sample analysis model is used. Otherwise, an
      array with shape (n, p) where `n` matches the number of input
      scans, and `p` is the number of regressors.
    cvect: array, optional
      Contrast vector of interest. The method makes an inference on
      the contrast defined as the dot product cvect'*beta, where beta
      are the unknown parcel-wise effects. If None, `cvect` is assumed
      to be np.array((1,)). However, the `cvect` argument is mandatory
      if `design_matrix` is provided.
    fwhm: float, optional
      A parameter that represents the localization uncertainty in
      reference space in terms of the full width at half maximum of an
      isotropic Gaussian kernel.
    smooth_method: str, optional
      One of 'default' and 'spm'. Setting `smooth_method='spm'`
      results in simply smoothing the input images using a Gaussian
      kernel, while the default method involves more complex smoothing
      in order to propagate spatial uncertainty into the inference
      process.
    res_path: str, optional
      An existing path to write output images. If None, no output is
      written.

    Returns
    -------
    pmap_mu_img: nipy image
      Image of posterior contrast means for each parcel.
    pmap_prob_img: nipy image
      Corresponding image of posterior probabilities of positive
      contrast.
    """
    p = ParcelAnalysis(con_imgs, parcel_img, parcel_info=None,
                       msk_img=msk_img, vcon_imgs=vcon_imgs,
                       design_matrix=design_matrix, cvect=cvect,
                       fwhm=fwhm, smooth_method=smooth_method,
                       res_path=res_path)
    return p.parcel_maps()
6,605
def memoize(fn):
    """Simple memoization decorator for functions and methods,
    assumes that all arguments to the function can be hashed and
    compared.
    """
    memoized_values = {}

    @wraps(fn)
    def wrapped_fn(*args, **kwargs):
        key = (args, tuple(sorted(kwargs.items())))
        try:
            return memoized_values[key]
        except KeyError:
            memoized_values[key] = fn(*args, **kwargs)
            return memoized_values[key]

    return wrapped_fn
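A quick usage sketch of the decorator above (the `slow_square` function is hypothetical; `wraps` is assumed to come from `functools`):

from functools import wraps  # required by the memoize decorator itself

@memoize
def slow_square(x):
    print("computing...")  # the side effect reveals when the body actually runs
    return x * x

slow_square(4)  # prints "computing...", returns 16
slow_square(4)  # cache hit: returns 16 without recomputing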
6,606
def insert_record(bitdotio, record, qualified_table):
    """Inserts a single sensor measurement record.

    Parameters
    ----------
    bitdotio : _Bit object
        bit.io connection client.
    record : list
        The record as a list of str representations.
    qualified_table : str
        The schema qualified table to upload to.
    """
    sql = f'INSERT INTO {qualified_table} '
    sql += 'VALUES (' + ', '.join(['%s'] * len(record)) + ');'
    execute_sql(bitdotio, sql, record)
6,607
def remove(tag: str, not_exist_ok: bool = True) -> None:
    """
    Remove the map with the given ``tag``.

    Parameters
    ----------
    tag
        The ``tag`` to search for and remove.
    not_exist_ok
        If ``False``, raise :class:`htmap.exceptions.TagNotFound` if the ``tag``
        doesn't exist.
    """
    try:
        load(tag).remove()
    except (exceptions.TagNotFound, FileNotFoundError) as e:
        if not not_exist_ok:
            if not isinstance(e, exceptions.TagNotFound):
                raise exceptions.TagNotFound(f"Map {tag} not found") from e
            raise e
6,608
def validate(args: Namespace, model: BaseModel) -> pd.DataFrame:
    """Perform the validation.

    Parameters
    ----------
    args : Namespace
        Arguments to configure the model and the validation.
    model : BaseModel
        The model to be used for validation.

    Returns
    -------
    pd.DataFrame
        A DataFrame with the metric results.

    See Also
    --------
    ptlflow.models.base_model.base_model.BaseModel : The parent class of the available models.
    """
    model.eval()
    if torch.cuda.is_available():
        model = model.cuda()

    dataloaders = model.val_dataloader()
    dataloaders = {model.val_dataloader_names[i]: dataloaders[i]
                   for i in range(len(dataloaders))}

    metrics_df = pd.DataFrame()
    metrics_df['model'] = [args.model]
    metrics_df['checkpoint'] = [args.pretrained_ckpt]

    for dataset_name, dl in dataloaders.items():
        metrics_mean = validate_one_dataloader(args, model, dl, dataset_name)
        metrics_df[[f'{dataset_name}-{k}' for k in metrics_mean.keys()]] = list(metrics_mean.values())
        args.output_path.mkdir(parents=True, exist_ok=True)
        metrics_df.T.to_csv(args.output_path / 'metrics.csv', header=False)

    metrics_df = metrics_df.round(3)
    return metrics_df
6,609
def test_directory_origin_multi_char_delimited(sdc_builder, sdc_executor):
    """Test Directory Origin with multi-character delimited format.
    This will generate a sample file with the custom multi-char delimiter
    then read it with the test pipeline. The pipeline looks like:

        directory >> trash
    """
    tmp_directory = os.path.join(tempfile.gettempdir(), get_random_string(string.ascii_letters, 10))
    # crazy delimiter
    delim = '_/-\\_'
    custom_delimited_lines = [
        f"first{delim}second{delim}third",
        f"1{delim}11{delim}111",
        f"2{delim}22{delim}222",
        f"31{delim}3,3{delim}3,_/-_3,3"
    ]
    setup_dilimited_file(sdc_executor, tmp_directory, custom_delimited_lines)

    pipeline_builder = sdc_builder.get_pipeline_builder()
    directory = pipeline_builder.add_stage('Directory', type='origin')
    directory.set_attributes(data_format='DELIMITED',
                             delimiter_format_type='MULTI_CHARACTER',
                             multi_character_field_delimiter=delim,
                             header_line='WITH_HEADER',
                             file_name_pattern='sdc*',
                             file_name_pattern_mode='GLOB',
                             file_post_processing='DELETE',
                             files_directory=tmp_directory,
                             process_subdirectories=True,
                             read_order='TIMESTAMP')
    trash = pipeline_builder.add_stage('Trash')
    directory >> trash
    directory_pipeline = pipeline_builder.build('Multi Char Delimited Directory')
    sdc_executor.add_pipeline(directory_pipeline)

    snapshot = sdc_executor.capture_snapshot(directory_pipeline, start_pipeline=True, batch_size=3).snapshot
    sdc_executor.stop_pipeline(directory_pipeline)
    output_records = snapshot[directory.instance_name].output

    assert 3 == len(output_records)
    assert output_records[0].get_field_data('/first') == '1'
    assert output_records[0].get_field_data('/second') == '11'
    assert output_records[0].get_field_data('/third') == '111'
    assert output_records[1].get_field_data('/first') == '2'
    assert output_records[1].get_field_data('/second') == '22'
    assert output_records[1].get_field_data('/third') == '222'
    assert output_records[2].get_field_data('/first') == '31'
    assert output_records[2].get_field_data('/second') == '3,3'
    assert output_records[2].get_field_data('/third') == '3,_/-_3,3'
6,610
def stats(last_day=None, timeframe=None, dates_sources=None):
    """See :class:`bgpranking.api.get_stats`"""
    query = {'method': 'stats'}
    query.update({'last_day': last_day, 'timeframe': timeframe,
                  'dates_sources': dates_sources})
    return __prepare_request(query)
6,611
def restore_purchases() -> None:
    """restore_purchases() -> None

    (internal)
    """
    return None
6,612
def show_corner_plot(chain, burnin=0.5, save=False, **kwargs):
    """
    Display or save a figure showing the corner plot (pdfs + correlation plots).

    Parameters
    ----------
    chain: numpy.array
        The Markov chain. The shape of chain must be nwalkers x length x dim.
        If a part of the chain is filled with zero values, the method will
        discard these steps.
    burnin: float, default: 0.5
        The fraction of a walker we want to discard.
    save: boolean, default: False
        If True, a pdf file is created.
    kwargs:
        Additional attributes are passed to the corner.corner() method.

    Returns
    -------
    Display the figure or create a pdf file named corner_plot.pdf in the
    working directory.

    Raises
    ------
    ImportError
    """
    # burnin = kwargs.pop('burnin', 0)
    try:
        temp = np.where(chain[0, :, 0] == 0.0)[0]
        if len(temp) != 0:
            chain = chain[:, :temp[0], :]
        length = chain.shape[1]
        chain = chain[:, int(np.floor(burnin * (length - 1))):length, :].reshape((-1, 3))
    except IndexError:
        pass

    if chain.shape[0] == 0:
        print("It seems that the chain is empty. Have you already run the MCMC?")
    else:
        fig = corner.corner(chain,
                            labels=kwargs.pop('labels', ["$r$", r"$\theta$", "$f$"]),
                            **kwargs)
        if save:
            plt.savefig('corner_plot.pdf')
            plt.close(fig)
        else:
            plt.show()
6,613
def sim_mat(fc7_feats):
    """
    Given a matrix of features, generate the similarity matrix S and sparsify it.
    :param fc7_feats: the fc7 features
    :return: matrix_S - the sparsified matrix S
    """
    print("Something")
    t = time.time()
    pdist_ = spatial.distance.pdist(fc7_feats)
    print('Created distance matrix' + ' ' + str(time.time() - t) + ' sec')
    t = time.time()
    dist_mat = spatial.distance.squareform(pdist_)
    print('Created square distance matrix' + ' ' + str(time.time() - t) + ' sec')
    del pdist_
    t = time.time()
    sigmas = np.sort(dist_mat, axis=1)[:, 7] + 1e-16
    matrice_prodotti_sigma = np.dot(sigmas[:, np.newaxis], sigmas[np.newaxis, :])
    print('Generated Sigmas' + ' ' + str(time.time() - t) + ' sec')
    t = time.time()
    dist_mat /= -matrice_prodotti_sigma
    print('Computed dists/-sigmas' + ' ' + str(time.time() - t) + ' sec')
    del matrice_prodotti_sigma
    t = time.time()
    W = np.exp(dist_mat, dist_mat)  # in-place exponential
    # W = np.exp(-(dist_mat / matrice_prodotti_sigma))
    np.fill_diagonal(W, 0.)
    # sparsify the matrix
    k = int(np.floor(np.log2(fc7_feats.shape[0])) + 1)
    n = W.shape[0]
    print('Created inplace similarity matrix' + ' ' + str(time.time() - t) + ' sec')
    t = time.time()
    for x in W:
        x[np.argpartition(x, n - k)[:(n - k)]] = 0.0
    print('Sparsify the matrix' + ' ' + str(time.time() - t) + ' sec')
    t = time.time()
    # matrix_S = np.zeros((n, n))
    m1 = W[np.triu_indices(n, k=1)]
    m2 = W.T[np.triu_indices(n, k=1)]
    W = spatial.distance.squareform(np.maximum(m1, m2))
    print('Symmetrized the similarity matrix' + ' ' + str(time.time() - t) + ' sec')
    return W
6,614
def text_split(in_text, insert_points, char_set):
    """
    Returns: Input Text Split into Text and Nonce Strings.
    """
    nonce_key = []
    encrypted_nonce = ""
    in_list = list(in_text)
    for pos in range(3967):
        if insert_points[pos] >= len(in_list) - 1:
            point = len(in_list) - 2
        else:
            point = insert_points[pos]
        char = in_list[point]
        in_list.pop(point)
        nonce_key.append(char)
        if char != char_set[-1]:  # compare by equality, not identity
            break
    length = ((len(nonce_key) - 1) * (len(char_set) - 2)) + char_set.index(nonce_key[-1])
    for pos in range(length):
        if insert_points[pos + len(nonce_key)] >= len(in_list) - 1:
            point = len(in_list) - 2
        else:
            point = insert_points[pos + len(nonce_key)]
        char = in_list[point]
        in_list.pop(point)
        encrypted_nonce = encrypted_nonce + char
    return "".join(in_list), encrypted_nonce
6,615
def set_global_manager(manager):
    """Set the global ResourceManager."""
    global RESOURCE_MANAGER
    RESOURCE_MANAGER = manager
6,616
def f_approximation(g_matrix, coeficients_array):
    """
    Return a vector with the approximate value of f, given the coefficients ak.
    """
    decimal.getcontext().prec = PRECSION
    decimal.getcontext().rounding = ROUNDING_MODE
    num_of_xs = len(g_matrix[0])
    num_of_coeficients = len(g_matrix)
    f_approx_array = np.full(num_of_xs, decimal.Decimal('0'))
    for i in range(0, num_of_xs):
        approx_sum = 0
        for k in range(0, num_of_coeficients):
            approx_sum += coeficients_array[k] * g_matrix[k][i]
        f_approx_array[i] = approx_sum
    return f_approx_array
6,617
def _module_exists(module_name):
    """
    Checks if a module exists.

    :param str module_name: module to check existence of

    :returns: **True** if module exists and **False** otherwise
    """
    try:
        __import__(module_name)
        return True
    except ImportError:
        return False
6,618
def users(request):
    """Show a list of users and their puzzles."""
    context = {'user_list': []}
    for user in User.objects.all().order_by('username'):
        objs = Puzzle.objects.filter(user=user, pub_date__lte=timezone.now()).order_by('-number')
        if objs:
            puzzle_list = []
            for puz in objs:
                puzzle_list.append({'number': puz.number, 'date': get_date_string(puz)})
            context['user_list'].append({'name': user.username, 'puzzles': puzzle_list})
    return render(request, 'puzzle/users.html', context)
6,619
def write_file(data_feed, file_path):
    """Write data to a file."""
    if data_feed is None:
        return
    with open(file_path, "w") as open_file:
        keys = data_feed[0].keys()
        header = ",".join(keys)
        open_file.write("{}\n".format(header))
        for data in data_feed:
            data_string = "^^".join(data.values())
            data_string = data_string.replace(",", "")
            data_string = data_string.replace("^^", ",")
            data_string = data_string.replace('"', "")
            # write the plain string; encoding to bytes here would write a
            # b'...' repr into the text-mode file
            open_file.write("{}\n".format(data_string))
    return
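A usage sketch of the writer above, with a hypothetical list of flat string-valued dicts:

rows = [
    {"name": "alpha", "value": "1,5"},
    {"name": "beta", "value": "2"},
]
write_file(rows, "/tmp/feed.csv")
# /tmp/feed.csv now contains (commas inside values are stripped first):
# name,value
# alpha,15
# beta,2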
6,620
def switchClock(onoff, alarmSymbol):
    """
    Changes the mode of the clock display, from time to text.

    :param onoff (bool): True=Time mode False=Text Display
    :param alarmSymbol (bool): indicates whether the alarm is set when in
        clock mode (alarm symbol is drawn)
    """
    print("clock", onoff)
6,621
def featurize(mode, output_filename=None):
    """
    Catch-all method to featurize either the train or test dataset and save to CSV.

    Params:
        mode: (str) TRAIN or TEST
        output_filename: (str) Optional - name of the csv to save the data
    """
    MODE = mode
    if not os.path.exists('train/') or not os.path.exists('test/'):
        train_test_split()
    if not os.path.exists('block_files/'):
        os.mkdir('block_files/')
    BLOCK_FILE = 'block_files/' + MODE + '.jl'
    CORPUS_FREQ_FILE = MODE + '/corpus_freq.json'

    ds_amzn = rltk.Dataset(reader=rltk.CSVReader(open(MODE + '/Amazon.csv', encoding='latin-1')),
                           record_class=AmazonRecord, adapter=rltk.MemoryAdapter())
    ds_goog = rltk.Dataset(reader=rltk.CSVReader(open(MODE + '/GoogleProducts.csv', encoding='latin-1')),
                           record_class=GoogleRecord, adapter=rltk.MemoryAdapter())
    try:
        block_handler = open(BLOCK_FILE, 'r')
        print("Block file exists. Reading from disk...")
    except FileNotFoundError:
        block_handler = rltk.InvertedIndexBlockGenerator(
            ds_amzn, ds_goog, writer=rltk.BlockFileWriter(BLOCK_FILE),
            tokenizer=tokenizer).generate()

    features = ['id1', 'id2', 'price_difference', 'desc_jaccard', 'desc_tf_idf',
                'desc_trigram', 'manufacturer_jaccard', 'manufacturer_jaro_winkler',
                'manufacturer_levenshtien', 'name_jaccard', 'name_jaro_winkler',
                'name_trigram', 'label']
    pairs = rltk.get_record_pairs(ds_amzn, ds_goog, rltk.BlockFileReader(block_handler))
    freq = get_document_frequency(CORPUS_FREQ_FILE, ds_amzn, ds_goog)

    if MODE == "train":
        print("Featurizing train")
        if not output_filename:
            output_filename = 'train/features_train.csv'
        featurize_all_records(pairs, features, output_filename, freq, TRAIN_DOC_SIZE)
    elif MODE == "test":
        print("Featurizing test")
        if not output_filename:
            output_filename = 'test/features_test.csv'
        featurize_all_records(pairs, features, output_filename, freq, TEST_DOC_SIZE)
6,622
def my_removefile(filename, my_sys_ldebug=0, fatal=0):
    """
    Removes a file. Arguments: path to the file and optionally a fatal flag.
    """
    # Check whether the given path is an existing file.
    if os.path.isfile(filename):
        try:
            if my_sys_ldebug:
                print("MY_REMOVEFILE: Deleting file: " + filename)
            os.remove(filename)
        except OSError:
            my_print("MY_REMOVEFILE: File " + str(filename) + " could not be removed.", fatal)
    else:
        my_print("MY_REMOVEFILE: File " + str(filename) + " does not exist.", fatal)
6,623
def MIPS_SPIRE_gen(phot_priors, sed_prior_model, chains=4, seed=5363, iter=1000,
                   max_treedepth=10, adapt_delta=0.8):
    """
    Fit the MIPS 24 micron band and the three SPIRE bands.

    :param phot_priors: list of xidplus.prior class objects.
        Order (MIPS24, SPIRE250, SPIRE350, SPIRE500)
    :param sed_prior_model: xidplus.sed.sed_prior class
    :param chains: number of chains
    :param iter: number of iterations
    :return: pystan fit object
    """
    prior24 = phot_priors[0]
    prior250 = phot_priors[1]
    prior350 = phot_priors[2]
    prior500 = phot_priors[3]

    # input data into a dictionary
    XID_data = {
        'nsrc': prior250.nsrc,
        'bkg_prior': [prior24.bkg[0], prior250.bkg[0], prior350.bkg[0], prior500.bkg[0]],
        'bkg_prior_sig': [prior24.bkg[1], prior250.bkg[1], prior350.bkg[1], prior500.bkg[1]],
        'conf_prior_sig': [0.0001, 0.1, 0.1, 0.1],
        'z_median': prior24.z_median,
        'z_sig': prior24.z_sig,
        'npix_psw': prior250.snpix,
        'nnz_psw': prior250.amat_data.size,
        'db_psw': prior250.sim,
        'sigma_psw': prior250.snim,
        'Val_psw': prior250.amat_data,
        'Row_psw': prior250.amat_row.astype(np.long),
        'Col_psw': prior250.amat_col.astype(np.long),
        'npix_pmw': prior350.snpix,
        'nnz_pmw': prior350.amat_data.size,
        'db_pmw': prior350.sim,
        'sigma_pmw': prior350.snim,
        'Val_pmw': prior350.amat_data,
        'Row_pmw': prior350.amat_row.astype(np.long),
        'Col_pmw': prior350.amat_col.astype(np.long),
        'npix_plw': prior500.snpix,
        'nnz_plw': prior500.amat_data.size,
        'db_plw': prior500.sim,
        'sigma_plw': prior500.snim,
        'Val_plw': prior500.amat_data,
        'Row_plw': prior500.amat_row.astype(np.long),
        'Col_plw': prior500.amat_col.astype(np.long),
        'npix_mips24': prior24.snpix,
        'nnz_mips24': prior24.amat_data.size,
        'db_mips24': prior24.sim,
        'sigma_mips24': prior24.snim,
        'Val_mips24': prior24.amat_data,
        'Row_mips24': prior24.amat_row.astype(np.long),
        'Col_mips24': prior24.amat_col.astype(np.long),
        'nTemp': sed_prior_model.shape[0],
        'nz': sed_prior_model.shape[2],
        'nband': sed_prior_model.shape[1],
        'SEDs': sed_prior_model,
    }

    # see if model has already been compiled. If not, compile and save it
    model_file = '/XID+MIPS_SPIRE_SED_gen'
    from xidplus.stan_fit import get_stancode
    sm = get_stancode(model_file)

    fit = sm.sampling(data=XID_data, iter=iter, chains=chains, seed=seed, verbose=True,
                      control=dict(max_treedepth=max_treedepth, adapt_delta=adapt_delta))
    # return fit data
    return fit
6,624
def check_cstr(solver, indiv):
    """Check the number of constraints violations of the individual

    Parameters
    ----------
    solver : Solver
        Global optimization problem solver
    indiv : individual
        Individual of the population

    Returns
    -------
    is_feasible : bool
        Individual feasibility
    """
    # Non valid simulation violates every constraint
    if not indiv.is_simu_valid:
        indiv.cstr_viol = len(solver.problem.constraint)
        return True  # To not add errors to infeasible

    # Browse constraints
    for constraint in solver.problem.constraint:
        # Compute value to compare
        var_val = constraint.get_variable(indiv.output)
        # Compare the value with the constraint
        type_const = constraint.type_const
        if type_const == "<=":
            if var_val > constraint.value:
                indiv.cstr_viol += 1
        elif type_const in ["==", "="]:
            if var_val != constraint.value:
                indiv.cstr_viol += 1
        elif type_const == ">=":
            if var_val < constraint.value:
                indiv.cstr_viol += 1
        elif type_const == "<":
            if var_val >= constraint.value:
                indiv.cstr_viol += 1
        elif type_const == ">":
            if var_val <= constraint.value:
                indiv.cstr_viol += 1
        else:
            raise ValueError("Wrong type of constraint")

    return indiv.cstr_viol == 0
6,625
def _FixFsSelectionBit(key, expected):
    """Write a repair script to fix a bad fsSelection bit.

    Args:
        key: The name of an fsSelection flag, eg 'ITALIC' or 'BOLD'.
        expected: Expected value, true/false, of the flag.
    Returns:
        A python script to fix the problem.
    """
    if not _ShouldFix('fsSelection'):
        return None
    op = '|='
    verb = 'set'
    mask = bin(fonts.FsSelectionMask(key))
    if not expected:
        op = '&='
        verb = 'unset'
        mask = '~' + mask
    return 'ttf[\'OS/2\'].fsSelection %s %s  # %s %s' % (op, mask, verb, key)
6,626
def checkThread(self):
    """
    Log the name of any exception raised in a worker thread.

    :rtype: None
    """
    for x in as_completed(self.futures):
        if x.exception() is not None:
            logging.error(x.exception())
            print(f"developer error: {x.exception()}")
        self.futures.remove(x)
    logging.info("Thread closed")
6,627
def dilation_dist(path_dilation, n_dilate=None):
    """
    Compute surface of distances with dilation.

    :param path_dilation: binary array with zeros everywhere except for paths
    :param n_dilate: how many dilation iterations to run --> defines radius of corridor
    :returns: 2dim array of same shape as path_dilation, with values
        0 = infinite distance from path, n_dilate = path location
    """
    saved_arrs = [path_dilation]
    if n_dilate is None:
        # compute number of iterations: maximum distance of pixel to line
        x_coords, y_coords = np.where(path_dilation)
        x_len, y_len = path_dilation.shape
        # dilate as much as the largest distance from the sides
        n_dilate = max([
            np.min(x_coords), x_len - np.max(x_coords),
            np.min(y_coords), y_len - np.max(y_coords)
        ])
    # dilate
    for _ in range(n_dilate):
        path_dilation = binary_dilation(path_dilation)
        saved_arrs.append(path_dilation)
    saved_arrs = np.sum(np.array(saved_arrs), axis=0)
    return saved_arrs
6,628
def no_lfs_size_limit(client):
    """Configure the environment to track all files in LFS independent of size."""
    client.set_value("renku", "lfs_threshold", "0b")
    client.repo.git.add(".renku/renku.ini")
    client.repo.index.commit("update renku.ini")
    yield client
6,629
def append(filename, string):
    """Open file for appending, dump string, close file."""
    with open(filename, "a") as op:
        op.write(string)
6,630
def plot_rgb_phases(absolute, phase):
    """
    Calculates a visualization of an inverse Fourier transform, where the
    absolute value is plotted as brightness and the phase is plotted as color.

    :param absolute: 2D numpy array containing the absolute value
    :param phase: 2D numpy array containing phase information in units of pi
        (should range from -1 to +1!)
    :return: numpy array containing red, green and blue values
    """
    red = 0.5 * (np.sin(phase * np.pi) + 1) * absolute / absolute.max()
    green = 0.5 * (np.sin(phase * np.pi + 2 / 3 * np.pi) + 1) * absolute / absolute.max()
    blue = 0.5 * (np.sin(phase * np.pi + 4 / 3 * np.pi) + 1) * absolute / absolute.max()
    return np.dstack([red, green, blue])
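A minimal usage sketch for the helper above, with a toy complex field (note the phase is rescaled to [-1, 1] by dividing np.angle by pi, matching the docstring):

import numpy as np
z = np.exp(2j * np.pi * np.random.rand(64, 64))  # unit-magnitude complex field
rgb = plot_rgb_phases(np.abs(z), np.angle(z) / np.pi)
assert rgb.shape == (64, 64, 3)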
6,631
def init(file: str, template: str, verbose=False, dry_run=False) -> None:
    """Copies template to file."""
    if exists(file):
        inp = input(
            "File '{}' already exists, overwrite with new LaTeX file (y/n) ? ".format(file)
        )
        if not inp or inp.lower()[0] != "y":
            exit(0)
    command = 'cp "{}" "{}"'.format(template, file)
    process = run_command(command, verbose, dry_run)
    TexmgrConstants.check_error(
        process, 'when initializing file "{}"'.format(file)
    )
6,632
def layer_view_attachment_warning():
    """Unlimited attachments are warnings"""
    content = {
        'id': str(uuid.uuid4()),
        '_type': 'CatXL',
        'attachment': {
            'currency': 'USD',
            'value': float_info.max,
        }
    }
    return convert_to_analyzere_object(content, LayerView)
6,633
async def test_multi_group_move(
    mock_can_messenger: AsyncMock, move_group_multiple: MoveGroups
) -> None:
    """It should start the next group once the prior has completed."""
    subject = MoveScheduler(move_groups=move_group_multiple)
    mock_sender = MockSendMoveCompleter(move_group_multiple, subject)
    mock_can_messenger.send.side_effect = mock_sender.mock_send
    position = await subject.run(can_messenger=mock_can_messenger)

    mock_can_messenger.send.assert_has_calls(
        calls=[
            call(
                node_id=NodeId.broadcast,
                message=md.ExecuteMoveGroupRequest(
                    payload=ExecuteMoveGroupRequestPayload(
                        group_id=UInt8Field(0),
                        cancel_trigger=UInt8Field(0),
                        start_trigger=UInt8Field(0),
                    )
                ),
            ),
            call(
                node_id=NodeId.broadcast,
                message=md.ExecuteMoveGroupRequest(
                    payload=ExecuteMoveGroupRequestPayload(
                        group_id=UInt8Field(1),
                        cancel_trigger=UInt8Field(0),
                        start_trigger=UInt8Field(0),
                    )
                ),
            ),
            call(
                node_id=NodeId.broadcast,
                message=md.ExecuteMoveGroupRequest(
                    payload=ExecuteMoveGroupRequestPayload(
                        group_id=UInt8Field(2),
                        cancel_trigger=UInt8Field(0),
                        start_trigger=UInt8Field(0),
                    )
                ),
            ),
        ]
    )
    assert len(position) == 5
    assert position[0][1].payload.current_position_um.value == 229000
    assert position[1][1].payload.current_position_um.value == 522000
    assert position[2][1].payload.current_position_um.value == 25000
    assert position[3][1].payload.current_position_um.value == 12000
    assert position[4][1].payload.current_position_um.value == 12000
6,634
def restart(bot: DeltaBot, message: Message, replies: Replies) -> None:
    """Restart the game in the game group it is sent."""
    with session_scope() as session:
        game = session.query(Game).filter_by(chat_id=message.chat.id).first()
        if game is None:
            replies.add("❌ This is not a game group.")
            return
        name, player = game.name, game.player
    _reset_game(_get_folder(bot), name, player)
    frotz_game = _get_game(name, player, bot)
    frotz_game.stop()
    replies.add(text=frotz_game.intro)
6,635
def create_resfinder_sqlite3_db(dbfile, mappings):
    """
    Create and fill an sqlite3 DB with ResFinder mappings.
    Expects mappings to be a list of tuples:
    (header, symbol, family, class, extra)
    """
    logging.info("Creating sqlite3 db: %s ...", dbfile)
    if os.path.isfile(dbfile):
        logging.warning("Overwriting previously existing dbfile: %s", dbfile)
        os.remove(dbfile)
        logging.debug("Removed pre-existing dbfile: %s", dbfile)
    con = sqlite3.connect(dbfile)
    con.execute("CREATE TABLE resfinder(header TEXT PRIMARY KEY, symbol TEXT, family TEXT, class TEXT, extra TEXT)")
    con.executemany("INSERT INTO resfinder VALUES (?,?,?,?,?)", mappings)
    num_mappings = con.execute("SELECT Count(*) FROM resfinder").fetchone()[0]
    con.commit()
    logging.debug("Inserted %i mappings into sqlite3 DB", num_mappings)
    return con
6,636
def normal_shock_pressure_ratio(M, gamma):
    """Gives the normal shock static pressure ratio as a function of upstream Mach number."""
    return 1.0 + 2.0 * gamma / (gamma + 1.0) * (M**2.0 - 1.0)
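A quick numeric check of the relation above: for gamma = 1.4 and M = 2, p2/p1 = 1 + (2 * 1.4 / 2.4) * (4 - 1) = 4.5, the textbook value:

assert abs(normal_shock_pressure_ratio(2.0, 1.4) - 4.5) < 1e-12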
6,637
def create_small_test_fixture(output_dir: str = '/tmp') -> None:
    """
    This is how I created the transformer_model.tar.gz.
    After running this, go to the specified output dir and run

        tar -czvf transformer_model.tar.gz model/

    In case you need to regenerate the fixture for some reason.
    """
    import json
    import pathlib

    model_dir = pathlib.Path(output_dir) / 'model'
    model_dir.mkdir(exist_ok=True)  # pylint: disable=no-member

    symbols = ["e", "w", "o", "wo", "."]
    byte_pairs = [(sym1, sym2 + end)
                  for sym1 in symbols          # prefer earlier first symbol
                  for sym2 in symbols          # if tie, prefer earlier second symbol
                  for end in ('</w>', '')]     # if tie, prefer ending a word
    encoding = {f"{sym1}{sym2}": idx for idx, (sym1, sym2) in enumerate(byte_pairs)}
    encoding["<unk>"] = 0

    with open(model_dir / 'encoder_bpe.json', 'w') as encoder_file:
        json.dump(encoding, encoder_file)

    with open(model_dir / 'vocab.bpe', 'w') as bpe_file:
        bpe_file.write("#version 0.0\n")
        for sym1, sym2 in byte_pairs:
            bpe_file.write(f"{sym1} {sym2}\n")
        bpe_file.write("\n")

    transformer = OpenaiTransformer(embedding_dim=10, num_heads=2, num_layers=2,
                                    vocab_size=(50 + 50), n_ctx=50)
    transformer.dump_weights(output_dir, num_pieces=2)
6,638
def _lorentz_berthelot(
    epsilon_1: float, epsilon_2: float, sigma_1: float, sigma_2: float
) -> Tuple[float, float]:
    """Apply Lorentz-Berthelot mixing rules to a pair of LJ parameters."""
    return numpy.sqrt(epsilon_1 * epsilon_2), 0.5 * (sigma_1 + sigma_2)
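A small check of the mixing rules above, with hypothetical parameter values:

eps_mix, sigma_mix = _lorentz_berthelot(0.1, 0.4, 0.3, 0.5)
assert abs(eps_mix - 0.2) < 1e-12    # sqrt(0.1 * 0.4) (geometric mean)
assert abs(sigma_mix - 0.4) < 1e-12  # 0.5 * (0.3 + 0.5) (arithmetic mean)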
6,639
def test_searchChunked_chunksize(client):
    """Same as test_searchChunked_simple, but setting a chunksize now."""
    # chunksize is an internal tuning parameter in searchChunked()
    # that should not have any visible impact on the result.  So we
    # may test the same assumptions as above.  We choose the chunksize
    # small enough such that the result cannot be fetched at once and
    # thus force searchChunked() to repeat the search internally.
    query = "User"
    users = client.search(query)
    chunksize = int(len(users) / 2)
    if chunksize < 1:
        pytest.skip("too few objects for this test")
    res = client.searchChunked(query, chunksize=chunksize)
    assert isinstance(res, Iterable)
    objs = list(res)
    assert objs == users
6,640
def test_utc_to_local():
    """Check if passed utc datestamp becomes local one."""
    import pytz
    from tokendito import helpers
    from tzlocal import get_localzone

    utc = datetime.now(pytz.utc)
    local_time = utc.replace(tzinfo=pytz.utc).astimezone(tz=get_localzone())
    local_time = local_time.strftime("%Y-%m-%d %H:%M:%S %Z")
    assert helpers.utc_to_local(utc) == local_time
6,641
def test_iterate_input_serializer(aiida_profile, generate_iterator):
    """Test of the classmethod `_iterate_input_serializer` of `BaseIterator`."""
    import pytest

    with pytest.raises(ValueError):
        generate_iterator._iterate_input_serializer({"www": "w"})
    it_over = generate_iterator._iterate_input_serializer({"www": [1, 2]})
    it_ov_el = it_over.get_attribute("www")
    assert isinstance(it_ov_el, orm.List)
    assert isinstance(orm.load_node(it_ov_el[0]), orm.Int)
6,642
def verify_password_str(password, password_db_str):
    """Verify password matches database string."""
    split_password_db = password_db_str.split('$')
    algorithm = split_password_db[0]
    salt = split_password_db[1]
    return password_db_str == generate_password_str(algorithm, salt, password)
6,643
def get_ical_file_name(zip_file):
    """Gets the name of the ical file within the zip file."""
    ical_file_names = zip_file.namelist()
    if len(ical_file_names) != 1:
        raise Exception(
            "ZIP archive had %i files; expected 1." % len(ical_file_names)
        )
    return ical_file_names[0]
6,644
def unquoted_str(draw):
    """Generate strings compatible with our definition of an unquoted string."""
    start = draw(st.text(alphabet=(ascii_letters + "_"), min_size=1))
    body = draw(st.text(alphabet=(ascii_letters + digits + "_")))
    return start + body
6,645
def _reduce_attribute(states: List[State],
                      key: str,
                      default: Optional[Any] = None,
                      reduce: Callable[..., Any] = _mean) -> Any:
    """Find the first attribute matching key from states.

    If none are found, return default.
    """
    attrs = list(_find_state_attributes(states, key))
    if not attrs:
        return default
    if len(attrs) == 1:
        return attrs[0]
    return reduce(*attrs)
6,646
def sync_garmin(fit_file):
    """Sync generated fit file to Garmin Connect"""
    garmin = GarminConnect()
    session = garmin.login(ARGS.garmin_username, ARGS.garmin_password)
    return garmin.upload_file(fit_file.getvalue(), session)
6,647
def get_paths(graph: Graph, filter: Callable) -> List:
    """
    Collect all the paths consisting of valid vertices.
    Return one path at a time because the vertex indices may be modified.
    """
    result = []
    if filter is None:
        return result
    visited = set()
    vs = graph.topological_sorting()
    for vertex in vs:
        if not filter(vertex, graph) or vertex in visited:
            continue
        visited.add(vertex)
        path = [vertex]
        slist = graph.successors(vertex)
        while len(set(slist)) == 1 and filter(slist[0], graph) and slist[0] not in visited:
            cur = slist[0]
            path.append(cur)
            visited.add(cur)
            slist = graph.successors(cur)
        if len(path) > 0:
            result.append(path)
    return result
6,648
def create_app(app_name=PKG_NAME):
    """Initialize the core application."""
    app = Flask(app_name)
    CORS(app)
    with app.app_context():
        # Register Restx Api
        api.init_app(app)
        return app
6,649
def smtlib_to_sympy_constraint(
        smtlib_input: str,
        interpreted_constants: Dict[str, Callable] = default_interpreted_constants,
        interpreted_unary_functions: Dict[str, Callable] = default_interpreted_unary_functions):
    """Convert SMTLIB(v2) constraints into sympy constraints analyzable via SYMPAIS.

    This function is experimental and introduced as an example.
    It is implemented on top of PySMT (https://github.com/pysmt/pysmt).
    Additional features can be added extending the `SMTToSympyWalker` class.

    Args:
        smtlib_input: SMT constraint as a string in SMTLIB(v2) format, as accepted by PySMT
        interpreted_constants: predefined interpreted constants to be declared in the
            SMT problem. Default: E (Euler), PI
        interpreted_unary_functions: predefined interpreted functions Real -> Real.
            Default: sin, cos, tan, asin, acos, atan, log, exp, sqrt

    Returns:
        The simplified sympy constraint equivalent to the SMT input.
    """
    interpreted_symbols_declarations = '\n'.join(
        [f'(declare-const {cname} Real)' for cname in interpreted_constants.keys()])
    interpreted_symbols_declarations += '\n' + '\n'.join([
        f'(declare-fun {fname} (Real) Real)'
        for fname in interpreted_unary_functions.keys()
    ])
    smtlib_with_interpreted_symbols = (
        interpreted_symbols_declarations + '\n' + smtlib_input)

    reset_env()
    parser = SmtLibParser()
    script = parser.get_script(cStringIO(smtlib_with_interpreted_symbols))
    f = script.get_last_formula()
    converter = SMTToSympyWalker(get_env(), interpreted_constants,
                                 interpreted_unary_functions)
    f_sympy = converter.walk(f)
    f_sympy = sympy.logic.simplify_logic(f_sympy)
    f_sympy = sympy.simplify(f_sympy)
    if f_sympy.atoms(sympy.logic.Or):
        warnings.warn(
            'Disjunctive constraints are not supported by RealPaver. Consider '
            'replacing it with an adequate interval constraint propagation tool '
            'to benefit from all the features of SYMPAIS')
    return f_sympy
6,650
async def revert(app, change_id: str) -> dict:
    """
    Revert a history change given by the passed ``change_id``.

    :param app: the application object
    :param change_id: a unique id for the change
    :return: the updated OTU
    """
    db = app["db"]
    change = await db.history.find_one({"_id": change_id}, ["index"])
    if change["index"]["id"] != "unbuilt" or change["index"]["version"] != "unbuilt":
        raise virtool.errors.DatabaseError(
            "Change is included in a build and is not revertible"
        )
    otu_id, otu_version = change_id.split(".")
    if otu_version != "removed":
        otu_version = int(otu_version)
    _, patched, history_to_delete = await patch_to_version(app, otu_id, otu_version - 1)
    # Remove the old sequences from the collection.
    await db.sequences.delete_many({"otu_id": otu_id})
    if patched is not None:
        patched_otu, sequences = virtool.otus.utils.split(patched)
        # Add the reverted sequences to the collection.
        for sequence in sequences:
            await db.sequences.insert_one(sequence)
        # Replace the existing otu with the patched one. If it doesn't exist, insert it.
        await db.otus.replace_one({"_id": otu_id}, patched_otu, upsert=True)
    else:
        await db.otus.delete_one({"_id": otu_id})
    await db.history.delete_many({"_id": {"$in": history_to_delete}})
    return patched
6,651
def days_away(date):
    """Takes the string form of a date and returns the number of days
    between today and that date."""
    mod_date = string_to_date(date)
    return abs((current_date() - mod_date).days)
6,652
def node_constraints_transmission(model):
    """
    Constrains e_cap symmetrically for transmission nodes.
    """
    m = model.m

    # Constraint rules
    def c_trans_rule(m, y, x):
        y_remote, x_remote = transmission.get_remotes(y, x)
        if y_remote in m.y_trans:
            return m.e_cap[y, x] == m.e_cap[y_remote, x_remote]
        else:
            return po.Constraint.NoConstraint

    # Constraints
    m.c_transmission_capacity = po.Constraint(m.y_trans, m.x, rule=c_trans_rule)
6,653
def plot_mtf(faxis, MTF, labels=None):
    """Plot the MTF. Return the figure reference."""
    fig_lineplot = plt.figure()
    plt.rc('axes', prop_cycle=PLOT_STYLES)
    for i in range(0, MTF.shape[0]):
        plt.plot(faxis, MTF[i, :])
    plt.xlabel('spatial frequency [cycles/length]')
    plt.ylabel('Radial MTF')
    plt.gca().set_ylim([0, 1])
    if labels is not None:
        plt.legend([str(n) for n in labels])
    plt.title("Modulation Transfer Function for various angles")
    return fig_lineplot
6,654
def make_distributor_init(distributor_init, dll_filename):
    """Create a _distributor_init.py file for the vcomp140.dll.

    This file is imported first when importing the sklearn package
    so as to pre-load the vendored vcomp140.dll.
    """
    with open(distributor_init, "wt") as f:
        f.write(textwrap.dedent("""
            '''Helper to preload vcomp140.dll to prevent "not found" errors.

            Once the vcomp140.dll is preloaded, the namespace is made
            available to any subsequent vcomp140.dll. This is created as
            part of the scripts that build the wheel.
            '''

            import os
            import os.path as op
            from ctypes import WinDLL

            if os.name == "nt":
                # Load the vcomp140.dll in sklearn/.libs by convention
                dll_path = op.join(op.dirname(__file__), ".libs", "{0}")
                WinDLL(op.abspath(dll_path))
            """.format(dll_filename)))
6,655
def forward_ref_structure_hook(context, converter, data, forward_ref):
    """Applied to ForwardRef model and enum annotations

    - Map reserved words in json keys to appropriate (safe) names in model.
    - Handle ForwardRef types until github.com/Tinche/cattrs/pull/42/ is fixed

    Note: this is the reason we need a "context" param and have to use a
    partial func to register the hook. Once the issue is resolved we can
    remove "context" and the partial.
    """
    data = hooks.tr_data_keys(data)
    actual_type = eval(forward_ref.__forward_arg__, context, locals())
    if issubclass(actual_type, enum.Enum):
        instance = converter.structure(data, actual_type)
    elif issubclass(actual_type, model.Model):
        # cannot use converter.structure - recursion error
        instance = converter.structure_attrs_fromdict(data, actual_type)
    else:
        raise DeserializeError(f"Unknown type to deserialize: {actual_type}")
    return instance
6,656
def test_slicestim_3d():
    """Test slicing a 3D stimulus into overlapping segments."""
    np.random.seed(0)
    stim_size = (100, 5, 5)
    stim = np.random.randn(*stim_size)
    history = 10
    sliced_stim = stimulustools.slicestim(stim, history)
    assert sliced_stim.ndim == stim.ndim + 1
    assert sliced_stim.shape[0] == stim.shape[0] - history + 1
    for i in range(stim_size[0] - history + 1):
        assert np.all(sliced_stim[i] == stim[i:i + history, ...]), 'slicing failed'
6,657
def cross(x: VariableLike, y: VariableLike) -> VariableLike:
    """Element-wise cross product.

    Parameters
    ----------
    x:
        Left hand side operand.
    y:
        Right hand side operand.

    Raises
    ------
    scipp.DTypeError
        If the dtype of the input is not vector3.

    Returns
    -------
    :
        The cross product of the input vectors.
    """
    return _call_cpp_func(_cpp.cross, x, y)
6,658
def p_dividerchar_spec(p):
    """
    dividerchar_spec : DIVIDERCHAR CHAR_PAIRS_QUOTED SEMICOLON
    """
    p[0] = DEF.declarations.dividerchar(p[2])
6,659
def delete(service, name, parent_id=None,
           appProperties=defaults.GDRIVE_USE_APPPROPERTIES):
    """Delete a file/folder on Google Drive.

    Parameters
    ----------
    service : googleapiclient.discovery.Resource
        Google API resource for GDrive v3
    name : str
        Name of file/folder
    parent_id : str, optional
        Parent ID of folder containing file (to narrow search)
    appProperties : bool
        Search for application-specific files using ``appProperties``

    Returns
    -------
    str
        ID of deleted file/folder
    """
    name_id = exists(service, name, parent_id=parent_id)
    resp = service.files().delete(fileId=name_id).execute()
    return name_id
6,660
def _is_LoginForm_in_this_frame(driver, frame):
    """Check whether the given frame contains a login form."""
    driver.switch_to.frame(frame)  # switch into this frame
    if _is_LoginForm_in_this_page(driver):
        return True
    else:
        driver.switch_to.parent_frame()  # switch back if nothing was found
        return False
6,661
def parse_range_header(specifier, len_content):
    """Parses a range header into a list of pairs (start, stop)"""
    if not specifier or '=' not in specifier:
        return []

    ranges = []
    unit, byte_set = specifier.split('=', 1)
    unit = unit.strip().lower()
    if unit != "bytes":
        return []

    for val in byte_set.split(","):
        val = val.strip()
        if '-' not in val:
            return []
        if val.startswith("-"):
            # suffix-byte-range-spec: this form specifies the last N
            # bytes of an entity-body
            start = len_content + int(val)
            if start < 0:
                start = 0
            stop = len_content
        else:
            # byte-range-spec: first-byte-pos "-" [last-byte-pos]
            start, stop = val.split("-", 1)
            start = int(start)
            # Add 1 to make stop exclusive (HTTP spec is inclusive)
            stop = int(stop) + 1 if stop else len_content
            if start >= stop:
                return []
        ranges.append((start, stop))
    return ranges
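A usage sketch of the parser above (stop values are exclusive, per the comment in the code):

assert parse_range_header("bytes=0-499", 1000) == [(0, 500)]
assert parse_range_header("bytes=-500", 1000) == [(500, 1000)]  # last 500 bytes
assert parse_range_header("bytes=500-", 1000) == [(500, 1000)]  # offset 500 to end
assert parse_range_header("pages=1-2", 1000) == []              # unsupported unit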
6,662
def acf(x, lags=None):
    """
    Computes the empirical autocorrelation function.

    :param x: array (n,), sequence of data points
    :param lags: int, maximum lag to compute the ACF for. If None, this is set
        to n-1. Default is None.
    :return gamma: array (lags,), values of the ACF at lags 0 to lags
    """
    gamma = np.correlate(x, x, mode='full')  # Size here is always 2*len(x)-1
    gamma = gamma[int((gamma.size - 1) / 2):]  # Keep only second half
    if lags is not None and lags < len(gamma):
        gamma = gamma[0:lags + 1]
    return gamma / gamma[0]
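A small numeric check of the function above: for x = [1, 2, 3] the full autocorrelation is [3, 8, 14, 8, 3], so after keeping the second half and normalizing by the lag-0 value the ACF is [1, 8/14, 3/14]:

import numpy as np
x = np.array([1.0, 2.0, 3.0])
assert np.allclose(acf(x), [1.0, 8.0 / 14.0, 3.0 / 14.0])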
6,663
def copyInfotainmentServerFiles(tarName, targetId=None):
    """Stuff the server binary into a tar file."""
    # grab the pre-built binary
    osImage = getSetting('osImage', targetId=targetId)
    infotainmentBinDir = getBinDir('infotainment-server', targetId=targetId)
    cpFilesToBuildDir(infotainmentBinDir, pattern="infotainment_server", targetId=targetId)
    tarFiles = ["infotainment_server"]

    infotainmentAppDir = getCyberphysAppDir('infotainment-server')
    runtimeFilesDir = os.path.join(infotainmentAppDir, osImage)
    if osImage == 'debian':
        cpFilesToBuildDir(runtimeFilesDir, pattern="infotainment-server.service", targetId=targetId)
        tarFiles += ["infotainment-server.service"]
    elif osImage == 'FreeBSD':
        cpFilesToBuildDir(runtimeFilesDir, pattern="infotainment-server.sh", targetId=targetId)
        tarFiles += ["infotainment-server.sh"]
    else:
        logAndExit(f"Installing infotainment-server is not supported on <{osImage}>",
                   exitCode=EXIT.Dev_Bug)

    buildDirPathTuplePartial = functools.partial(buildDirPathTuple, targetId=targetId)
    filesList = map(buildDirPathTuplePartial, tarFiles)
    return filesList
6,664
def _login(client, user, users):
    """Log in the user via the session and return it."""
    login_user_via_session(client, user=User.query.get(user.id))
    return user
6,665
def search(request):
    """Renders the search page."""
    queryset_list = Listing.objects.order_by('-list_date')
    if 'keywords' in request.GET:
        keywords = request.GET['keywords']
        # Check that it is not empty
        if keywords:
            queryset_list = queryset_list.filter(description__icontains=keywords)
    if 'city' in request.GET:
        city = request.GET['city']
        # Check that it is not empty
        if city:
            queryset_list = queryset_list.filter(city__iexact=city)
    if 'state' in request.GET:
        state = request.GET['state']
        # Check that it is not empty
        if state:
            queryset_list = queryset_list.filter(state__iexact=state)
    if 'bedrooms' in request.GET:
        bedrooms = request.GET['bedrooms']
        # Here LTE (lte) means less than or equal
        if bedrooms:
            queryset_list = queryset_list.filter(bedrooms__lte=bedrooms)
    if 'price' in request.GET:
        price = request.GET['price']
        # Here LTE (lte) means less than or equal
        if price:
            queryset_list = queryset_list.filter(price__lte=price)
    context = {
        "price_choices": price_choices,
        "bedroom_choices": bedroom_choices,
        "state_choices": state_choices,
        "listings": queryset_list,
        "values": request.GET
    }
    return render(request, 'listings/search.html', context)
6,666
def generate_cutout(butler, skymap, ra, dec, band='N708', data_type='deepCoadd',
                    half_size=10.0 * u.arcsec, psf=True, verbose=False):
    """Generate a single cutout image."""
    if not isinstance(half_size, u.Quantity):
        # Assume that this is in pixel
        half_size_pix = int(half_size)
    else:
        half_size_pix = int(half_size.to('arcsec').value / PIXEL_SCALE)

    if isinstance(ra, u.Quantity):
        ra = ra.value
    if isinstance(dec, u.Quantity):
        dec = dec.value

    # Width and height of the post-stamps
    stamp_shape = (half_size_pix * 2 + 1, half_size_pix * 2 + 1)

    # Make a list of (RA, Dec) that covers the cutout region
    radec_list = np.array(
        sky_cone(ra, dec, half_size_pix * PIXEL_SCALE * u.Unit('arcsec'), steps=50)).T

    # Retrieve the Patches that cover the cutout region
    img_patches = _get_patches(butler, skymap, radec_list, band, data_type=data_type)
    if img_patches is None:
        if verbose:
            print('***** No data at {:.5f} {:.5f} *****'.format(ra, dec))
        return None

    # Coordinate of the image center
    coord = geom.SpherePoint(ra * geom.degrees, dec * geom.degrees)

    # Making the stacked cutout
    cutouts = []
    idx, bbox_sizes, bbox_origins = [], [], []

    for img_p in img_patches:
        # Generate cutout
        cut, x0, y0 = _get_single_cutout(img_p, coord, half_size_pix)
        cutouts.append(cut)
        # Original lower corner pixel coordinate
        bbox_origins.append([x0, y0])
        # New lower corner pixel coordinate
        xnew, ynew = cut.getBBox().getBeginX() - x0, cut.getBBox().getBeginY() - y0
        idx.append([xnew, xnew + cut.getBBox().getWidth(),
                    ynew, ynew + cut.getBBox().getHeight()])
        # Area of the cutout region on this patch in unit of pixels
        # Will reverse rank all the overlapped images by this
        bbox_sizes.append(cut.getBBox().getWidth() * cut.getBBox().getHeight())

    # Stitch cutouts together with the largest bboxes inserted last
    stamp = afwImage.MaskedImageF(
        geom.BoxI(geom.Point2I(0, 0), geom.Extent2I(*stamp_shape)))
    bbox_sorted_ind = np.argsort(bbox_sizes)
    for i in bbox_sorted_ind:
        masked_img = cutouts[i].getMaskedImage()
        stamp[idx[i][0]: idx[i][1], idx[i][2]: idx[i][3]] = masked_img

    # Build the new WCS of the cutout
    stamp_wcs = _build_cutout_wcs(coord, cutouts, bbox_sorted_ind[-1], bbox_origins)

    cutout = afwImage.ExposureF(stamp, stamp_wcs)

    if bbox_sizes[bbox_sorted_ind[-1]] < (half_size_pix * 2 + 1) ** 2:
        flag = 1
    else:
        flag = 2

    # The final product of the cutout
    if psf:
        psf = _get_psf(cutouts[bbox_sorted_ind[-1]], coord)
        return cutout, psf, flag
    return cutout, flag
6,667
def main():
    """ Main Program """
    pygame.init()

    # Set the height and width of the screen
    size = [SCREEN_WIDTH, SCREEN_HEIGHT]
    screen = pygame.display.set_mode(size)
    pygame.display.set_caption("Pingwiny z Rovaniemi")

    # Create the player
    player = Player()

    # Create all the levels
    level_list = []
    level_list.append(Level_01(player))
    level_list.append(Level_02(player))
    level_list.append(Level_03(player))

    # Set the current level
    current_level_no = 0
    current_level = level_list[current_level_no]

    active_sprite_list = pygame.sprite.Group()
    player.level = current_level
    for enemy in current_level.enemy_list:
        enemy.level = current_level
    player.rect.x = player.level.start[0]
    player.rect.y = player.level.start[1]
    active_sprite_list.add(player)
    bubble_list = pygame.sprite.Group()

    # Load the current high score
    high_score_file = open("high_score.txt", "r")
    high_score_str = high_score_file.readlines()
    high_score = [int(high_score_str[0]), int(high_score_str[1])]
    high_score_file.close()

    font = pygame.font.Font(None, 45)
    timerfont = pygame.font.Font(None, 40)
    titlefont = pygame.font.Font(None, 60)
    frame_count = 0
    minutes = 0
    seconds = 0
    game_over = False
    win_game = False

    # Loop until the user clicks the close button.
    done = False

    # Used to manage how fast the screen updates
    clock = pygame.time.Clock()

    display_menu = True
    display_instructions = False
    option = 0

    # Read the settings from file
    settings_file = open("settings.txt", "r")
    settings_str = settings_file.readlines()
    settings = [int(settings_str[0]), int(settings_str[1])]
    settings_file.close()
    difficulty = settings[0]
    mute = settings[1]

    # -------- Main menu Loop -----------
    while not done and display_menu:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                done = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_DOWN:
                    option += 1
                    option = option % 5
                if event.key == pygame.K_UP:
                    option -= 1
                    option = option % 5
                if display_instructions == True and event.key == pygame.K_SPACE:
                    display_instructions = False
                if event.key == pygame.K_RETURN and option == 0:
                    display_menu = False
                if event.key == pygame.K_RETURN and option == 1:
                    display_instructions = True
                if event.key == pygame.K_RETURN and option == 2:
                    difficulty += 1
                    difficulty = difficulty % 3
                if event.key == pygame.K_RETURN and option == 3:
                    mute *= -1
                if event.key == pygame.K_RETURN and option == 4:
                    done = True

        # Set the screen background
        screen.fill(BG_BLUE)
        wall_list = pygame.sprite.Group()
        walls = []
        seg_pion = int(SCREEN_HEIGHT / 40)
        seg_poz = int(SCREEN_WIDTH / 40)
        for i in range(seg_poz):
            walls.append([40, 40, 40 * i, 0])                   # ceiling
            walls.append([40, 40, 40 * i, SCREEN_HEIGHT - 40])  # floor
        for i in range(seg_pion):
            walls.append([40, 40, 0, 40 * i])                   # left wall
            walls.append([40, 40, SCREEN_WIDTH - 40, 40 * i])   # right wall
        for platform in walls:
            wall = Platform(platform[0], platform[1], 1)
            wall.rect.x = platform[2]
            wall.rect.y = platform[3]
            wall_list.add(wall)
        wall_list.draw(screen)

        if display_instructions == True:
            ster = titlefont.render("Sterowanie", True, BLUE)
            ster_rect = ster.get_rect()
            ster_x = SCREEN_WIDTH / 2 - ster_rect.width / 2
            screen.blit(ster, [ster_x, 80])
            up = pygame.image.load("./obrazki/Keyboard_Black_Arrow_Up.png").convert()
            up = pygame.transform.scale(up, (50, 50))
            up.set_colorkey(BLACK)
            screen.blit(up, [200, 350])
            up_ins = font.render("Skok", True, BLACK)
            screen.blit(up_ins, [300, 360])
            left = pygame.image.load("./obrazki/Keyboard_Black_Arrow_Left.png").convert()
            left = pygame.transform.scale(left, (50, 50))
            left.set_colorkey(BLACK)
            screen.blit(left, [200, 250])
            left_ins = font.render("Ruch w lewo", True, BLACK)
            screen.blit(left_ins, [300, 260])
            right = pygame.image.load("./obrazki/Keyboard_Black_Arrow_Right.png").convert()
            right = pygame.transform.scale(right, (50, 50))
            right.set_colorkey(BLACK)
            screen.blit(right, [200, 150])
            right_ins = font.render("Ruch w prawo", True, BLACK)
            screen.blit(right_ins, [300, 160])
            space = pygame.image.load("./obrazki/Keyboard_Black_Space.png").convert()
            space = pygame.transform.scale(space, (50, 50))
            space.set_colorkey(BLACK)
            screen.blit(space, [200, 450])
            space_ins = font.render("Strzał bąbelkiem", True, BLACK)
            screen.blit(space_ins, [300, 460])
            powrot = font.render("Jeśli chcesz wrócić do Menu, wciśnij SPACJĘ", True, WHITE)
            powrot_rect = powrot.get_rect()
            powrot_x = SCREEN_WIDTH / 2 - powrot_rect.width / 2
            screen.blit(powrot, [powrot_x, 520])
        else:
            nazwa = titlefont.render("Pingwiny z Rovaniemi", True, BLACK)
            nazwa_rect = nazwa.get_rect()
            nazwa_x = SCREEN_WIDTH / 2 - nazwa_rect.width / 2
            screen.blit(nazwa, [nazwa_x, 80])
            start = font.render("Start Gry", True, BLACK)
            sterowanie = font.render("Sterowanie", True, BLACK)
            poziom_trudnosci = font.render("Poziom Trudności", True, BLACK)
            dzwiek = font.render("Dźwięk", True, BLACK)
            wyjscie = font.render("Wyjście", True, BLACK)
            if option == 0:
                start = font.render("Start Gry", True, BLUE)
            elif option == 1:
                sterowanie = font.render("Sterowanie", True, BLUE)
            elif option == 2:
                if difficulty == 0:
                    poziom_trudnosci = font.render("Poziom Trudności: Łatwy", True, BLUE)
                elif difficulty == 1:
                    poziom_trudnosci = font.render("Poziom Trudności: Średni", True, BLUE)
                else:
                    poziom_trudnosci = font.render("Poziom Trudności: Trudny", True, BLUE)
            elif option == 3:
                if mute == -1:
                    dzwiek = font.render("Dźwięk: włączony", True, BLUE)
                else:
                    dzwiek = font.render("Dźwięk: wyłączony", True, BLUE)
            else:
                wyjscie = font.render("Wyjście", True, BLUE)
            start_rect = start.get_rect()
            start_x = SCREEN_WIDTH / 2 - start_rect.width / 2
            screen.blit(start, [start_x, 170])
            sterowanie_rect = sterowanie.get_rect()
            sterowanie_x = SCREEN_WIDTH / 2 - sterowanie_rect.width / 2
            screen.blit(sterowanie, [sterowanie_x, 230])
            poziom_trudnosci_rect = poziom_trudnosci.get_rect()
            poziom_trudnosci_x = SCREEN_WIDTH / 2 - poziom_trudnosci_rect.width / 2
            screen.blit(poziom_trudnosci, [poziom_trudnosci_x, 290])
            dzwiek_rect = dzwiek.get_rect()
            dzwiek_x = SCREEN_WIDTH / 2 - dzwiek_rect.width / 2
            screen.blit(dzwiek, [dzwiek_x, 350])
            wyjscie_rect = wyjscie.get_rect()
            wyjscie_x = SCREEN_WIDTH / 2 - wyjscie_rect.width / 2
            screen.blit(wyjscie, [wyjscie_x, 410])
            naw = font.render("Nawigacja strzałkami góra/dół", True, WHITE)
            naw_rect = naw.get_rect()
            naw_x = SCREEN_WIDTH / 2 - naw_rect.width / 2
            screen.blit(naw, [naw_x, 480])
            naw2 = font.render("Wybór/zmiana przyciskiem ENTER", True, WHITE)
            naw2_rect = naw2.get_rect()
            naw2_x = SCREEN_WIDTH / 2 - naw2_rect.width / 2
            screen.blit(naw2, [naw2_x, 520])

        clock.tick(60)
        pygame.display.flip()

    for enemy in current_level.enemy_list:
        if enemy.direction == "R":
            enemy.change_x = 1 + 0.5 * difficulty
        else:
            enemy.change_x = -1 - 0.5 * difficulty

    # Sounds
    pop = pygame.mixer.Sound("./dzwieki/pop.ogg")
    santa = pygame.mixer.Sound("./dzwieki/santa-ho-ho-ho-2.ogg")
    switch = pygame.mixer.Sound("./dzwieki/switch5.ogg")

    # Mute
    if mute == 1:
        pop.set_volume(0)
        santa.set_volume(0)
        switch.set_volume(0)

    # Save the settings
    settings_file = open("settings.txt", "w")
    settings_file.writelines([str(difficulty) + "\n", str(mute)])
    settings_file.close()

    # -------- Main Program Loop -----------
    while not done:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                done = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    player.go_left()
                if event.key == pygame.K_RIGHT:
                    player.go_right()
                if event.key == pygame.K_UP:
                    player.jump()
                if event.key == pygame.K_SPACE:
                    bubble = player.bubble()
                    bubble.level = current_level
                    active_sprite_list.add(bubble)
                    bubble_list.add(bubble)
                    if player.direction == "R":
                        bubble.where_pop = bubble.rect.x + bubble.range + player.bonus - 50 * difficulty
                    else:
                        bubble.where_pop = player.rect.x - bubble.range - player.bonus + 50 * difficulty
            if event.type == pygame.KEYUP:
                if event.key == pygame.K_LEFT and player.change_x < 0:
                    player.stop()
                if event.key == pygame.K_RIGHT and player.change_x > 0:
                    player.stop()

        # Update the player and bubbles
        active_sprite_list.update()

        # Update items in the level
        current_level.update()

        # Change level when the player walks through the door
        if player.rect.left > SCREEN_WIDTH:
            current_level_no += 1
            if current_level_no == 3:
                win_game = True
                done = True
            else:
                current_level = level_list[current_level_no]
                active_sprite_list = pygame.sprite.Group()
                player.level = current_level
                current_level.difficulty = difficulty
                for enemy in current_level.enemy_list:
                    enemy.level = current_level
                player.rect.x = player.level.start[0]
                player.rect.y = player.level.start[1]
                active_sprite_list.add(player)
                bubble_list = pygame.sprite.Group()

        # When the player falls through the floor, they reappear at the ceiling
        if player.rect.top > SCREEN_HEIGHT:
            player.rect.bottom = 0
        # Same for the penguins
        for enemy in current_level.enemy_list:
            if enemy.rect.top > SCREEN_HEIGHT:
                enemy.rect.bottom = 0

        for bubble in bubble_list:
            # Enemies hit by a bubble
            enemy_hit_list = pygame.sprite.spritecollide(bubble, current_level.enemy_list, False)
            # If we hit an enemy, the bubble disappears
            if len(enemy_hit_list) > 0:
                bubble_list.remove(bubble)
                active_sprite_list.remove(bubble)
            for enemy in enemy_hit_list:
                enemy.bubble()
            wall_hit_list = pygame.sprite.spritecollide(bubble, current_level.wall_list, False)
            # The bubble pops on collision with a wall
            if len(wall_hit_list) > 0:
                pop.play()
                bubble_list.remove(bubble)
                active_sprite_list.remove(bubble)
            # The bubble disappears when it has flown too far
            if ((player.direction == "R" and bubble.rect.x > bubble.where_pop)
                    or (player.direction == "L" and bubble.rect.x + bubble.rect.width < bubble.where_pop)):
                pop.play()
                bubble_list.remove(bubble)
                active_sprite_list.remove(bubble)

        for enemy in current_level.enemy_inbubble_list:
            # Escaping from the bubble
            koniec = pygame.time.get_ticks()
            if koniec - enemy.poczatek > (7000 - 1000 * difficulty):
                pop.play()
                enemy.unbubble()
            elif (player.rect.right > enemy.rect.left - 2) and (player.rect.left < enemy.rect.right + 2) \
                    and (player.rect.top < enemy.rect.bottom + 2) and (player.rect.bottom > enemy.rect.top - 2):
                pop.play()
                current_level.enemy_list.remove(enemy)
                current_level.enemy_inbubble_list.remove(enemy)

        gifts_collected = pygame.sprite.spritecollide(player, current_level.gift_list, True)
        for gift in gifts_collected:
            if gift.type == "Blue":
                santa.play()
            elif gift.type == "Green":
                player.bonus += 100
            elif gift.type == "Red":
                player.speed += 2

        lever_pushed = pygame.sprite.spritecollide(player, current_level.lever_list, False)
        if len(lever_pushed) > 0 and len(current_level.enemy_list) == 0:
            for candy in current_level.lever_list:
                if candy.pushed == False:
                    switch.play()
                    candy.image = pygame.image.load("./obrazki/RTSobject_10.png").convert()
                    candy.image = candy.image.subsurface((12, 16, 41, 30))
                    candy.image.set_colorkey(BLACK)
                    candy.rect = candy.image.get_rect()
                    candy.rect.x = current_level.candy_pushed[0]
                    candy.rect.y = current_level.candy_pushed[1]
                    candy.pushed = True
            current_level.door_list.empty()

        if player.dead:
            done = True
            game_over = True

        total_seconds = frame_count // 30
        # Divide by 60 to get total minutes
        minutes = total_seconds // 60
        # Use modulus (remainder) to get seconds
        seconds = total_seconds % 60
        # Use python string formatting to format in leading zeros
        output_string = "Czas: {0:02}:{1:02}".format(minutes, seconds)

        current_level.draw(screen)
        active_sprite_list.draw(screen)

        # Blit to the screen
        text = timerfont.render(output_string, True, BLUE)
        screen.blit(text, [80, 10])
        frame_count += 1

        clock.tick(30)
        pygame.display.flip()

    quit = False
    while game_over and not quit:
        screen.fill(BG_BLUE)
        wall_list = pygame.sprite.Group()
        walls = []
        seg_pion = int(SCREEN_HEIGHT / 40)
        seg_poz = int(SCREEN_WIDTH / 40)
        for i in range(seg_poz):
            walls.append([40, 40, 40 * i, 0])                   # ceiling
            walls.append([40, 40, 40 * i, SCREEN_HEIGHT - 40])  # floor
        for i in range(seg_pion):
            walls.append([40, 40, 0, 40 * i])                   # left wall
            walls.append([40, 40, SCREEN_WIDTH - 40, 40 * i])   # right wall
        for platform in walls:
            wall = Platform(platform[0], platform[1], 1)
            wall.rect.x = platform[2]
            wall.rect.y = platform[3]
            wall_list.add(wall)
        wall_list.draw(screen)
        koniec = font.render("Koniec gry", True, BLUE)
        koniec_rect = koniec.get_rect()
        koniec_x = SCREEN_WIDTH / 2 - koniec_rect.width / 2
        koniec_y = SCREEN_HEIGHT / 2 - koniec_rect.height / 2
        screen.blit(koniec, [koniec_x, koniec_y])
        powrot = font.render("Jeśli chcesz zagrać jeszcze raz, wciśnij SPACJĘ", True, WHITE)
        powrot_rect = powrot.get_rect()
        powrot_x = SCREEN_WIDTH / 2 - powrot_rect.width / 2
        screen.blit(powrot, [powrot_x, 520])
        pygame.display.flip()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                quit = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_SPACE:
                    main()

    quit = False
    current_score = [minutes, seconds]
    while win_game and not quit:
        screen.fill(BG_BLUE)
        wall_list = pygame.sprite.Group()
        walls = []
        seg_pion = int(SCREEN_HEIGHT / 40)
        seg_poz = int(SCREEN_WIDTH / 40)
        for i in range(seg_poz):
            walls.append([40, 40, 40 * i, 0])                   # ceiling
            walls.append([40, 40, 40 * i, SCREEN_HEIGHT - 40])  # floor
        for i in range(seg_pion):
            walls.append([40, 40, 0, 40 * i])                   # left wall
            walls.append([40, 40, SCREEN_WIDTH - 40, 40 * i])   # right wall
        for platform in walls:
            wall = Platform(platform[0], platform[1], 1)
            wall.rect.x = platform[2]
            wall.rect.y = platform[3]
            wall_list.add(wall)
        wall_list.draw(screen)
        grat = font.render("Gratulacje!", True, BLUE)
        grat_rect = grat.get_rect()
        grat_x = SCREEN_WIDTH / 2 - grat_rect.width / 2
        grat_y = SCREEN_HEIGHT / 2 - grat_rect.height / 2
        output_string = "{0:01}:{1:02}".format(minutes, seconds)
        czas = font.render("Twój czas to: " + output_string, True, BLUE)
        czas_rect = czas.get_rect()
        czas_x = SCREEN_WIDTH / 2 - czas_rect.width / 2
        czas_y = SCREEN_HEIGHT / 2 - czas_rect.height / 2 + 40
        if current_score[0] < high_score[0] or (current_score[0] == high_score[0] and current_score[1] < high_score[1]):
            high = font.render("Pobiłeś rekord!", True, BLUE)
            high_score_file = open("high_score.txt", "w")
            high_score_file.writelines([str(current_score[0]) + "\n", str(current_score[1])])
            high_score_file.close()
        else:
            high = font.render("Aktualny rekord to: " + str(high_score[0]) + ":" + str(high_score[1]), True, BLUE)
        high_rect = high.get_rect()
        high_x = SCREEN_WIDTH / 2 - high_rect.width / 2
        high_y = SCREEN_HEIGHT / 2 - high_rect.height / 2 + 80
        screen.blit(grat, [grat_x, grat_y])
        screen.blit(czas, [czas_x, czas_y])
        screen.blit(high, [high_x, high_y])
        powrot = font.render("Jeśli chcesz zagrać jeszcze raz, wciśnij SPACJĘ", True, WHITE)
        powrot_rect = powrot.get_rect()
        powrot_x = SCREEN_WIDTH / 2 - powrot_rect.width / 2
        screen.blit(powrot, [powrot_x, 520])
        pygame.display.flip()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                quit = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_SPACE:
                    main()

    pygame.quit()
6,668
def get_arraytypes ():
    """pygame.sndarray.get_arraytypes (): return tuple

    Gets the array system types currently supported.

    Checks which array system types are available and returns them as a
    tuple of strings. The values of the tuple can be used directly in
    the use_arraytype () method. If no supported array system could be
    found, None will be returned.
    """
    vals = []
    if __hasnumeric:
        vals.append ("numeric")
    if __hasnumpy:
        vals.append ("numpy")
    if len (vals) == 0:
        return None
    return tuple (vals)
6,669
def is_request_authentic(request, secret_token: bytes = conf.WEBHOOK_SECRET_TOKEN):
    """
    Examine the given request object to determine if it was sent by an authorized source.

    :param request: Request object to examine for authenticity
    :type request: :class:`~chalice.app.Request`
    :param secret_token: Shared secret token used to create payload hash
    :type: :class:`~bytes`
    :return: Response object indicating whether or not the request is authentic
    :rtype: :class:`~lopper.response.Response`
    """
    signature = request.headers.get('X-Hub-Signature')
    if not signature:
        return response.unauthorized('Missing "X-Hub-Signature" header')
    return auth.is_authentic(signature, request.raw_body, secret_token)
6,670
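The `auth.is_authentic` helper is not shown in this snippet. For context, a GitHub-style "X-Hub-Signature" header carries an HMAC hex digest of the raw request body; a minimal sketch of what such a check typically looks like (standard library only, assuming the sha1 scheme GitHub webhooks use):

import hashlib
import hmac

def is_authentic_sketch(signature_header: str, body: bytes, secret_token: bytes) -> bool:
    # The header has the form "sha1=<hexdigest>".
    algorithm, _, sent_digest = signature_header.partition('=')
    if algorithm != 'sha1':
        return False
    expected = hmac.new(secret_token, body, hashlib.sha1).hexdigest()
    # compare_digest avoids leaking information through timing differences.
    return hmac.compare_digest(expected, sent_digest)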
def verify_convenience_header(folder):
    """
    Performs the actual checking of convenience header for specific folder.

    Checks that
    1) The header even exists
    2) That all includes in the header are sorted
    3) That there are no duplicated includes
    4) That all includes that should be in the header are actually present in the header
    5) That there are no superfluous includes that should not be in the header
    """
    global errors_found
    path = normalized_path(folder.path)
    assert path.startswith(source_path), '{} does not start with {}'.format(path, source_path)

    stripped_path = path[len(source_path) + 1:]
    path_pieces = stripped_path.split('/')
    if path == source_path:
        header_name = 'catch_all.hpp'
    else:
        header_name = 'catch_{}_all.hpp'.format('_'.join(path_pieces))

    # 1) Does it exist?
    full_path = path + '/' + header_name
    if not os.path.isfile(full_path):
        errors_found = True
        print('Missing convenience header: {}'.format(full_path))
        return

    file_incs = includes_from_file(path + '/' + header_name)

    # 2) Are the includes sorted?
    if sorted(file_incs) != file_incs:
        errors_found = True
        print("'{}': Includes are not in sorted order!".format(header_name))

    # 3) Are there no duplicates?
    duplicated = get_duplicates(file_incs)
    for duplicate in duplicated:
        errors_found = True
        print("'{}': Duplicated include: '{}'".format(header_name, duplicate))

    target_includes = normalize_includes(collated_includes(path))
    # Avoid requiring the convenience header to include itself
    target_includes = [x for x in target_includes if header_name not in x]

    # 4) Are all required headers present?
    file_incs_set = set(file_incs)
    for include in target_includes:
        if include not in file_incs_set:
            errors_found = True
            print("'{}': missing include '{}'".format(header_name, include))

    # 5) Are there any superfluous headers?
    desired_set = set(target_includes)
    for include in file_incs:
        if include not in desired_set:
            errors_found = True
            print("'{}': superfluous include '{}'".format(header_name, include))
6,671
def circular_mask_string(centre_ra_dec_posns, aperture_radius="1arcmin"):
    """Get a mask string representing circular apertures about (x,y) tuples"""
    mask = ''
    if centre_ra_dec_posns is None:
        return mask
    for coords in centre_ra_dec_posns:
        mask += 'circle [ [ {x} , {y}] , {r} ]\n'.format(
            x=coords[0], y=coords[1], r=aperture_radius)
    return mask
6,672
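A quick usage sketch for the mask builder above; the coordinate values here are made up for illustration:

positions = [("18:59:16.8", "-63:11:21"), ("19:01:02.3", "-63:05:44")]
mask = circular_mask_string(positions, aperture_radius="30arcsec")
print(mask)
# circle [ [ 18:59:16.8 , -63:11:21] , 30arcsec ]
# circle [ [ 19:01:02.3 , -63:05:44] , 30arcsec ]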
def dataset_hdf5(dataset, tmp_path):
    """Make an HDF5 dataset and write it to disk."""
    path = str(tmp_path / 'test.h5')
    dataset.write_hdf5(path, object_id_itemsize=10)
    return path
6,673
def _usage():
    """Print command line usage."""
    txt = "[INFO] Usage: %s ldtfile tsfile finalfile" % (sys.argv[0])
    txt += " anomaly_gt_prefix climo_gt_prefix LSM yyyymmddhh"
    print(txt)
    print("[INFO] where:")
    print("[INFO]  ldtfile: LDT parameter file with full lat/lon data")
    print("[INFO]  tsfile: LVT 'TS' soil moisture anomaly file")
    print("[INFO]  finalfile: LVT 'FINAL' soil moisture anomaly file")
    print("[INFO]  anomaly_gt_prefix: prefix for new anomaly GeoTIFF files")
    print("[INFO]  climo_gt_prefix: prefix for new climatology GeoTIFF files")
    print("[INFO]  LSM: land surface model")
    print("[INFO]  yyyymmddhh: Valid date and time (UTC)")
6,674
def test_exists_calculate_index(
    mocked_buckets_hash_map,
):  # pylint: disable=redefined-outer-name
    """
    GIVEN hash map with mocked _calculate_index and key
    WHEN exists is called with the key
    THEN _calculate_index is called with the key.
    """
    mocked_buckets_hash_map._calculate_index.return_value = 0
    key = "key 1"

    mocked_buckets_hash_map.exists(key)

    mocked_buckets_hash_map._calculate_index.assert_called_once_with(key)
6,675
def _make_indexable(iterable):
    """Ensure iterable supports indexing or convert to an indexable variant.

    Convert sparse matrices to csr and other non-indexable iterable
    to arrays. Let `None` and indexable objects (e.g. pandas dataframes)
    pass unchanged.

    Parameters
    ----------
    iterable : {list, dataframe, array, sparse} or None
        Object to be converted to an indexable iterable.
    """
    if sp.issparse(iterable):
        return iterable.tocsr()
    elif hasattr(iterable, "__getitem__") or hasattr(iterable, "iloc"):
        return iterable
    elif iterable is None:
        return iterable
    return np.array(iterable)
6,676
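A short illustration of the dispatch above; the imports match the `sp` and `np` aliases the function relies on:

import numpy as np
import scipy.sparse as sp

X = sp.coo_matrix(np.eye(3))
print(type(_make_indexable(X)))    # COO is not indexable -> converted to CSR
print(_make_indexable([1, 2, 3]))  # lists have __getitem__ -> returned unchanged
print(_make_indexable(None))       # None -> returned unchanged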
def get_all_readers():
    """Get all the readers from the module."""
    readers = []
    for _, name in getmembers(sys.modules[__name__]):
        if isinstance(name, abc.ABCMeta) and name.__name__ != 'Reader':
            readers.append(name)
    return readers
6,677
def fib_for(n):
    """
    Compute the Fibonacci sequence using a for loop

    Parameters
    ----------
    n : integer
        the index of the Fibonacci number to compute

    Returns
    -------
    the nth Fibonacci number in the sequence
    """
    res = [0, 1]
    for i in range(n - 1):
        res.append(res[i] + res[i + 1])
    return res[n]
6,678
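Sanity check for the loop-based Fibonacci above:

print([fib_for(n) for n in range(10)])
# [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]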
def command_factory(command):
    """A factory which returns functions for direct daemon communication.

    This factory will create a function which sends a payload to the daemon
    and returns the unpickled object which is returned by the daemon.

    Args:
        command (string): The type of payload this should be. This determines
            as what kind of instruction this will be interpreted by the daemon.

    Returns:
        function: The created function.
    """
    def communicate(body=None, root_dir=None):
        """Communicate with the daemon.

        This function sends a payload to the daemon and returns the
        unpickled object sent by the daemon.

        Args:
            body (dict): Any other arguments that should be put into the payload.
            root_dir (str): The root directory in which we expect the daemon.
                We need this to connect to the daemon's socket.

        Returns:
            function: The returned payload.
        """
        # Avoid a mutable default argument; each call gets a fresh dict.
        if body is None:
            body = {}
        client = connect_socket(root_dir)
        body['mode'] = command
        # Delete the func entry we use to call the correct function with
        # argparse, as functions can't be pickled and this shouldn't be
        # sent to the daemon.
        if 'func' in body:
            del body['func']
        data_string = pickle.dumps(body, -1)
        client.send(data_string)

        # Receive message, unpickle and return it
        response = receive_data(client)
        return response
    return communicate
6,679
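A hedged usage sketch for the factory above, assuming `connect_socket` and `receive_data` are importable from the same module and a daemon is listening; the "status" command name and the root_dir path are illustrative only:

# Build a callable for a hypothetical "status" instruction and invoke it.
get_status = command_factory('status')
response = get_status(body={'verbose': True}, root_dir='/tmp/daemon')
print(response)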
def load_prepare_saif_data(threshold=0.25):
    """
    Loads and prepares saif's data.

    Parameters
    ----------
    threshold : float
        Only data with intensities equal to or above this threshold
        will be kept (range 0-1).

    Returns
    -------
    DataFrame : pd.DataFrame
        Concatenated tweets with labels as a pandas DataFrame.
    """
    files = get_saif_files()
    df = pd.concat([pd.read_csv(f, sep='\t', index_col=0,
                                names=['tweet', 'emotion', 'intensity'])
                    for f in files], axis=0)
    df = df[df['intensity'] >= threshold]
    df.drop('intensity', axis=1, inplace=True)
    return df
6,680
def nonmax_suppression(harris_resp, halfwidth=2):
    """
    Takes a Harris response from an image, performs nonmax suppression, and
    outputs the x,y values of the corners in the image.

    :param harris_resp: Harris response for an image which is an array of the
        same shape as the original image.
    :param halfwidth: The size of the padding to use in building the window
        (matrix) for nonmax suppression. The window will have a total shape
        of (2*halfwidth+1, 2*halfwidth+1).
    :return: Tuple of x and y coordinates for the corners that were found
        from the Harris response after nonmax suppression.
    """
    cornersx = []
    cornersy = []
    h, w = harris_resp.shape[:2]
    # Rows (i) iterate over the height and columns (j) over the width; the
    # original looped both axes over the wrong dimensions, which breaks on
    # non-square images.
    for i in range(halfwidth, h - halfwidth - 1):
        for j in range(halfwidth, w - halfwidth - 1):
            # Slice the local window instead of copying it element by element.
            window = harris_resp[i - halfwidth:i + halfwidth + 1,
                                 j - halfwidth:j + halfwidth + 1]
            center = harris_resp[i, j]
            if center == 0:
                continue
            # Keep the pixel only if it is the maximum of its window.
            if center == np.amax(window):
                cornersx.append(j)
                cornersy.append(i)
    return cornersx, cornersy
6,681
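As a performance note, the same suppression can be done without explicit Python loops. A sketch using scipy.ndimage.maximum_filter; equivalence to the loop version assumes the same window size and the zero-response exclusion, and note the filter also evaluates border pixels, which the loop version skips:

import numpy as np
from scipy.ndimage import maximum_filter

def nonmax_suppression_fast(harris_resp, halfwidth=2):
    # A pixel survives if it is nonzero and equals the max of its window.
    size = 2 * halfwidth + 1
    local_max = maximum_filter(harris_resp, size=size)
    mask = (harris_resp == local_max) & (harris_resp != 0)
    ys, xs = np.nonzero(mask)
    return list(xs), list(ys)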
def create_credit_request(course_key, provider_id, username):
    """
    Initiate a request for credit from a credit provider.

    This will return the parameters that the user's browser will need to POST
    to the credit provider. It does NOT calculate the signature.

    Only users who are eligible for credit (have satisfied all credit
    requirements) are allowed to make requests.

    A provider can be configured either with *integration enabled* or not.

    If automatic integration is disabled, this method will simply return
    a URL to the credit provider and method set to "GET", so the student
    can visit the URL and request credit directly. No database record will
    be created to track these requests.

    If automatic integration *is* enabled, then this will also return the
    parameters that the user's browser will need to POST to the credit
    provider. These parameters will be digitally signed using a secret key
    shared with the credit provider.

    A database record will be created to track the request with a
    32-character UUID. The returned dictionary can be used by the user's
    browser to send a POST request to the credit provider.

    If a pending request already exists, this function should return a
    request description with the same UUID. (Other parameters, such as the
    user's full name may be different than the original request).

    If a completed request (either accepted or rejected) already exists,
    this function will raise an exception. Users are not allowed to make
    additional requests once a request has been completed.

    Arguments:
        course_key (CourseKey): The identifier for the course.
        provider_id (str): The identifier of the credit provider.
        username (str): The user initiating the request.

    Returns: dict

    Raises:
        UserIsNotEligible: The user has not satisfied eligibility
            requirements for credit.
        CreditProviderNotConfigured: The credit provider has not been
            configured for this course.
        RequestAlreadyCompleted: The user has already submitted a request
            and received a response from the credit provider.

    Example Usage:
        >>> create_credit_request(course.id, "hogwarts", "ron")
        {
            "url": "https://credit.example.com/request",
            "method": "POST",
            "parameters": {
                "request_uuid": "557168d0f7664fe59097106c67c3f847",
                "timestamp": 1434631630,
                "course_org": "HogwartsX",
                "course_num": "Potions101",
                "course_run": "1T2015",
                "final_grade": "0.95",
                "user_username": "ron",
                "user_email": "ron@example.com",
                "user_full_name": "Ron Weasley",
                "user_mailing_address": "",
                "user_country": "US",
                "signature": "cRCNjkE4IzY+erIjRwOQCpRILgOvXx4q2qvx141BCqI="
            }
        }

    """
    try:
        user_eligibility = CreditEligibility.objects.select_related('course').get(
            username=username,
            course__course_key=course_key
        )
        credit_course = user_eligibility.course
        credit_provider = CreditProvider.objects.get(provider_id=provider_id)
    except CreditEligibility.DoesNotExist:
        log.warning(
            'User "%s" tried to initiate a request for credit in course "%s", '
            'but the user is not eligible for credit',
            username, course_key
        )
        raise UserIsNotEligible  # lint-amnesty, pylint: disable=raise-missing-from
    except CreditProvider.DoesNotExist:
        log.error('Credit provider with ID "%s" has not been configured.', provider_id)
        raise CreditProviderNotConfigured  # lint-amnesty, pylint: disable=raise-missing-from

    # Check if we've enabled automatic integration with the credit
    # provider. If not, we'll show the user a link to a URL
    # where the user can request credit directly from the provider.
    # Note that we do NOT track these requests in our database,
    # since the state would always be "pending" (we never hear back).
    if not credit_provider.enable_integration:
        return {
            "url": credit_provider.provider_url,
            "method": "GET",
            "parameters": {}
        }
    else:
        # If automatic credit integration is enabled, then try
        # to retrieve the shared signature *before* creating the request.
        # That way, if there's a misconfiguration, we won't have requests
        # in our system that we know weren't sent to the provider.
        shared_secret_key = get_shared_secret_key(credit_provider.provider_id)
        check_keys_exist(shared_secret_key, credit_provider.provider_id)

        if isinstance(shared_secret_key, list):
            # if keys exist, and keys are stored as a list
            # then we know at least 1 is available for [0]
            shared_secret_key = [key for key in shared_secret_key if key][0]

    # Initiate a new request if one has not already been created
    credit_request, created = CreditRequest.objects.get_or_create(
        course=credit_course,
        provider=credit_provider,
        username=username,
    )

    # Check whether we've already gotten a response for a request,
    # If so, we're not allowed to issue any further requests.
    # Skip checking the status if we know that we just created this record.
    if not created and credit_request.status != "pending":
        log.warning(
            (
                'Cannot initiate credit request because the request with UUID "%s" '
                'exists with status "%s"'
            ), credit_request.uuid, credit_request.status
        )
        raise RequestAlreadyCompleted

    if created:
        credit_request.uuid = uuid.uuid4().hex

    # Retrieve user account and profile info
    user = User.objects.select_related('profile').get(username=username)

    # Retrieve the final grade from the eligibility table
    try:
        final_grade = CreditRequirementStatus.objects.get(
            username=username,
            requirement__namespace="grade",
            requirement__name="grade",
            requirement__course__course_key=course_key,
            status="satisfied"
        ).reason["final_grade"]

        # NOTE (CCB): Limiting the grade to seven characters is a hack for ASU.
        if len(str(final_grade)) > 7:
            final_grade = f'{final_grade:.5f}'
        else:
            final_grade = str(final_grade)
    except (CreditRequirementStatus.DoesNotExist, TypeError, KeyError):
        msg = 'Could not retrieve final grade from the credit eligibility table for ' \
              'user [{user_id}] in course [{course_key}].'.format(user_id=user.id, course_key=course_key)
        log.exception(msg)
        raise UserIsNotEligible(msg)  # lint-amnesty, pylint: disable=raise-missing-from

    # Get the student's enrollment date
    course_enrollment = CourseEnrollment.get_enrollment(user, course_key)
    enrollment_date = course_enrollment.created if course_enrollment else ""

    # Get the student's course completion date
    completion_date = get_last_exam_completion_date(course_key, username)

    parameters = {
        "request_uuid": credit_request.uuid,
        "timestamp": to_timestamp(datetime.datetime.now(pytz.UTC)),
        "course_org": course_key.org,
        "course_num": course_key.course,
        "course_run": course_key.run,
        "enrollment_timestamp": to_timestamp(enrollment_date) if enrollment_date else "",
        "course_completion_timestamp": to_timestamp(completion_date) if completion_date else "",
        "final_grade": final_grade,
        "user_username": user.username,
        "user_email": user.email,
        "user_full_name": user.profile.name,
        "user_mailing_address": "",
        "user_country": (
            user.profile.country.code
            if user.profile.country.code is not None
            else ""
        ),
    }

    credit_request.parameters = parameters
    credit_request.save()

    if created:
        log.info('Created new request for credit with UUID "%s"', credit_request.uuid)
    else:
        log.info(
            'Updated request for credit with UUID "%s" so the user can re-issue the request',
            credit_request.uuid
        )

    # Sign the parameters using a secret key we share with the credit provider.
    parameters["signature"] = signature(parameters, shared_secret_key)

    return {
        "url": credit_provider.provider_url,
        "method": "POST",
        "parameters": parameters
    }
6,682
def is_valid_project_root(project_root: pathlib.Path) -> bool:
    """Check if the project root is a valid trestle project root."""
    if project_root is None or project_root == '' or len(project_root.parts) <= 0:
        return False
    trestle_dir = pathlib.Path.joinpath(project_root, const.TRESTLE_CONFIG_DIR)
    if trestle_dir.exists() and trestle_dir.is_dir():
        return True
    return False
6,683
def make_2D_predictions_into_one_hot_4D(prediction_2D, dim):
    """
    This method gets 2D prediction of shape (#batch, #kpts)
    and then returns 4D one_hot maps of shape (#batch, #kpts, #dim, #dim)
    """
    # getting one_hot maps of predicted locations
    # one_hot_maps is of shape (#batch, #kpts, #dim * #dim)
    one_hot_Maps = get_one_hot_map(prediction_2D, dim)
    num_batch, num_kpt = prediction_2D.shape
    one_hot_Maps_4D = one_hot_Maps.reshape(num_batch, num_kpt, dim, dim)
    return one_hot_Maps_4D
6,684
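The helper `get_one_hot_map` is not shown in this snippet. A plausible numpy sketch of what it computes, assuming the predictions are flat indices into a dim*dim grid that get scattered into one-hot vectors:

import numpy as np

def get_one_hot_map_sketch(prediction_2D, dim):
    # prediction_2D holds flat indices into a dim*dim grid, shape (#batch, #kpts).
    num_batch, num_kpt = prediction_2D.shape
    one_hot = np.zeros((num_batch, num_kpt, dim * dim), dtype=np.float32)
    b_idx, k_idx = np.meshgrid(np.arange(num_batch), np.arange(num_kpt), indexing='ij')
    one_hot[b_idx, k_idx, prediction_2D.astype(int)] = 1.0
    return one_hot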
def aszarr(path, verbose, remap, flip, host, output):
    """
    Convert arbitrary dataset into Zarr dataset format.

    If OUTPUT is not specified, it will default to 'SOURCE.zarr'
    \f
    Args:
        path (str): path to the original dataset
        verbose (str, optional): how verbose the logger should be
        output (str, optional): path to the destination
    """
    # we know this is annoying, silence it
    logging.getLogger("tifffile").setLevel(logging.ERROR)

    # convert verbose level
    verbose = 2 if verbose > 2 else verbose
    level = {0: "WARNING", 1: "INFO", 2: "DEBUG"}.get(verbose)
    coloredlogs.install(
        level=level, fmt="%(asctime)s %(levelname)s %(message)s", datefmt="%H:%M:%S"
    )

    # ensure we do not have ambiguous input
    src_path = os.path.abspath(path)

    logger.info("loading source dataset")
    show_trace = logger.getEffectiveLevel() <= logging.DEBUG
    ds = open_dataset(src_path, show_trace=show_trace)
    ds = _remap_and_flip(ds, remap, flip)

    # generate the output
    if output is None:
        parent, dname = os.path.split(src_path)
        dst_path = os.path.join(parent, f"{dname}.zarr")
    else:
        dst_path = output
    logger.info(f'converted dataset will save to "{dst_path}"')

    dump, overwrite = True, False
    if os.path.exists(dst_path):
        # output already exists, ask user what's next
        dump, overwrite = button_dialog(
            title="Zarr dataset exists",
            text="What should we do?",
            buttons=[
                ("Skip", (False, None)),
                ("Update", (True, False)),
                ("Overwrite", (True, True)),
            ],
        ).run()
    else:
        dump, overwrite = True, False

    if dump:
        with get_client(address=host):
            ZarrDataset.dump(dst_path, ds, overwrite=overwrite)
        logger.info("complete zarr dataset conversion")
6,685
def checkwritable(val, depth=0):
    """Check whether a value is valid for writing to the database.

    If it is, returns nothing. If not, raises TypeError.

    This should embody the same logic as BSON.encode(val, check_keys=True).
    Why not just use that? Because the BSON class displays this weird
    runtime warning:
        couldn't encode - reloading python modules and trying again
    I don't know what that means, but it can't be good for me.
    """
    if val is None:
        return
    if isinstance(val, (bool, int, float, str, bytes, ObjectId, datetime.datetime)):
        return
    if depth >= 8:
        raise TypeError('Database object cannot be recursive')
    if isinstance(val, (list, tuple)):
        for subval in val:
            checkwritable(subval, depth=depth+1)
        return
    if isinstance(val, dict):
        for (key, subval) in val.items():
            if not isinstance(key, str):
                raise TypeError('Database dicts must have string keys: %s' % (key,))
            if '.' in key:
                raise TypeError('Database dict keys must not contain period: %s' % (key,))
            if key.startswith('$'):
                raise TypeError('Database dict keys must not start with dollar: %s' % (key,))
            checkwritable(subval, depth=depth+1)
        return
    raise TypeError('Not a database type: %s' % (val,))
6,686
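Quick illustration of the validator above (ObjectId comes from the bson package that pymongo installs; it is only needed if your values contain ObjectIds):

# Valid: nested dicts/lists of primitive types pass silently.
checkwritable({'name': 'alice', 'scores': [1, 2.5, None]})

# Invalid: a dotted key raises TypeError.
try:
    checkwritable({'a.b': 1})
except TypeError as exc:
    print(exc)  # Database dict keys must not contain period: a.b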
def delete_user_model(creds: PostgresCredentials, uid: str, model_id: UUID):
    """DB function used to delete a user model from the database.

    Args:
        creds (PostgresCredentials): credentials used to connect to Postgres
        uid (str): username that owns the model
        model_id (UUID): identifier of the model to delete
    """
    with get_cursor(creds) as db:
        db.execute('DELETE FROM models WHERE model_id = %s AND username = %s',
                   (model_id, uid))
6,687
def display_code_marginal_densities(codes, num_hist_bins, log_prob=False,
                                    ignore_vals=[], lines=True, overlaid=False,
                                    plot_title=""):
    """
    Estimates the marginal density of coefficients of a code over some dataset

    Parameters
    ----------
    codes : ndarray(float32, size=(D, s))
        The codes for a dataset of size D. These are the vectors x for each
        sample from the dataset. The value s is the dimensionality of the code
    num_hist_bins : int
        The number of bins to use when we make a histogram estimate of the
        empirical density.
    log_prob : bool, optional
        Display probabilities on a logarithmic scale. Useful for most sparse
        codes. Default False.
    ignore_vals : list, optional
        A list of code values to ignore from the estimate. Default [].
        TODO: make this more flexible so this can ignore values in a certain range.
    lines : bool, optional
        If true, plot the binned counts using a line rather than bars. This
        can make it a lot easier to compare multiple datasets at once but
        can look kind of jagged if there aren't many samples
    overlaid : bool, optional
        If true, then make a single plot with the marginal densities all
        overlaid on top of each other. This gets messy for more than a few
        coefficients. Alternatively, display the densities in their own
        separate plots. Default False.
    plot_title : str, optional
        The title of the plot. Default ""

    Returns
    -------
    code_density_figs : list
        A list containing pyplot figures. Can be saved separately, or
        whatever from the calling function
    """
    def filter_code_vals(scalar_code_vals):
        if len(ignore_vals) > 0:
            keep_these_inds = scalar_code_vals != ignore_vals[0]
            for i in range(1, len(ignore_vals)):
                keep_these_inds = np.logical_and(
                    keep_these_inds, scalar_code_vals != ignore_vals[i])
            return scalar_code_vals[keep_these_inds]
        else:
            return scalar_code_vals

    # TODO: get this going for convolutional codes
    if overlaid:
        # there's just a single plot
        fig = plt.figure(figsize=(15, 15))
        fig.suptitle(plot_title, fontsize=15)
        ax = plt.subplot(1, 1, 1)
        blue = plt.get_cmap('Blues')
        cmap_indeces = np.linspace(0.25, 1.0, codes.shape[1])
        histogram_min = np.min(codes)
        histogram_max = np.max(codes)
        histogram_bin_edges = np.linspace(histogram_min, histogram_max,
                                          num_hist_bins + 1)
        histogram_bin_centers = (histogram_bin_edges[:-1] +
                                 histogram_bin_edges[1:]) / 2
        for de_idx in range(codes.shape[1]):
            code = filter_code_vals(codes[:, de_idx])
            counts, _ = np.histogram(code, histogram_bin_edges)
            empirical_density = counts / np.sum(counts)
            if lines:
                ax.plot(histogram_bin_centers, empirical_density,
                        color=blue(cmap_indeces[de_idx]), linewidth=2,
                        label='Coeff idx ' + str(de_idx))
            else:
                ax.bar(histogram_bin_centers, empirical_density,
                       align='center', color=blue(cmap_indeces[de_idx]),
                       width=histogram_bin_centers[1]-histogram_bin_centers[0],
                       alpha=0.4, label='Coeff idx ' + str(de_idx))
        ax.legend(fontsize=10)
        if log_prob:
            ax.set_yscale('log')
        de_figs = [fig]
    else:
        # every coefficient gets its own subplot
        max_de_per_fig = 20*20  # max 20x20 {d}ictionary {e}lements displayed
        assert np.sqrt(max_de_per_fig) % 1 == 0, 'please pick a square number'
        num_de = codes.shape[1]
        num_de_figs = int(np.ceil(num_de / max_de_per_fig))
        # this determines how many dictionary elements are arranged in a
        # square grid within any given figure
        if num_de_figs > 1:
            de_per_fig = max_de_per_fig
        else:
            squares = [x**2 for x in range(1, int(np.sqrt(max_de_per_fig))+1)]
            de_per_fig = squares[bisect.bisect_left(squares, num_de)]
        plot_sidelength = int(np.sqrt(de_per_fig))
        de_idx = 0
        de_figs = []
        for in_de_fig_idx in range(num_de_figs):
            fig = plt.figure(figsize=(15, 15))
            fig.suptitle(plot_title + ', fig {} of {}'.format(
                in_de_fig_idx+1, num_de_figs), fontsize=15)
            subplot_grid = gridspec.GridSpec(plot_sidelength, plot_sidelength)
            fig_de_idx = de_idx % de_per_fig
            while fig_de_idx < de_per_fig and de_idx < num_de:
                if de_idx % 100 == 0:
                    print('plotted', de_idx, 'of', num_de, 'code coefficients')
                ax = plt.Subplot(fig, subplot_grid[fig_de_idx])
                code = filter_code_vals(codes[:, de_idx])
                histogram_min = min(code)
                histogram_max = max(code)
                histogram_bin_edges = np.linspace(histogram_min, histogram_max,
                                                  num_hist_bins + 1)
                histogram_bin_centers = (histogram_bin_edges[:-1] +
                                         histogram_bin_edges[1:]) / 2
                counts, _ = np.histogram(code, histogram_bin_edges)
                empirical_density = counts / np.sum(counts)
                max_density = np.max(empirical_density)
                variance = np.var(code)
                hist_kurtosis = kurtosis(empirical_density, fisher=False)
                if lines:
                    ax.plot(histogram_bin_centers, empirical_density,
                            color='k', linewidth=1)
                else:
                    ax.bar(histogram_bin_centers, empirical_density,
                           align='center', color='k',
                           width=histogram_bin_centers[1]-histogram_bin_centers[0])
                ax.yaxis.set_major_formatter(FormatStrFormatter('%0.1f'))
                ax.xaxis.set_major_formatter(FormatStrFormatter('%0.1f'))
                ax.tick_params(axis='both', which='major', labelsize=5)
                if histogram_min < 0.:
                    ax.set_xticks([histogram_min, 0., histogram_max])
                else:
                    ax.set_xticks([histogram_min, histogram_max])
                ax.text(0.1, 0.75, 'K: {:.1f}'.format(hist_kurtosis),
                        transform=ax.transAxes, color='g', fontsize=5)
                ax.text(0.95, 0.75, 'V: {:.1f}'.format(variance),
                        transform=ax.transAxes, color='b', fontsize=5,
                        horizontalalignment='right')
                ax.set_yticks([0., max_density])
                ax.spines['right'].set_visible(False)
                ax.spines['top'].set_visible(False)
                if log_prob:
                    ax.set_yscale('log')
                fig.add_subplot(ax)
                fig_de_idx += 1
                de_idx += 1
            subplot_grid.tight_layout(figure=fig, pad=3.25, w_pad=0.2, h_pad=0.2)
            de_figs.append(fig)
    return de_figs
6,688
def distance(a, b):
    """Compute the Euclidean distance between two points a and b."""
    dimensions = len(a)

    _sum = 0
    for dimension in range(dimensions):
        difference_sq = (a[dimension] - b[dimension]) ** 2
        _sum += difference_sq
    return sqrt(_sum)
6,689
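Sanity check for the Euclidean distance above; `sqrt` is expected to come from the math module:

print(distance((0, 0), (3, 4)))        # 5.0
print(distance((1, 2, 3), (1, 2, 3)))  # 0.0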
def is_block(modules):
    """Check if is ResNet building block."""
    if isinstance(modules, (BasicBlock, Bottleneck)):
        return True
    return False
6,690
def playlist_500_fixture():
    """Load payload for playlist 500 and return it."""
    return load_fixture("plex/playlist_500.xml")
6,691
def strip_output(nb):
    """strip the outputs from a notebook object"""
    nb.metadata.pop('signature', None)
    for cell in nb.cells:
        if 'outputs' in cell:
            cell['outputs'] = []
        if 'prompt_number' in cell:
            cell['prompt_number'] = None
    return nb
6,692
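A round-trip example for the stripper above using nbformat; the notebook paths are illustrative:

import nbformat

# Read a notebook, drop its outputs, and write it back out.
nb = nbformat.read("analysis.ipynb", as_version=4)
nb = strip_output(nb)
nbformat.write(nb, "analysis_stripped.ipynb")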
async def on_ready():
    """starts AVAX ticker"""
    print("joeBot has logged in as {0.user}".format(discord_bot))
    TaskManager((AvaxTicker(),)).start()
6,693
def set_current_directory(path):
    """Open a context with specified current directory."""
    curdir = os.path.abspath(os.path.curdir)
    os.chdir(os.path.abspath(path))
    try:
        yield
    finally:
        os.chdir(curdir)
6,694
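The generator above is clearly meant to be used as a context manager; the @contextlib.contextmanager decorator was likely lost when the snippet was extracted. A usage sketch assuming that decorator is applied:

import os

with set_current_directory("/tmp"):
    print(os.getcwd())  # inside /tmp
print(os.getcwd())      # back in the original directory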
def get_neighbors_general(status: CachingDataStructure, key: tuple) -> list:
    """
    Returns a list of tuples of all coordinates that are direct neighbors,
    meaning the index is valid and they are not KNOWN
    """
    coords = []
    for key in get_direct_neighbour_coords_general(key):
        if status.valid_index(*key) and not status[key]:  # Not known
            coords.append(key)
    return coords
6,695
def transit_params(time):
    """
    Dummy transit parameters for time series simulations

    Parameters
    ----------
    time: sequence
        The time axis of the transit observation

    Returns
    -------
    batman.transitmodel.TransitModel
        The transit model
    """
    params = batman.TransitParams()
    params.t0 = 0.                            # time of inferior conjunction
    params.per = 5.7214742                    # orbital period (days)
    params.a = 0.0558*q.AU.to(q.R_sun)*0.66   # semi-major axis (in units of stellar radii)
    params.inc = 89.8                         # orbital inclination (in degrees)
    params.ecc = 0.                           # eccentricity
    params.w = 90.                            # longitude of periastron (in degrees)
    params.limb_dark = 'quadratic'            # limb darkening profile to use
    params.u = [0.1, 0.1]                     # limb darkening coefficients
    params.rp = 0.                            # planet radius (placeholder)
    tmodel = batman.TransitModel(params, time)
    tmodel.teff = 3500                        # effective temperature of the host star
    tmodel.logg = 5                           # log surface gravity of the host star
    tmodel.feh = 0                            # metallicity of the host star
    return tmodel
6,696
def actions(__INPUT):
    """Return a list of the possible moves of the matrix."""
    MOVIMIENTOS = []
    m = eval(__INPUT)
    i = 0
    while 0 not in m[i]:
        i += 1
    # Blank space (#0)
    j = m[i].index(0)

    if i > 0:  # MOVE UP action
        m[i][j], m[i-1][j] = m[i-1][j], m[i][j]
        MOVIMIENTOS.append(str(m))
        m[i][j], m[i-1][j] = m[i-1][j], m[i][j]

    if i < 3:  # MOVE DOWN action
        m[i][j], m[i+1][j] = m[i+1][j], m[i][j]
        MOVIMIENTOS.append(str(m))
        m[i][j], m[i+1][j] = m[i+1][j], m[i][j]

    if j > 0:  # MOVE LEFT action
        m[i][j], m[i][j-1] = m[i][j-1], m[i][j]
        MOVIMIENTOS.append(str(m))
        m[i][j], m[i][j-1] = m[i][j-1], m[i][j]

    if j < 3:  # MOVE RIGHT action
        m[i][j], m[i][j+1] = m[i][j+1], m[i][j]
        MOVIMIENTOS.append(str(m))
        m[i][j], m[i][j+1] = m[i][j+1], m[i][j]

    return MOVIMIENTOS
6,697
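Usage sketch for the 15-puzzle successor function above; the input is the string repr of a 4x4 board with 0 marking the blank tile:

state = "[[1, 2, 3, 4], [5, 6, 0, 8], [9, 10, 7, 12], [13, 14, 11, 15]]"
for successor in actions(state):
    print(successor)
# Prints four boards: the blank swapped up, down, left and right.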
def get_massif_geom(massif: str) -> WKBElement:
    """process to get the massifs geometries:
    * go on the meteofrance bra website
    * then get the html "area" element
    * then convert it to fake GeoJSON (wrong coordinates)
    * then open it in qgis.
    * Select *all* the geom of the layer.
    * rotate -90°
    * swap X and Y coordinates (with plugin)
    * use grass v.transform with various x, y scale and rotation until you
      get what you want.
    """
    with resource_stream("nivo_api", "cli/data/all_massifs.geojson") as fp:
        gj = geojson.load(fp)
    for obj in gj.features:
        if obj.properties["label"].upper() == massif.upper():
            return from_shape(shape(obj.geometry), 4326)
    else:
        raise ValueError(f"Massif {massif} geometry cannot be found.")
6,698
def _to_arrow(x):
    """Move data to arrow format"""
    if isinstance(x, cudf.DataFrame):
        return x.to_arrow()
    else:
        return pa.Table.from_pandas(x, preserve_index=False)
6,699
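A small usage sketch for the converter above on the pandas path (the cudf path requires a GPU with cudf installed):

import pandas as pd
import pyarrow as pa

df = pd.DataFrame({"a": [1, 2, 3], "b": ["x", "y", "z"]})
table = _to_arrow(df)
print(isinstance(table, pa.Table))  # True
print(table.num_rows)               # 3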