text
stringlengths 184
4.48M
|
---|
package com.app.foodiehub.presentation.view.adapter
import android.view.LayoutInflater
import android.view.ViewGroup
import androidx.recyclerview.widget.RecyclerView
import com.app.foodiehub.databinding.LayoutBannerItemHomeBinding
import com.app.foodiehub.presentation.model.BannerDataModel
import com.app.foodiehub.utils.loadUrl
/**
 * RecyclerView adapter that renders the home screen's banner carousel items.
 *
 * @param list banners to render; the adapter holds a fixed snapshot and does
 *             not observe changes — recreate the adapter (or call the
 *             notify* APIs) when the data set changes.
 */
class BannerItemAdapterHome(private val list: List<BannerDataModel>) :
    RecyclerView.Adapter<BannerItemAdapterHome.ViewHolder>() {

    /** Holds the view binding for a single banner row. */
    class ViewHolder(binding: LayoutBannerItemHomeBinding) : RecyclerView.ViewHolder(binding.root) {
        val image = binding.bannerImage

        /** Loads the banner's image URL into the row's image view. */
        fun bind(get: BannerDataModel) {
            image.loadUrl(get.url)
        }
    }

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder {
        val binding = LayoutBannerItemHomeBinding.inflate(
            LayoutInflater.from(parent.context), parent, false
        )
        return ViewHolder(binding)
    }

    override fun getItemCount(): Int = list.size

    override fun onBindViewHolder(holder: ViewHolder, position: Int) {
        // Indexed access is the idiomatic Kotlin form of list.get(position).
        holder.bind(list[position])
    }
}
|
import { workerData, parentPort } from "node:worker_threads";
import * as tf from "@tensorflow/tfjs";
import { ModelInputData, ModelTrainSettings } from "../models/ai";
/**
 * Worker body: builds a minimal two-layer dense network from the supplied
 * settings, fits it on the given tensors, and resolves with the training
 * history returned by model.fit().
 */
const trainModelWorker = async (
  data: ModelInputData,
  settings: ModelTrainSettings,
  processId: string
): Promise<any> => {
  console.info(`Worker [${processId}] started!`, data, settings);

  const { inputLayer, outputLayer, compiler } = settings;

  // Assemble the network: one hidden/input layer plus one output layer.
  const model: tf.Sequential = tf.sequential();
  model.add(
    tf.layers.dense({
      units: inputLayer.units,
      activation: inputLayer.activation,
      inputShape: inputLayer.inputShape,
    })
  );
  model.add(
    tf.layers.dense({
      units: outputLayer.units,
      activation: outputLayer.activation,
    })
  );

  // Compile with the configured loss / optimizer / metrics.
  model.compile({
    loss: compiler.loss,
    optimizer: compiler.optimizer,
    metrics: compiler.metrics,
  });

  // Report the loss once per epoch so the parent can follow progress.
  const onEpochEnd = (epoch: number, logs: tf.Logs | undefined) => {
    if (!logs) {
      return;
    }
    console.log(
      `ProcessId: ${processId}, Epoch ${epoch}: loss = ${logs.loss}`
    );
  };

  // NOTE(review): epochs is hard-coded to 1 — confirm whether it should be
  // read from settings instead.
  return model.fit(data.xs, data.ys, {
    epochs: 1,
    validationData: [data.valXs, data.valYs],
    callbacks: { onEpochEnd },
  });
};
// Run the training and post the *result* back to the parent thread.
// The previous code posted the Promise returned by trainModelWorker()
// directly; Promises are not structured-cloneable, so postMessage cannot
// transfer them — the parent never received a usable value.
trainModelWorker(
  workerData.payload["data"],
  workerData.payload["settings"],
  workerData.payload["processId"]
)
  .then((result) => parentPort?.postMessage(result))
  .catch((error) => {
    // Log failures so the worker does not die silently with an
    // unhandled promise rejection.
    console.error(error);
  });
|
/*
* Copyright (c) 2016 Cornelius Preidel
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package bimplus.data;
import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonProperty;
import java.util.List;
/**
* Created by Cornelius on 03.08.2016.
*/
/**
 * Base data-transfer object for Bimplus entities.
 *
 * NOTE(review): this class was machine-converted from C# (see the converter
 * comments below); several members look like conversion artifacts — they are
 * documented here but intentionally left untouched.
 */
class DtObject
{
    /**
     * Object id (serialized as "id").
     */
    @JsonProperty("id")
    private String Id = "";
    public final String getId()
    {
        return Id;
    }
    public final void setId(String value)
    {
        Id = value;
    }
    /**
     * Id of the parent object (serialized as "parent"); null when the object
     * has no parent.
     */
    @JsonProperty("parent")
    private String Parent = null;
    public final String getParent()
    {
        return Parent;
    }
    public final void setParent(String value)
    {
        Parent = value;
    }
    /**
     * Type tag (serialized as "type").
     *
     * NOTE(review): the getter ignores this field and returns the runtime
     * Java class name instead — confirm whether that mismatch is intended.
     */
    @JsonProperty("type")
    public String Type = "";
    public String getType()
    {
        return this.getClass().getName();
    }
    /**
     * Element type (serialized as "elementtyp"); write-only — no getter.
     * In the C# original this was a Guid with NullValueHandling.Ignore.
     */
    //[JsonIgnore]
    //C# TO JAVA CONVERTER TODO TASK: Java annotations will not correspond to .NET attributes:
    //ORIGINAL LINE: [JsonProperty("elementtyp", NullValueHandling = NullValueHandling.Ignore)] public virtual Guid Elementtyp {get;set;}
    @JsonProperty("elementtyp")
    private String Elementtyp = "";
    public void setElementtyp(String value)
    {
        Elementtyp = value;
    }
    /**
     * Layer id (serialized as "layerid").
     *
     * NOTE(review): the getter always returns null — the original GetLayer()
     * call is commented out below; the backing field is never exposed.
     */
    @JsonProperty("layerid")
    private String LayerId = "";
    public String getLayerId()
    {
        return null;
        // return GetLayer();
    }
    /**
     * Object name (serialized as "ObjectName"); currently has no accessor —
     * the abstract getter from the C# original is commented out.
     */
    @JsonProperty("ObjectName")
    private String ObjectName = "";
    // public abstract String getObjectName();
    /**
     * Whether this object has children (serialized as "IsParent").
     *
     * NOTE(review): the getter derives the value from the Children list; the
     * backing field is only used for JSON deserialization.
     */
    @JsonProperty("IsParent")
    private boolean IsParent;
    public boolean getIsParent()
    {
        return getChildren() != null;
    }
    /**
     * Attribute groups — excluded from JSON; the typed C# version was a
     * Map&lt;String, DtoAttributesGroup&gt; bound to "attributes".
     */
    // @JsonProperty("attributes")
    // Type Map<String, DtoAttributesGroup>
    @JsonIgnore
    public String AttributeGroups;
    public final String getAttributeGroups()
    {
        return AttributeGroups;
    }
    public final void setAttributeGroups(String value)
    {
        AttributeGroups = value;
    }
    /**
     * Child objects (serialized as "children"); the getter is private,
     * children are only exposed indirectly through getIsParent().
     */
    @JsonProperty("children")
    private List<DtObject> Children;
    private List<DtObject> getChildren()
    {
        return Children;
    }
    public final void setChildren(List<DtObject> value)
    {
        Children = value;
    }
    /**
     * Property with additional free attribute ID
     * (serialized as "WithFreeAttrId").
     */
    @JsonProperty("WithFreeAttrId")
    private boolean WithFreeAttrId;
    public boolean getWithFreeAttrId()
    {
        return WithFreeAttrId;
    }
    public void setWithFreeAttrId(boolean value)
    {
        WithFreeAttrId = value;
    }
    /**
     * Public "Name" property, serialized as "name" in JSON.
     */
    @JsonProperty("name")
    private String DisplayName;
    public String getDisplayName()
    {
        return DisplayName;
    }
    public void setDisplayName(String value)
    {
        DisplayName = value;
    }
    /**
     * Chunk size for paging (serialized as "chunksize"); null means no paging.
     */
    @JsonProperty("chunksize")
    private Long ChunkSize = null;
    public Long getChunkSize()
    {
        return ChunkSize;
    }
    public void setChunkSize(Long value)
    {
        ChunkSize = value;
    }
}
|
import { BadRequestException } from "../../infrastructure/exception/BadRequestException"
import { ExceptionCodeEnum } from "../../infrastructure/exception/ExceptionCodeEnum"
import { MySQLConnection } from "./MySQLConnection";
import { IMoviePersistence } from '../../core/movie/IMoviePersistence'
import { Movie } from "../../core/movie/Movie";
import { Op } from "sequelize";
/**
 * Sequelize/MySQL implementation of IMoviePersistence.
 *
 * Every read method unwraps model instances with toJSON() so callers receive
 * plain Movie objects, never Sequelize internals.
 */
export class MySQLMoviePersistence extends MySQLConnection implements IMoviePersistence {
    // NOTE: the explicit no-op `constructor() { super() }` was removed — the
    // implicit constructor is equivalent.

    /** Returns one page of movies. */
    async getPaginated(offset: number, limit: number): Promise<Movie[]> {
        const result = await this.movieTable.findAll({ offset, limit })
        // Use map's return value instead of pushing into a side-effect array.
        return result.map(item => item.toJSON())
    }

    /** Substring match on title (SQL LIKE); null when nothing matches. */
    async getByTitleOrNull(title: string): Promise<Movie | null> {
        const result = await this.movieTable.findOne({ where: { title: { [Op.like]: `%${title}%` } } })
        if (!result) return null
        return result.toJSON()
    }

    /**
     * Looks a movie up by primary key.
     * @throws BadRequestException (MOVIE_NOT_FOUND) when the id is unknown.
     */
    async getByIdOrException(id: string): Promise<Movie> {
        const result = await this.movieTable.findByPk(id)
        if (!result) throw new BadRequestException(ExceptionCodeEnum.MOVIE_NOT_FOUND)
        return result.toJSON()
    }

    /** Deletes by id; no error if the id does not exist. */
    async delete(id: string): Promise<void> {
        await this.movieTable.destroy({ where: { id } })
    }

    /** Updates the row matching data.id with the given fields. */
    async update(data: Movie): Promise<void> {
        await this.movieTable.update(data, { where: { id: data.id } })
    }

    /** Inserts a new movie row. */
    async create(data: Movie): Promise<void> {
        await this.movieTable.create(data)
    }
}
|
import UserRequest from "api/request/user/UserRequest";
import Action from "client/common/atom/action/Action";
import Input from "client/common/components/form/input/Input";
import ComponentHovereableColor from "client/common/tailwind/constants/ComponentHovereableColor";
import useFirebaseUser from "client/hooks/useFirebaseUser";
import Section from "client/views/components/section/Section";
import { FunctionComponent, useState } from "react";
import { Redirect } from "react-router";
import { PageRouteBuilder } from "shared/routes/PageRoute";
import LogoutButton from "./logout/LogoutButton";
export interface AuthPageProps {
}

/**
 * Authentication page: Google popup sign-in plus email/password login and
 * account creation. Redirects to the user's detail page once signed in.
 */
const AuthPage: FunctionComponent<AuthPageProps> = () => {
    const user = useFirebaseUser();
    const [email, setEmail] = useState('');
    const [password, setPassword] = useState('');

    // Email/password sign-in; derive a display name from the e-mail local
    // part when the account does not have one yet.
    const signInWithEmail = async () => {
        const credentials = await user.signIn(email, password)
        if (!credentials.user?.displayName) {
            await credentials.user!.updateProfile({
                displayName: email.split("@")[0]
            });
        }
    };

    // Google popup sign-in; lazily creates the backend user record the first
    // time a Google account logs in.
    const signInWithGoogle = async () => {
        try {
            const credentials = await user.signInWithGoogle();
            if (credentials.user?.email) {
                const dbUser = await UserRequest.getByEmail(credentials.user.email);
                if (!dbUser)
                    await UserRequest.save(credentials.user.email);
            }
        } catch (e) {
            // Best effort: a cancelled popup / network error must not crash the page.
            console.log(e);
        }
    };

    // Create the backend user first so its generated userName/avatar can be
    // copied onto the Firebase profile.
    const createUser = async () => {
        const savedUser = await UserRequest.save(email);
        const credentials = await user.createUser(email, password)
        await credentials.user!.updateProfile({
            displayName: savedUser.userName,
            photoURL: savedUser.avatar
        });
    }

    if (user.data)
        return <Redirect to={PageRouteBuilder.USER_DETAIL(user.data.userName)} />;

    return (
        <div className="">
            {/* BUG FIX: `!user || !user.data && (...)` parsed as
                `!user || (!user.data && <jsx/>)` due to && binding tighter
                than || — parenthesized so the whole condition gates the form. */}
            {(!user || !user.data) && (
                <div className="grid place-items-center w-screen space-y-8">
                    <Action onClick={signInWithGoogle} color={ComponentHovereableColor.INFO}>Sign-in with Google</Action>
                    <form className="w-1/3">
                        <Section title="Login">
                            <Input onChange={(e) => setEmail(e.target.value)} placeholder="E-Mail" />
                            <Input onChange={(e) => setPassword(e.target.value)} placeholder="Password" type="password" />
                            <div className="flex justify-between">
                                <Action onClick={signInWithEmail} color={ComponentHovereableColor.PRIMARY}>Login</Action>
                                <Action onClick={createUser} color={ComponentHovereableColor.SUCCESS}>Create user</Action>
                            </div>
                        </Section>
                    </form>
                </div>
            )}
            <LogoutButton />
        </div>
    );
};
export default AuthPage;
|
import React, { useEffect, useState } from 'react';
import Recipe from './Recipe';
import './App.css';
/**
 * Recipe search app: queries the Edamam API for the committed search term and
 * renders one <Recipe> card per hit.
 */
const App = () => {
  // SECURITY(review): credentials are hard-coded and shipped to the browser —
  // move them to build-time environment variables.
  const APP_ID = '1a1a4ae1';
  const APP_KEY = '97cb80a4cb02c5ae312b9485b3663642';

  const [recipes, setRecipes] = useState([]);
  const [search, setSearch] = useState('');   // live input value
  const [query, setQuery] = useState('beef'); // committed search term

  // Re-fetch whenever the committed query changes. The fetch function lives
  // inside the effect so the dependency list is complete.
  useEffect(() => {
    const getRecipes = async () => {
      const response = await fetch(`https://api.edamam.com/search?q=${query}&app_id=${APP_ID}&app_key=${APP_KEY}`);
      const data = await response.json();
      // Guard against error responses that carry no hits array.
      setRecipes(data.hits || []);
    };
    getRecipes();
  }, [query]);

  const updateSearch = e => {
    setSearch(e.target.value);
  };

  // Commit the search term and clear the input.
  const getSearch = e => {
    e.preventDefault();
    setQuery(search);
    setSearch('');
  };

  return (
    <div className='App'>
      <form onSubmit={getSearch} className='search-form'>
        <input onChange={updateSearch} className='search-bar' type='text' value={search}/>
        <button className='search-button' type='submit'>Search</button>
      </form>
      <div className='recipes'>
        {/* NOTE(review): keys assume recipe labels are unique per result set. */}
        {recipes.map(recipe => (
          <Recipe
            key={recipe.recipe.label}
            title={recipe.recipe.label}
            calories={recipe.recipe.calories}
            image={recipe.recipe.image}
            ingredients={recipe.recipe.ingredients}
            directions={recipe.recipe.url}
          />
        ))}
      </div>
    </div>
  );
};
|
def gcdOfStrings(str1: str, str2: str) -> str:
    """Given two strings str1 and str2, return the largest string x such that
    x divides both str1 and str2 (i.e. each is x repeated some whole number
    of times).

    Args:
        str1 (str): String 1
        str2 (str): String 2

    Returns:
        str: The largest string x dividing both inputs, or "" when no such
        string exists.
    """
    from math import gcd

    # A common divisor string x exists iff both concatenation orders agree
    # (each side is then x repeated len(str1+str2)/len(x) times).
    if str1 + str2 != str2 + str1:
        return ""
    # When a divisor exists, the answer is the prefix of length
    # gcd(|str1|, |str2|) — the closed form of the recursive Euclidean
    # subtraction the previous implementation performed.
    return str1[: gcd(len(str1), len(str2))]
# Demo: the largest string that divides both inputs.
str1, str2 = "ABCABC", "ABC"
print(gcdOfStrings(str1, str2))  # ABC
|
<?php
namespace App\Service;
use Symfony\Bundle\FrameworkBundle\Console\Application;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\ArrayInput;
use Symfony\Component\HttpKernel\KernelInterface;
/**
 * Provisions the application database by running the Doctrine console
 * commands (create database, update schema, load fixtures) in-process.
 */
class DatabaseService
{
    private const COMMAND_CREATE_DATABASE = 'doctrine:database:create';
    private const COMMAND_UPDATE_SCHEMA = 'doctrine:schema:update';
    private const COMMAND_LOAD_FIXTURES = 'doctrine:fixtures:load';

    public function __construct( private readonly KernelInterface $kernel )
    {
    }

    /**
     * Create the database, update its schema and load fixtures.
     * @return int always Command::SUCCESS — individual step failures are
     *             swallowed by runCommand()
     */
    public function createDatabase() : int
    {
        $application = new Application( $this->kernel );
        // Keep the console application from calling exit() and killing the process.
        $application->setAutoExit( false );
        $this->runCommand( $application, self::COMMAND_CREATE_DATABASE );
        $this->runCommand( $application, self::COMMAND_UPDATE_SCHEMA, ['--force' => true] );
        $this->runCommand( $application, self::COMMAND_LOAD_FIXTURES, ['--append' => true] );
        return Command::SUCCESS;
    }

    /**
     * Run a console command, best-effort: failures are ignored.
     * @param Application $application
     * @param string $command
     * @param array<string, mixed> $options
     * @return void
     */
    private function runCommand( Application $application, string $command, array $options = [] ) : void
    {
        $input = new ArrayInput(
            array_merge( ['command' => $command], $options )
        );
        try {
            // The previous code compared the exit code to Command::SUCCESS and
            // discarded the result — that dead comparison has been removed.
            $application->run( $input );
        } catch ( \Exception $e ) {
            // Deliberately swallowed so later setup steps still run.
            // NOTE(review): consider logging $e instead of discarding it.
        }
    }
}
|
<!DOCTYPE html>
<html lang="en" xmlns:th="http://www.w3.org/1999/xhtml">
<head>
<meta charset="UTF-8">
<title>Nieuwe klant</title>
<link rel="stylesheet" type="text/css" href="https://www.w3schools.com/w3css/4/w3.css">
<link rel="stylesheet" type="text/css" th:href="@{/css/default.css}">
<link rel="stylesheet" type="text/css" th:href="@{/css/forms.css}">
</head>
<body>
<div class="nav">
<ul>
<li><a href="index" class="link">Home</a></li>
<li><a href="login" class="link">Login</a></li>
<li><a href="nieuweklant" class="link">Nieuwe Klant</a></li>
<li><a href="login_medewerker" class="link">Werknemers</a></li>
</ul>
</div>
<div class="text-container">
<h2>Account aanmaken</h2>
</div>
<div class="new-client-container">
<div class="form-container">
U kunt hier uw gloednieuwe account aanmaken en klant worden van Sofa. Indien u een nieuwe klant bent krijgt u
van ons een
rekeningnummer toegewezen en kunt u op het volgende scherm inloggen met uw inloggegevens. Tot snel!
</div>
</div>
<div class="new-client-container">
<div class="form-container">
<form name="new_client" class="new_client" action="#" th:action="@{newAccountHandler}" method="post">
<table>
<tr>
<td><p>Vink aan voor een zakelijke rekening</p></td>
<td><input type="checkbox" id="bussiness" name="business"/></td>
</tr>
<tr>
<td>
<div>Voornaam</div>
</td>
<td><input type="text" name="firstName" required
           oninvalid="this.setCustomValidity('Geef uw voornaam op')"
           oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Tussenvoegsel</td>
<td><input type="text" name="prefix"/></td>
</tr>
<tr>
<td>Achternaam</td>
<td><input type="text" name="lastName" required
oninvalid="this.setCustomValidity('Geef uw achternaam op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Straatnaam</td>
<td><input type="text" name="street" required
oninvalid="this.setCustomValidity('Geef uw straatnaam op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Huisnummer</td>
<td><input type="number" name="housenumber" required min="1" max="99144"
oninvalid="this.setCustomValidity('Geef uw huisnummer op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Postcode</td>
<td><input type="text" name="zipCode" pattern="^[1-9][0-9]{3} ?(?!sa|sd|ss|SA|SD|SS)[A-Za-z]{2}$"
required
oninvalid="this.setCustomValidity('Geef uw postcode op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Woonplaats</td>
<td><input type="text" name="city" required
oninvalid="this.setCustomValidity('Geef uw woonplaats op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Burger Service Nummer</td>
<td><input type="text" name="ssn" required
oninvalid="this.setCustomValidity('Geef uw BSN op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Emailadres</td>
<td><input type="email" name="email" required
oninvalid="this.setCustomValidity('Geef een geldig email adres op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Telefoonnummer</td>
<td><input type="text" name="telephoneNr" required
oninvalid="this.setCustomValidity('Geef uw Telefoonnummer op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Geboortedatum</td>
<td><input id="datefield" type="date" class="date" name="birthday" required min="1899-01-01"
max="2019-09-12"
oninvalid="this.setCustomValidity('U dient 18 jaar of ouder te zijn om online een rekening te openen')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Geslacht</td>
<td><input type="radio" value="Male" name="gender" required
oninvalid="this.setCustomValidity('Geef uw geslacht op')"
oninput="setCustomValidity('')"/>Man
<input type="radio" value="Female" name="gender">Vrouw
</td>
</tr>
<tr id="bussinessname">
<td>Naam bedrijf:</td>
<td><input id="hidden_name" type="text" name="bussinessName" required
           oninvalid="this.setCustomValidity('Geef uw bedrijfsnaam op')"
           oninput="setCustomValidity('')"/></td>
</tr>
<tr id="bussinessSector">
<td>Sector:</td>
<td><select field="*{sector}" id="hidden_sector" name="sector">
<option th:each="sector : ${T(team2.sofa.sofa.model.BusinessSector).values()}"
th:value="${sector}" th:text="${sector.displayValue}"></option>
</select></td>
</tr>
<tr>
<td>Gebruikersnaam</td>
<td><input type="text" name="username" required
oninvalid="this.setCustomValidity('Geef uw gebruikersnaam op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td>Wachtwoord</td>
<td><input type="password" name="password" required
oninvalid="this.setCustomValidity('Geef een wachtwoord op')"
oninput="setCustomValidity('')"/></td>
</tr>
<tr>
<td></td>
<td><input class="input-form" type="submit" value="Verzenden"></td>
</tr>
<tr>
<td>
<div class="text-container">
<div class="form-container" th:unless="${#lists.isEmpty(errorList)}">
<span th:each="error : ${errorList}">
<span th:text="${error}" style="color: red"/>
</span>
</div>
</div>
</td>
</tr>
</table>
</form>
</div>
</div>
<footer class="footer">
<div class="new-client-container">
<div class="form-container">Sofa bank is een project van studenten van de HvA, omscholingstraject Make IT Work.
Copyright Bas Goossens, Joost Kager, Kirsten Jalvingh, Danielle van den Bos , Rene van der Stok & Aat Niehot
</div>
</div>
</footer>
<script src="https://code.jquery.com/jquery-1.11.0.min.js"></script>
<script>
$(function () {
    // The business checkbox and the two table rows that only apply to
    // business accounts.
    var businessCheckbox = $("#bussiness");
    var nameRow = $("#bussinessname");
    var sectorRow = $("#bussinessSector");

    // Hide both rows and give the (required) name field a placeholder value
    // ("ongebruikt" = unused) so the form still submits for private customers.
    function hideBusinessFields() {
        nameRow.hide();
        $("#hidden_name").val("ongebruikt");
        sectorRow.hide();
    }

    // Start hidden: a new visitor is a private customer by default.
    hideBusinessFields();

    // Toggle the extra rows whenever the checkbox state changes.
    businessCheckbox.change(function () {
        if (businessCheckbox.is(':checked')) {
            $("#hidden_name").val("");
            nameRow.show();
            sectorRow.show();
        } else {
            hideBusinessFields();
        }
    });
});
</script>
<script>
// Set the date picker's max to today's date minus 18 years, so only adults
// can register (same rule as the server-side check).
var now = new Date();
var dd = now.getDate();
var mm = now.getMonth() + 1; // getMonth() is zero-based
var yyyy = now.getFullYear() - 18; // must be 18 or older to open an account
if (dd < 10) {
    dd = '0' + dd
}
if (mm < 10) {
    mm = '0' + mm
}
var maxBirthday = yyyy + '-' + mm + '-' + dd;
document.getElementById("datefield").setAttribute("max", maxBirthday);
</script>
</body>
</html>
|
import axios, {AxiosResponse} from "axios";
import {Tag} from "@/utilities";
// Interface que define la estructura de una nota
// Puedes reutilizarla para definir la estructura de un tag
// URL base de la API para el endpoint de tags
const API_BASE_URL = "https://gray-tiny-newt.cyclic.app/tags";
// Logs an Axios failure and rethrows it. Typed `never` so TypeScript knows
// control cannot continue past a call to this helper (the `throw error`
// statements after call sites are therefore provably unreachable).
function handleAxiosError(error: any): never {
  console.error(error);
  throw error;
}
// GET all tags from the API.
export async function getTags(): Promise<Tag[]> {
  try {
    const { data }: AxiosResponse<Tag[]> = await axios.get(`${API_BASE_URL}`);
    return data;
  } catch (error) {
    handleAxiosError(error);
    throw error;
  }
}
// GET a single tag by its numeric id.
export async function getTagById(id: number): Promise<Tag> {
  try {
    const { data }: AxiosResponse<Tag> = await axios.get([API_BASE_URL, id].join("/"));
    return data;
  } catch (error) {
    handleAxiosError(error);
    throw error;
  }
}
// POST a new tag with the given name; resolves with the created tag.
export async function createTag(tagName: string): Promise<Tag> {
  try {
    const payload = { tagName };
    const { data }: AxiosResponse<Tag> = await axios.post(`${API_BASE_URL}`, payload);
    return data;
  } catch (error) {
    handleAxiosError(error);
    throw error;
  }
}
// PUT an updated name onto an existing tag; resolves with the updated tag.
export async function updateTag(id: number, tagName: string): Promise<Tag> {
  try {
    const payload = { tagName };
    const { data }: AxiosResponse<Tag> = await axios.put([API_BASE_URL, id].join("/"), payload);
    return data;
  } catch (error) {
    handleAxiosError(error);
    throw error;
  }
}
// DELETE the tag with the given id. The error helper logs and rethrows.
export async function deleteTag(id: number): Promise<void> {
  try {
    await axios.delete([API_BASE_URL, id].join("/"));
  } catch (error) {
    handleAxiosError(error);
  }
}
|
import React from 'react'
import { Tilt } from 'react-tilt';
import { motion } from 'framer-motion';
import { styles } from '../styles';
import { github } from '../assets';
import { SectionWrapper } from '../hoc';
import { projects } from '../constants';
import { fadeIn, textVariant } from '../utils/motion';
/**
 * Card for one project: tilting preview image with a GitHub-link overlay,
 * plus name, description and hashtag list.
 */
const ProjectCard = ({ image, name, description, index, tags, source_code_link }) => {
  return (
    <motion.div variants={fadeIn("up", "spring", index * 0.5, 0.75)}>
      <Tilt
        // BUG FIX: react-tilt's prop is named `options`; the previous
        // `option` prop was silently ignored, so these settings never applied.
        options={{
          max: 45,
          scale: 1,
          speed: 450,
        }}
        className="bg-tertiary p-5 rounded-2xl sm:w-[360px] w-full"
      >
        <div onClick={() => window.open(source_code_link)} className='relative w-full h-[230px]'>
          <img
            src={image}
            alt={name}
            className='w-full h-full object-cover rounded-2xl'
          />
          <div className='absolute inset-0 flex justify-end m-3 card-img_hover'>
            <div
              onClick={(e) => {
                // Keep the click from bubbling to the card's own handler,
                // which would open the link a second time.
                e.stopPropagation();
                window.open(source_code_link, "_blank");
              }}
              className="black-gradient w-10 h-10 rounded-full flex justify-center items-center cursor-pointer"
            >
              <img src={github} alt="github" className='w-1/2 h-1/2 object-contain' />
            </div>
          </div>
        </div>
        <div className='mt-5'>
          <h3 className='text-white font-bold text-[24px]'>{name}</h3>
          <p className='mt-2 text-secondary text-[14px]'>{description}</p>
        </div>
        <div className="mt-4 flex flex-wrap gap-2">
          {tags.map((tag) => (
            <p key={tag.name} className={` text-[14px] ${tag.color}`}>
              #{tag.name}
            </p>
          ))}
        </div>
      </Tilt>
    </motion.div>
  );
};
/**
 * "Projects" section: heading, intro paragraph and the grid of project cards.
 */
const Works = () => {
  return (
    <>
      {/* Section heading */}
      <motion.div variants={textVariant()}>
        <p className={`${styles.sectionSubText} text-center`}>
          My work
        </p>
        <h2 className={`${styles.sectionHeadText} text-center`}>
          Projects.
        </h2>
      </motion.div>

      {/* Intro paragraph */}
      <div className='w-full flex'>
        <motion.p
          variants={fadeIn("", "", 0.1, 1)}
          className='mt-3 text-secondary text-[17px] max-w-3xl leading-[30px]'
        >
          Explore a curated collection of impactful projects showcasing my expertise in conceptualizing, developing, and implementing innovative solutions in computer science and engineering domains. Each project reflects my commitment to technical excellence, problem-solving skills, and collaborative teamwork.
        </motion.p>
      </div>

      {/* One card per project */}
      <div className="mt-20 flex flex-wrap gap-7">
        {projects.map((project, index) => (
          <ProjectCard key={`project-${index}`} index={index} {...project} />
        ))}
      </div>
    </>
  );
};

export default SectionWrapper(Works, "");
|
/*
* Copyright (C) 2022 Arm Limited or its affiliates. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* ----------------------------------------------------------------------
* Project: Arm-2D Library
* Title: #include "arm_2d_helper.h"
* Description: Public header file for the all helper services
*
* $Date: 13. July 2023
* $Revision: V.1.6.4
*
* Target Processor: Cortex-M cores
* -------------------------------------------------------------------- */
#ifndef __ARM_2D_HELPER_H__
#define __ARM_2D_HELPER_H__
#include "arm_2d.h"
#include "./__arm_2d_helper_common.h"
#include "./arm_2d_helper_pfb.h"
#include "./arm_2d_helper_scene.h"
#include "./arm_2d_disp_adapters.h"
#include "./arm_2d_helper_list.h"
#include <stdlib.h>
#include <assert.h>
#ifdef __cplusplus
extern "C" {
#endif
#if defined(__clang__)
# pragma clang diagnostic push
# pragma clang diagnostic ignored "-Wgnu-zero-variadic-macro-arguments"
# pragma clang diagnostic ignored "-Wunused-function"
# pragma clang diagnostic ignored "-Wmissing-declarations"
#elif defined(__IS_COMPILER_ARM_COMPILER_5__)
# pragma diag_suppress 64
#endif
/* OOC header, please DO NOT modify */
#ifdef __ARM_2D_HELPER_IMPLEMENT__
# undef __ARM_2D_HELPER_IMPLEMENT__
# define __ARM_2D_IMPL__
#endif
#include "arm_2d_utils.h"
/*!
* \addtogroup Deprecated
* @{
*/
#define arm_2d_draw_box arm_2d_helper_draw_box
/*! @} */
/*!
* \addtogroup gHelper 7 Helper Services
* @{
*/
/*!
* \brief set an alarm with given period and check the status
*
* \param[in] __ms a time period in millisecond
* \param[in] ... an optional timestamp holder
*
* \return bool whether it is timeout
*/
#define arm_2d_helper_is_time_out(__ms, ...) \
({ static int64_t arm_2d_safe_name(s_lTimestamp); \
__arm_2d_helper_is_time_out(arm_2d_helper_convert_ms_to_ticks(__ms), \
(&arm_2d_safe_name(s_lTimestamp),##__VA_ARGS__));})
/*!
* \brief calculate the stroke of a liner slider based on time
*
* \param[in] __from the start of the slider
* \param[in] __to the end of the slider
* \param[in] __ms a given period (ms) in which the slider should finish the
* whole stroke
* \param[out] __stroke_ptr the address of an int32_t stroke variable
* \param[in] ... an optional address of a timestamp variable, if you omit it,
 * NULL will be passed, and the code that calls this function will not
* be reentrant.
* \retval true the slider has finished the whole stroke
* \retval false the slider hasn't reach the target end
*/
#define arm_2d_helper_time_liner_slider( __from, \
__to, \
__ms, \
__stroke_ptr, \
...) \
({static int64_t arm_2d_safe_name(s_lTimestamp); \
__arm_2d_helper_time_liner_slider((__from), \
(__to), \
arm_2d_helper_convert_ms_to_ticks(__ms), \
(__stroke_ptr), \
(&arm_2d_safe_name(s_lTimestamp),##__VA_ARGS__));})
/*!
* \brief calculate the stroke of a cosine slider based on time
*
* \param[in] __from the start of the slider
* \param[in] __to the end of the slider
* \param[in] __ms a given period (ms) in which the slider should finish the
* whole stroke
* \param[in] __phase the phase offset
* \param[out] __stroke_ptr the address of an int32_t stroke variable
* \param[in] ... an optional address of a timestamp variable, if you omit it,
 * NULL will be passed, and the code that calls this function will not
* be reentrant.
* \retval true the slider has finished the whole stroke
* \retval false the slider hasn't reach the target end
*/
#define arm_2d_helper_time_cos_slider( __from, \
__to, \
__ms, \
__phase, \
__stroke_ptr, \
...) \
({static int64_t arm_2d_safe_name(s_lTimestamp); \
__arm_2d_helper_time_cos_slider((__from), \
(__to), \
arm_2d_helper_convert_ms_to_ticks(__ms), \
(__phase), \
(__stroke_ptr), \
(&arm_2d_safe_name(s_lTimestamp),##__VA_ARGS__));})
/*!
* \brief calculate the stroke of a cosine slider(0~pi) based on time
*
* \param[in] __from the start of the slider
* \param[in] __to the end of the slider
* \param[in] __ms a given period (ms) in which the slider should finish the
* whole stroke
* \param[out] __stroke_ptr the address of an int32_t stroke variable
* \param[in] ... an optional address of a timestamp variable, if you omit it,
 * NULL will be passed, and the code that calls this function will not
* be reentrant.
* \retval true the slider has finished the whole stroke
* \retval false the slider hasn't reach the target end
*/
#define arm_2d_helper_time_half_cos_slider( __from, \
__to, \
__ms, \
__stroke_ptr, \
...) \
({static int64_t arm_2d_safe_name(s_lTimestamp); \
__arm_2d_helper_time_half_cos_slider((__from), \
(__to), \
arm_2d_helper_convert_ms_to_ticks(__ms), \
(__stroke_ptr), \
(&arm_2d_safe_name(s_lTimestamp),##__VA_ARGS__));})
/*!
 * \brief initialize/implement a given film (arm_2d_helper_film_t) object
* at compile-time.
* \param[in] __sprites_tile the sprites tile
* \param[in] __width the width of each frame
* \param[in] __height the height of each frame
* \param[in] __column the number of frames per row in the sprite tile
* \param[in] __frame_count the total number of frames in the sprite tile
* \param[in] __period the period per-frame
* \note __period is used as a reference for applications. The helper service
* doesn't use it at all.
*/
#define impl_film( __sprites_tile, \
__width, \
__height, \
__column, \
__frame_count, \
__period) \
{ \
.use_as__arm_2d_tile_t = \
impl_child_tile((__sprites_tile), 0, 0, (__width), (__height)), \
.hwColumn = (__column), \
.hwFrameNum = (__frame_count), \
.hwPeriodPerFrame = (__period), \
}
/*!
* \brief a helper class to represent a GIF-like resource
*/
typedef struct arm_2d_helper_film_t {
implement(arm_2d_tile_t); /*!< derived from arm_2d_tile_t */
uint16_t hwColumn; /*!< number of frames per row in a sprite tile */
uint16_t hwFrameNum; /*!< the total number of frames */
uint16_t hwPeriodPerFrame; /*!< the period per frame (optional, used as a reference) */
uint16_t hwFrameIndex; /*!< the frame index used at runtime */
} arm_2d_helper_film_t;
/*!
 * \brief the configuration structure for the Proportional-Integral Control
 *        slider helper
 */
typedef struct arm_2d_helper_pi_slider_cfg_t {
    int32_t nInterval;        /*!< control interval — presumably in ms; confirm with the implementation */
    float fProportion;        /*!< proportional (P) coefficient */
    float fIntegration;       /*!< integral (I) coefficient */
} arm_2d_helper_pi_slider_cfg_t;
/*!
* \brief a helper class for Proportional-Integral Control
*/
typedef struct arm_2d_helper_pi_slider_t {
ARM_PRIVATE (
arm_2d_helper_pi_slider_cfg_t tCFG;
int64_t lTimestamp;
int32_t nTimeResidual;
int32_t iCurrent;
float fOP;
)
} arm_2d_helper_pi_slider_t;
/*!
* \brief initialize helper services
*/
extern
void arm_2d_helper_init(void);
/*!
 * \brief backend task for asynchronous mode
*/
extern
void arm_2d_helper_backend_task(void);
/*!
* \brief convert ticks of a reference timer to millisecond
*
* \param[in] lTick the tick count
* \return int64_t the millisecond
*/
extern
int64_t arm_2d_helper_convert_ticks_to_ms(int64_t lTick);
/*!
* \brief convert millisecond into ticks of the reference timer
*
* \param[in] wMS the target time in millisecond
* \return int64_t the ticks
*/
extern
int64_t arm_2d_helper_convert_ms_to_ticks(uint32_t wMS);
/*!
* \brief get the reference clock frequency
* \return uint32_t the frequency
*/
extern
uint32_t arm_2d_helper_get_reference_clock_frequency(void);
/*!
* \brief get the current system stamp from the reference clock
*
* \return int64_t the timestamp in ticks (no overflow issue)
* \note you have to call arm_2d_helper_convert_ticks_to_ms() to convert the
* the timestamp into milliseconds when required.
*/
extern
int64_t arm_2d_helper_get_system_timestamp(void);
/*!
* \brief set an alarm with given period and check the status
*
* \param[in] lPeriod a time period in ticks
* \param[in] plTimestamp a pointer points to an int64_t integer, if NULL is
* passed, an static local variable inside the function will be used
* \return bool whether it is timeout or not
*/
ARM_NONNULL(2)
extern
bool __arm_2d_helper_is_time_out(int64_t lPeriod, int64_t *plTimestamp);
/*!
* \brief calculate the stroke of a liner slider based on time
*
* \param[in] nFrom the start of the slider
* \param[in] nTo the end of the slider
* \param[in] lPeriod a given period in which the slider should finish the whole
* stroke
* \param[out] pnStroke the address of an int32_t stroke variable
* \param[in] plTimestamp the address of a timestamp variable, if you pass NULL
 *             the code that calls this function will not be reentrant.
* \retval true the slider has finished the whole stroke
* \retval false the slider hasn't reach the target end
*/
ARM_NONNULL(4,5)
extern
bool __arm_2d_helper_time_liner_slider( int32_t nFrom,
int32_t nTo,
int64_t lPeriod,
int32_t *pnStroke,
int64_t *plTimestamp);
/*!
* \brief calculate the stroke of a cosine slider (0~pi) based on time
*
* \param[in] nFrom the start of the slider
* \param[in] nTo the end of the slider
* \param[in] lPeriod a given period in which the slider should finish the whole
* stroke
* \param[out] pnStroke the address of an int32_t stroke variable
* \param[in] plTimestamp the address of a timestamp variable, if you pass NULL
 *             the code that calls this function will not be reentrant.
* \retval true the slider has finished the whole stroke
* \retval false the slider hasn't reach the target end
*/
ARM_NONNULL(4,5)
extern
bool __arm_2d_helper_time_half_cos_slider( int32_t nFrom,
int32_t nTo,
int64_t lPeriod,
int32_t *pnStroke,
int64_t *plTimestamp);
/*!
 * \brief calculate the stroke of a cosine slider (0~2pi) based on time
*
* \param[in] nFrom the start of the slider
* \param[in] nTo the end of the slider
* \param[in] lPeriod a given period in which the slider should finish the whole
* stroke
* \param[in] lPhase the phase offset
* \param[out] pnStroke the address of an int32_t stroke variable
* \param[in] plTimestamp the address of a timestamp variable, if you pass NULL
 *             the code that calls this function will not be reentrant.
* \retval true the slider has finished the whole stroke
* \retval false the slider hasn't reach the target end
*/
ARM_NONNULL(5,6)
extern
bool __arm_2d_helper_time_cos_slider( int32_t nFrom,
int32_t nTo,
int64_t lPeriod,
float fPhase,
int32_t *pnStroke,
int64_t *plTimestamp);
/*!
 * \brief colour interpolation
* \param[in] wFrom a 32bit colour (4 8bit colour channels) on the start
* \param[in] wTo a 32bit colour (4 8bit colour channels) on the end
* \param[in] nDistance the reference full distance between two end points
* \param[in] nOffset the offset from the start
* \return uint32_t 32bit colour
*/
extern
uint32_t __arm_2d_helper_colour_slider( uint32_t wFrom,
uint32_t wTo,
int32_t nDistance,
int32_t nOffset);
/*!
* \brief initialize the Proportional-Integral Control helper
 * \param[in] ptThis the target helper control block
 * \param[in] ptCFG the configuration structure, NULL means using the default
 *            parameters, i.e. P = 5.0f, I = 3.0f and Interval = 20ms
 * \param[in] nStartPosition the start position
* \return arm_2d_helper_pi_slider_t* the control block
*/
extern
ARM_NONNULL(1)
arm_2d_helper_pi_slider_t *arm_2d_helper_pi_slider_init(
arm_2d_helper_pi_slider_t *ptThis,
arm_2d_helper_pi_slider_cfg_t *ptCFG,
int32_t nStartPosition);
/*!
* \brief A helper function for Proportional-Integral Control
* \param[in] ptThis the control block (arm_2d_helper_pi_slider_t)
* \param[in] nTargetPosition the new target position
 * \param[in] pnResult an int32_t buffer for reading the current position
 * \retval true the slider has reached the target position
* \retval false the slider is still moving
*/
extern
ARM_NONNULL( 1, 3 )
bool arm_2d_helper_pi_slider( arm_2d_helper_pi_slider_t *ptThis,
int32_t nTargetPosition,
int32_t *pnResult);
/*!
 * \brief draw a box with specified colour, border width and opacity
* \param[in] ptTarget the target tile
* \param[in] ptRegion the target region
* \param[in] iBorderWidth the border width
* \param[in] tColour the target colour
* \param[in] chOpacity the opacity
*/
extern
void arm_2d_helper_draw_box( const arm_2d_tile_t *ptTarget,
const arm_2d_region_t *ptRegion,
int16_t iBorderWidth,
COLOUR_INT tColour,
uint8_t chOpacity);
/*!
 * \brief move to the next frame of a given film
* \param[in] ptThis the target film
*/
extern
ARM_NONNULL(1)
void arm_2d_helper_film_next_frame(arm_2d_helper_film_t *ptThis);
/*!
 * \brief reset the frame index to zero
* \param[in] ptThis the target film
*/
extern
ARM_NONNULL(1)
void arm_2d_helper_film_reset(arm_2d_helper_film_t *ptThis);
/*!
 * \brief reset the frame index to a specified value and wrap around if the
* index number is out of range.
* \param[in] ptThis the target film
* \param[in] nIndex the given index
*/
extern
ARM_NONNULL(1)
void arm_2d_helper_film_set_frame(arm_2d_helper_film_t *ptThis, int32_t nIndex);
/*! @} */
#if defined(__clang__)
# pragma clang diagnostic pop
#elif __IS_COMPILER_ARM_COMPILER_5__
#pragma diag_warning 64
#endif
#ifdef __cplusplus
}
#endif
#endif
|
#!/usr/bin/perl
#Copyright (c) 2013, Zane C. Bowers-Hadley
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without modification,
#are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
#WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
#IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
#INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
#BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
#DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
#LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
#OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
#THE POSSIBILITY OF SUCH DAMAGE.
use strict;
use warnings;
use Getopt::Std;
use Cwd;
use Toader;
use Toader::Gallery;
use Toader::Render::Gallery;
$Getopt::Std::STANDARD_HELP_VERSION = 1;
# prints the program name and version (used by Getopt::Std's --version)
sub main::VERSION_MESSAGE {
    my $version = "toader-gallery 0.1.0";
    print $version."\n";
}
# prints the switch/action summary (used by Getopt::Std's --help)
sub main::HELP_MESSAGE {
    print "\n".
        "Switches:\n".
        "-a <action> The action to perform.\n".
        "-p <path> The path to use.\n".
        "-r <resolution> The resolution to use.\n".
        "-u <url> The URL to use.\n".
        "\n".
        "Actions:\n".
        "dc - Deletes the gallery config for this directory.\n".
        "opg - Gets the output path.\n".
        "ops - Sets the output path.\n".
        "oug - Gets the output URL.\n".
        "ous - Sets the output URL.\n".
        "rsg - Gets the small resolution.\n".
        # BUG FIX: "samll" -> "small"
        "rss - Sets the small resolution.\n".
        "rlg - Gets the large resolution.\n".
        "rls - Sets the large resolution.\n".
        "gs - Get settings.\n".
        "spg - Gets the source path.\n".
        "sps - Sets the source path.\n".
        "sug - Gets the source URL.\n".
        "sus - Sets the source URL.\n".
        "ud - Update the image detail pages.\n".
        "ui - Updates the indexes.\n".
        "us - Updates the scaled images.\n";
}
#gets the options
my %opts=();
getopts('a:p:r:u:', \%opts);

# -a is mandatory; 254 is the generic "bad invocation" exit code used here
if ( ! defined( $opts{a} ) ){
    warn('toader-gallery: No action specified');
    exit 254;
}

#get the current directory
my $dir=getcwd;

# initialize the top-level Toader object for the current directory
my $toader=Toader->new({ dir=>$dir });
if ( $toader->error ){
    warn('toader-gallery: Failed to initialize Toader');
    exit $toader->error;
}

#initialize Toader::Gallery
my $tg=Toader::Gallery->new( $toader );
if ( $tg->error ){
    warn('toader-gallery: Failed to initialize Toader::Gallery');
    exit $tg->error;
}

# point the gallery helper at the current directory
$tg->dirSet( $dir );
if ( $tg->error ){
    warn('toader-gallery: Failed to set the directory for Toader::Gallery');
    exit $tg->error;
}
#handles a request to show the various settings
if ( $opts{a} eq 'gs' ){
    my $outputPath=$tg->outputPathGet;
    my $outputURL=$tg->outputURLget;
    my $srcPath=$tg->srcPathGet;
    my $srcURL=$tg->srcURLget;
    my $resolutionSmall=$tg->resolutionSmallGet;
    my $resolutionLarge=$tg->resolutionLargeGet;
    # only print the settings that are actually defined
    if ( defined( $outputURL )){
        print "outputURL=".$outputURL."\n";
    }
    if ( defined( $outputPath )){
        print "outputPath=".$outputPath."\n";
    }
    if ( defined( $srcURL )){
        print "srcURL=".$srcURL."\n";
    }
    if ( defined( $srcPath )){
        print "srcPath=".$srcPath."\n";
    }
    if ( defined( $resolutionSmall )){
        print "resolutionSmall=".$resolutionSmall."\n";
    }
    if ( defined( $resolutionLarge )){
        print "resolutionLarge=".$resolutionLarge."\n";
    }
    exit 0;
}
#deletes the gallery config for this Toader directory
if ( $opts{a} eq 'dc' ){
    $tg->delConfig;
    if ( $tg->error ){
        warn('toader-gallery: Failed to delete the config for this Toader directory');
        exit $tg->error;
    }
    exit 0;
}
#gets the output path (prints nothing if unset)
if ( $opts{a} eq 'opg' ){
    my $outputPath=$tg->outputPathGet;
    if ( defined( $outputPath )){
        print $outputPath."\n";
    }
    exit 0;
}

#sets the output path from -p
if ( $opts{a} eq 'ops' ){
    $tg->outputPathSet( $opts{p} );
    if ( $tg->error ){
        warn('toader-gallery: Failed to set the output path');
        exit $tg->error;
    }
    exit 0;
}

#gets the output URL (prints nothing if unset)
if ( $opts{a} eq 'oug' ){
    my $outputURL=$tg->outputURLget;
    if ( defined( $outputURL )){
        print $outputURL."\n";
    }
    exit 0;
}

#sets the output URL from -u
if ( $opts{a} eq 'ous' ){
    $tg->outputURLset( $opts{u} );
    if ( $tg->error ){
        warn('toader-gallery: Failed to set the output URL');
        exit $tg->error;
    }
    exit 0;
}
#gets the source path (prints nothing if unset)
if ( $opts{a} eq 'spg' ){
    my $srcPath=$tg->srcPathGet;
    if ( defined( $srcPath )){
        print $srcPath."\n";
    }
    exit 0;
}

#sets the source path from -p
if ( $opts{a} eq 'sps' ){
    $tg->srcPathSet( $opts{p} );
    if ( $tg->error ){
        warn('toader-gallery: Failed to set the source path');
        exit $tg->error;
    }
    exit 0;
}

#gets the source URL (prints nothing if unset)
if ( $opts{a} eq 'sug' ){
    # note: despite the name, this variable holds the source URL
    my $srcPath=$tg->srcURLget;
    if ( defined( $srcPath )){
        print $srcPath."\n";
    }
    exit 0;
}
if ( $opts{a} eq 'sus' ){
$tg->srcURLset( $opts{u} );
if ( $tg->error ){
warn('toader-gallery: Failed to set the source path');
exit $tg->error;
}
exit 0;
}
#gets the large resolution (prints nothing if unset)
if ( $opts{a} eq 'rlg' ){
    my $largeRes=$tg->resolutionLargeGet;
    if ( defined( $largeRes )){
        print $largeRes."\n";
    }
    exit 0;
}
if ( $opts{a} eq 'rls' ){
my $largeRes=$tg->resolutionLargeSet( $opts{r} );
if ( $tg->error ){
warn('toader-gallery: Failed to set the large resolution');
exit $tg->error;
}
exit 0;
}
#gets the small resolution (prints nothing if unset)
if ( $opts{a} eq 'rsg' ){
    my $smallRes=$tg->resolutionSmallGet;
    if ( defined( $smallRes )){
        print $smallRes."\n";
    }
    exit 0;
}

#sets the small resolution from -r
if ( $opts{a} eq 'rss' ){
    $tg->resolutionSmallSet( $opts{r} );
    if ( $tg->error ){
        warn('toader-gallery: Failed to set the small resolution');
        exit $tg->error;
    }
    exit 0;
}
#updates the image detail pages (the old comment wrongly said "scaled images")
if ( $opts{a} eq 'ud' ){
    my $tgr=Toader::Render::Gallery->new({ toader=>$toader, obj=>$tg });
    if ( $tgr->error ){
        warn('toader-gallery: Failed to initialize Toader::Render::Gallery');
        exit $tgr->error;
    }
    $tgr->updateDetails( undef, 1);
    if ( $tgr->error ){
        warn('toader-gallery: updateDetails errored');
        exit $tgr->error;
    }
    exit 0;
}
#updates the indexes (the old comment wrongly said "scaled images")
if ( $opts{a} eq 'ui' ){
    my $tgr=Toader::Render::Gallery->new({ toader=>$toader, obj=>$tg });
    if ( $tgr->error ){
        warn('toader-gallery: Failed to initialize Toader::Render::Gallery');
        exit $tgr->error;
    }
    $tgr->updateIndexes( undef, 1);
    if ( $tgr->error ){
        warn('toader-gallery: updateIndexes errored');
        exit $tgr->error;
    }
    exit 0;
}
#updates the scaled images
if ( $opts{a} eq 'us' ){
    my $tgr=Toader::Render::Gallery->new({ toader=>$toader, obj=>$tg });
    if ( $tgr->error ){
        warn('toader-gallery: Failed to initialize Toader::Render::Gallery');
        exit $tgr->error;
    }
    $tgr->updateScaled( undef, 1);
    if ( $tgr->error ){
        warn('toader-gallery: updateScaled errored');
        exit $tgr->error;
    }
    exit 0;
}

# fell through every handler above: the -a value was not recognized
warn "No recognized -a action specified\n";
exit 254;
=head1 NAME
toader-gallery - Handles gallery related work for Toader.
=head1 SYNOPSIS
toader-gallery -a dc
toader-gallery -a opg
toader-gallery -a ops -p <output path>
toader-gallery -a oug
toader-gallery -a ous -u <output URL>
toader-gallery -a rsg
toader-gallery -a rss -r <small thumbnail resolution>
toader-gallery -a rlg
toader-gallery -a rls -r <large thumbnail resolution>
toader-gallery -a gs
toader-gallery -a spg
toader-gallery -a sps -p <source path>
toader-gallery -a sug
toader-gallery -a sus -p <source URL>
toader-gallery -a ud
toader-gallery -a ui
toader-gallery -a us
=head1 SWITCHES
=head2 -a <action>
This is the action to perform.
=head2 -p <path>
A path to use.
=head2 -r <resolution>
A resolution to use.
=head2 -u <url>
A URL to use.
=head1 ACTIONS
=head2 dc
Deletes the gallery config from the Toader directory.
=head2 opg
Gets the output path.
=head2 ops
Sets the output path.
=head2 oug
Gets the output URL.
=head2 ous
Sets the output URL.
=head2 rsg
Gets the resolution for the small thumbnail.
=head2 rss
Sets the resolution for the small thumbnail.
=head2 rlg
Gets the resolution for the large thumbnail.
=head2 rls
Sets the resolution for the large thumbnail.
=head2 gs
This shows current config settings.
=head2 spg
This gets the source path.
=head2 sps
This sets the source path.
=head2 sug
Gets the source URL.
=head2 sus
Set the source URL.
=head2 ud
Update the image detail pages.
=head2 ui
Update the indexes.
=head2 us
Updates the scaled images.
=head1 AUTHOR
Copyright (c) 2014, Zane C. Bowers-Hadley <vvelox@vvelox.net>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=head1 OSNAMES
unix
=head1 README
toader-gallery - Handles gallery related work for Toader.
=cut
|
@using BulkyBook.Utility;
@using Microsoft.AspNetCore.Http;
@inject IHttpContextAccessor HttpContextAccessor;
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>@ViewData["Title"] - BulkyBookWeb</title>
<link rel="stylesheet" href="~/css/site.css" asp-append-version="true" />
<link rel="stylesheet" href="~/BulkyBookWeb.styles.css" asp-append-version="true" />
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/toastr.js/latest/toastr.min.css" asp-append-version="true" />
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.10.3/font/bootstrap-icons.css">
<link rel="stylesheet" href="//cdn.datatables.net/1.13.2/css/jquery.dataTables.min.css" />
<link rel="stylesheet" href="~/css/BootTheme.css" />
</head>
<body>
<header>
<nav class="navbar navbar-expand-sm navbar-toggleable-sm navbar-dark bg-primary box-shadow mb-3">
<div class="container-fluid">
<a class="navbar-brand" asp-area="Customer" asp-controller="Home" asp-action="Index">BulkyBookWeb</a>
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target=".navbar-collapse" aria-controls="navbarSupportedContent"
aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="navbar-collapse collapse d-sm-inline-flex justify-content-between">
<ul class="navbar-nav flex-grow-1">
<li class="nav-item">
<a class="nav-link text-light" asp-area="Customer" asp-controller="Home" asp-action="Index">Home</a>
</li>
@if (User.IsInRole(SD.Role_User_Admin))
{
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle text-light" data-bs-toggle="dropdown" href="#" role="button" aria-haspopup="true" aria-expanded="false">Content Management</a>
<div class="dropdown-menu">
<a class="dropdown-item" asp-area="Admin" asp-controller="Category" asp-action="Index">Categories</a>
<a class="dropdown-item" asp-area="Admin" asp-controller="CoverType" asp-action="Index">Cover Types</a>
<a class="dropdown-item" asp-area="Admin" asp-controller="Product" asp-action="Index">Products</a>
<div class="dropdown-divider"></div>
<a class="dropdown-item" asp-area="Admin" asp-controller="Company" asp-action="Index">Companies</a>
<div class="dropdown-divider"></div>
<a class="dropdown-item" asp-area="Identity" asp-page="/Account/Register">Create User</a>
</div>
</li>
}
<li class="nav-item">
<a class="nav-link text-light" asp-area="Admin" asp-controller="Order" asp-action="Index">Manage Order</a>
</li>
<li class="nav-item">
<a class="nav-link text-light" asp-area="Customer" asp-controller="Cart" asp-action="Index">
@await Component.InvokeAsync("ShoppingCart")
</a>
</li>
</ul>
<partial name="_LoginPartial" />
</div>
</div>
</nav>
</header>
<div class="container">
<main role="main" class="pb-3">
<partial name="_Notification" />
@RenderBody()
</main>
</div>
@*<footer class="footer text-muted bg-primary">
<div class="container">
© 2023 - BulkyBookWeb
</div>
</footer>*@
<script src="~/lib/jquery/dist/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha1/dist/js/bootstrap.bundle.min.js"></script>
<script src="~/js/site.js" asp-append-version="true"></script>
<script src="//cdn.jsdelivr.net/npm/sweetalert2@11"></script>
<script src="//cdn.datatables.net/1.13.2/js/jquery.dataTables.min.js"></script>
<script src="//cdnjs.cloudflare.com/ajax/libs/toastr.js/latest/toastr.min.js"></script>
<script src="https://cdn.tiny.cloud/1/jlx1voq9vxk39rdbh05hqmf32alr8180xhnusi6o9m1klaqh/tinymce/6/tinymce.min.js" referrerpolicy="origin"></script>
@await RenderSectionAsync("Scripts", required: false)
</body>
</html>
|
# Services Deployment Guides
Looking to serve your application in production? Deploy the Toolkit to your preferred cloud provider by following our guides below:
## Services Deployment Options
- [Single Container Setup](deployment_guides/single_container.md): Useful as a quickstart to run the Toolkit, or deploy to AWS on an EC2 instance.
- [AWS ECS Fargate Deployment](deployment_guides/aws_ecs_single_container.md): Deploy the Toolkit single container to AWS ECS (Fargate).
- [AWS ECS EC2 Deployment](deployment_guides/aws_ecs_single_container_ec2.md): Deploy the Toolkit single container to AWS ECS (EC2).
- [Google Cloud Platform](deployment_guides/gcp_deployment.md): Helps set up your Cloud SQL instance, then build, push, and deploy the backend and frontend containers to Cloud Run.
- [One Click Deploy to GCP](deployment_guides/gcp_one_click_deployment.md): Helps set up your container on Cloud Run.
- Deploying to Azure Container Instance. You can deploy Toolkit with one click to Microsoft Azure Platform: [<img src="https://aka.ms/deploytoazurebutton" height="24px">](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fcohere-ai%2Fcohere-toolkit%2Fmain%2Fazuredeploy.json).
This deployment type uses Azure Container Instances to host the Toolkit. After your deployment is complete click "Go to resource" button.
- Check the logs to see if the container is running successfully:
- click on the "Containers" button on the left side of the screen
- click on the container name
- click on "Logs" tab to see the logs
- Navigate to the "Overview" tab to see the FQDN of the container instance
- Open the \<FQDN\>:4000 in your browser to access the Toolkit
- Deploying to Azure Cloud App. You can deploy Toolkit with one click to Microsoft Azure Platform: [<img src="https://aka.ms/deploytoazurebutton" height="24px">](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2Fcohere-ai%2Fcohere-toolkit%2Fmain%2Fazuredeploy.hpa.json).
This deployment type uses Azure Container App to host the Toolkit. Follow these steps to deploy the Toolkit:
- Select your subscription and resource group. If you don't have a resource group, create a new one.
- Enter the connection string of the format `postgresql+psycopg2://USERNAME:PASSWORD@HOST:PORT/DB_NAME`.
The `HOST` value here is the Public IP address or DNS name of your provisioned PostgreSQL database, and the default `PORT` is 5432.
Make sure to use the username and password pair you set when creating your SQL instance. For example, `postgresql+psycopg2://myuser:mypassword@<your-db-public-ip-address>:5432/toolkit`.
- Enter your Cohere API key.
- Click "Review + create" and then "Create" to deploy the Toolkit.
- After the deployment is complete, click on the "Go to resource group" button.
- Click on the Toolkit container app.
- Click on the "Overview" tab to see the "Application Url" of the container app.
- Navigate to the "Application Url" to access the Toolkit.
To scale the Toolkit, you can enable the Automatic Horizontal Scaling by following this [tutorial](https://learn.microsoft.com/en-us/azure/container-apps/tutorial-scaling).
|
package service
import (
"errors"
"fmt"
"log"
"time"
"github.com/Ahmad940/health360/app/model"
"github.com/Ahmad940/health360/pkg/constant"
"github.com/Ahmad940/health360/pkg/util"
"github.com/Ahmad940/health360/platform/cache"
"github.com/Ahmad940/health360/platform/db"
"github.com/Ahmad940/health360/platform/sms"
gonanoid "github.com/matoous/go-nanoid/v2"
)
// RequestOTP looks up the user for the given country code / phone number,
// creating the account on first use, then asynchronously generates an OTP,
// sends it via SMS, and caches it in Redis (keyed by phone number + OTP)
// for 5 minutes. It returns nil once the user record exists; OTP generation
// and delivery errors occur in the background goroutine and are only logged.
func RequestOTP(param model.Auth) error {
	var user model.User
	err := db.DB.Where("country_code = ? and phone_number = ?", param.CountryCode, param.PhoneNumber).First(&user).Error
	if err != nil {
		// if user not found, create account
		if SqlErrorNotFound(err) {
			user, err = CreateAccount(param)
			if err != nil {
				log.Println("Error creating user account, reason:", err)
				return err
			}
		} else {
			fmt.Println("Error fetching credentials, reason:", err)
			return err
		}
	}

	// everything below is best-effort and runs outside the request path
	go (func() {
		// generate OTP
		otp, err := util.GenerateOTP()
		if err != nil {
			log.Println("Error generating otp, reason:", err)
			return
		}

		message := fmt.Sprintf("Your Health360 one time password is %v", otp)
		phoneNumber := fmt.Sprintf("%v%v", param.CountryCode, param.PhoneNumber)

		// send the OTP via SMS
		err = sms.SendSms(phoneNumber, message)
		if err != nil {
			log.Println("Unable to send sms, reason:", err)
			return
		}

		// generate the auth token that Login will later return for this OTP
		token, err := util.GenerateToken(user.ID)
		if err != nil {
			log.Println("Error occurred while generating token:", err)
			return
		}

		// cache the otp
		key := fmt.Sprintf("%v:%v", phoneNumber, otp)
		// SECURITY NOTE(review): the two lines below also cache a hard-coded
		// "1234" OTP for every phone number, letting anyone log in with 1234.
		// Presumably a dev/test backdoor — remove before production.
		defaultKey := fmt.Sprintf("%v:1234", phoneNumber)
		_ = cache.SetRedisValue(defaultKey, token, time.Minute*5)
		err = cache.SetRedisValue(key, token, time.Minute*5)
		if err != nil {
			log.Println("Unable to cache otp, reason:", err)
			return
		}
	})()

	return nil
}
// Login exchanges a previously issued OTP for an auth token. The OTP is valid
// only if it is still cached in Redis under "<countrycode><number>:<otp>".
func Login(param model.Login) (model.AuthResponse, error) {
	fullNumber := fmt.Sprintf("%v%v", param.CountryCode, param.PhoneNumber)
	cacheKey := fmt.Sprintf("%v:%v", fullNumber, param.OTP)

	// a cache miss means the OTP was never issued or has expired
	token, err := cache.GetRedisValue(cacheKey)
	if err != nil {
		if err.Error() == constant.RedisNotFoundText {
			return model.AuthResponse{}, errors.New("invalid or expired OTP")
		}
		log.Println("Error occurred while generating token:", err)
		return model.AuthResponse{}, err
	}

	// load the user record that the token belongs to
	var account model.User
	err = db.DB.Where("country_code = ? and phone_number = ?", param.CountryCode, param.PhoneNumber).First(&account).Error
	if err != nil {
		if SqlErrorNotFound(err) {
			log.Println("Login - user not found: ", err)
			return model.AuthResponse{}, errors.New("user not found")
		}
		log.Println("Login - error while retrieving user: ", err)
		return model.AuthResponse{}, err
	}

	return model.AuthResponse{
		Token: token,
		User:  account,
	}, nil
}
// CreateAccount creates a new user for the given country code and phone
// number and returns the created record. It fails with "phone number in use"
// if a matching user already exists.
func CreateAccount(param model.Auth) (model.User, error) {
	var user model.User
	err := db.DB.Where("country_code = ? and phone_number = ?", param.CountryCode, param.PhoneNumber).First(&user).Error
	if SqlErrorIgnoreNotFound(err) != nil {
		return model.User{}, err
	}

	// checking if user is registered or not
	if (user != model.User{}) {
		return model.User{}, errors.New("phone number in use")
	}

	newUser := model.User{
		ID:          gonanoid.Must(),
		CountryCode: param.CountryCode,
		Country:     param.Country,
		PhoneNumber: param.PhoneNumber,
	}
	err = db.DB.Create(&newUser).Error
	if err != nil {
		return model.User{}, err
	}

	// BUG FIX: this previously returned an empty model.User{}, so callers
	// (e.g. RequestOTP) generated tokens for an empty user ID. Return the
	// record that was actually created.
	return newUser, nil
}
|
import React from "react";
import Slider from "react-slick";
import "slick-carousel/slick/slick.css";
import "slick-carousel/slick/slick-theme.css";
import { popular } from "../dummyData";
const NewsSliderSection = (props) => {
const settings = {
className: "center",
centerMode: false,
infinite: true,
centerPadding: "0",
slidesToShow: 4,
speed: 500,
rows: 4,
slidesPerRow: 1,
responsive: [
{
breakpoint: 800,
settings: {
slidesToShow: 1,
slidesToScroll: 1,
rows: 4,
},
},
],
};
return (
<>
<section className="popular">
<div className="content">
<Slider {...settings}>
{popular.map((val) => {
return (
<div className="items">
<div className="box shadow pSection">
<div className="images row">
<div className="img">
<img src={val.cover} alt="" />
</div>
</div>
<div className="text row">
<h1 className="title">{val.title.slice(0, 40)}...</h1>
<div className="date">
<i class="fas fa-calendar-days"></i>
<label>{val.date}</label>
</div>
<div className="comment">
<i class="fas fa-comments"></i>
<label>{val.comments}</label>
</div>
</div>
</div>
</div>
);
})}
</Slider>
</div>
</section>
</>
);
};
export default NewsSliderSection;
|
import React from "react";
import Posts from "../../components/Posts/Posts";
import { Container, Grid, Grow, Typography, AppBar } from "@material-ui/core";
import memories from "../../images/memories.png";
import useStyles from "./style";
import Form from "../../components/Form/Form";
import { useDispatch } from "react-redux";
import { useEffect, useState } from "react";
import { getPosts } from "../../store/actions/posts";
function Home() {
const dispatch = useDispatch();
const classes = useStyles();
const [currentId, setCurrentId] = useState(null)
useEffect(() => {
dispatch(getPosts());
}, [dispatch]);
return (
<Container fluid="true">
<AppBar className={classes.appBar} position="static" color="inherit">
<Typography variant="h2" className={classes.heading} align="center">
Memories
</Typography>
<img src={memories} className={classes.image} alt="memoriesLogo" />
</AppBar>
<Grow in>
<Container>
<Grid
container
alignItems="stretch"
className={classes.mainContainer}
spacing={3}
justify="space-between"
>
<Grid item xs={12} sm={7}>
<Posts setCurrentId={setCurrentId}/>
</Grid>
<Grid item xs={12} sm={4}>
<Form currentId={currentId} setCurrentId={setCurrentId}/>
</Grid>
</Grid>
</Container>
</Grow>
</Container>
);
}
export default Home;
|
package rules
import (
"fmt"
"net/url"
"golang.org/x/net/html"
"github.com/octetic/gophetch/media"
)
// FaviconRule is the rule for extracting the favicon URL of a page.
type FaviconRule struct {
	BaseRule
}

// NewFaviconRule builds a FaviconRule preconfigured with the standard
// <link rel="..."> extraction strategies below.
func NewFaviconRule() *FaviconRule {
	return &FaviconRule{
		BaseRule: BaseRule{
			Strategies: faviconStrategies,
		},
	}
}

// faviconStrategies lists the <link> selectors probed for an icon, in
// priority order; the href attribute of the first match is extracted.
var faviconStrategies = []ExtractionStrategy{
	{
		Selectors: []string{
			"link[rel='icon']",
			"link[rel='shortcut icon']",
			"link[rel='apple-touch-icon']",
			"link[rel='apple-touch-icon-precomposed']",
			"link[rel~='mask-icon']",
		},
		Extractor: ExtractAttr("href"),
	},
}
// Extract first delegates to the BaseRule selector strategies; if they yield
// nothing, it falls back to the conventional /favicon.ico at the site root,
// returning it only if it validates as an actual favicon.
func (r *FaviconRule) Extract(node *html.Node, targetURL *url.URL) (ExtractResult, error) {
	if result, err := r.BaseRule.Extract(node, targetURL); err == nil && result.Found() {
		return result, nil
	}

	// No <link>-based favicon; probe the well-known root location.
	fallback := fmt.Sprintf("%s://%s/favicon.ico", targetURL.Scheme, targetURL.Host)
	if !media.IsValidFavicon(fallback) {
		return NewNoResult(), ErrValueNotFound
	}

	return NewStringResult(
		fallback,
		SelectorInfo{
			Attr:     "href",
			InMeta:   false,
			Selector: "favicon.ico",
		},
		true,
	), nil
}
|
import { createSlice, PayloadAction } from "@reduxjs/toolkit";

/** A single user record kept in the store. */
export interface UserState {
  id: string;
  name: string;
  email: string;
  password: string;
}

/** Shape of the "users" slice. */
export interface UsersState {
  users: UserState[];
}

const initialState: UsersState = {
  users: [],
};

const usersSlice = createSlice({
  name: "users",
  initialState,
  reducers: {
    // replaces the whole user list with the payload
    getUsers(state: UsersState, action: PayloadAction<UserState[]>) {
      state.users = action.payload;
    },
  },
});

export const { getUsers } = usersSlice.actions;

// BUG FIX: a store mounting this reducer under "users" has shape
// { users: UsersState }, not { users: UserState[] }. The previous selector
// was mistyped and returned the whole slice object instead of the array.
export const selectUsers = (state: { users: UsersState }) => state.users.users;

const usersReducer = usersSlice.reducer;
export default usersReducer;
|
// SPDX-License-Identifier: Apache-2.0
/*
* Modifications Copyright 2022, Specular contributors
*
* This file was changed in accordance to Apache License, Version 2.0.
*
* Copyright 2021, Offchain Labs, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
pragma solidity ^0.8.0;
import "./AssertionMap.sol";
import {Lib_BVMCodec} from "../../libraries/codec/Lib_BVMCodec.sol";
/// @title IRollup - interface for the optimistic rollup assertion/staking contract.
interface IRollup {
/// @notice Emitted when a new assertion is created by a staker.
event AssertionCreated(
uint256 assertionID, address asserterAddr, bytes32 vmHash, uint256 inboxSize
);
/// @notice Emitted when an assertion is challenged.
event AssertionChallenged(uint256 assertionID, address challengeAddr);
/// @notice Emitted when an assertion is confirmed.
event AssertionConfirmed(uint256 assertionID);
/// @notice Emitted when an assertion is rejected.
event AssertionRejected(uint256 assertionID);
/// @notice Emitted when a staker stakes on an assertion.
event StakerStaked(address stakerAddr, uint256 assertionID);
/// @dev Thrown when an address that has not staked any token calls a staked-only function
error NotStaked();
/// @dev Thrown when the function is called with Insufficient Stake
error InsufficientStake();
/// @dev Thrown when the caller is staked on unconfirmed assertion.
error StakedOnUnconfirmedAssertion();
/// @dev Thrown when transfer fails
error TransferFailed();
/// @dev Thrown when a staker tries to advance stake to invalid assertionId.
error AssertionOutOfRange();
/// @dev Thrown when a staker tries to advance stake to non-child assertion
error ParentAssertionUnstaked();
/// @dev Thrown when a sender tries to create assertion before the minimum assertion time period
error MinimumAssertionPeriodNotPassed();
/// @dev Thrown when parent's state hash is not equal to the start state (or previous state)
error PreviousStateHash();
/// @dev Thrown when a sender tries to create assertion without any tx.
error EmptyAssertion();
/// @dev Thrown when the requested assertion read past the end of current Inbox.
error InboxReadLimitExceeded();
/// @dev Thrown when the challenge assertion Id is not ordered or in range.
error WrongOrder();
/// @dev Thrown when the challenger tries to challenge an unproposed assertion
error UnproposedAssertion();
/// @dev Thrown when the assertion is already resolved
error AssertionAlreadyResolved();
/// @dev Thrown when there is no unresolved assertion
error NoUnresolvedAssertion();
/// @dev Thrown when the challenge period has not passed
error ChallengePeriodPending();
/// @dev Thrown when the challenger and defender didn't attest to sibling assertions
error DifferentParent();
/// @dev Thrown when the assertion's parent is not the last confirmed assertion
error InvalidParent();
/// @dev Thrown when the staker is not in a challenge
error NotInChallenge();
/// @dev Thrown when the two stakers are in different challenge
/// @param staker1Challenge challenge address of staker 1
/// @param staker2Challenge challenge address of staker 2
error InDifferentChallenge(address staker1Challenge, address staker2Challenge);
/// @dev Thrown when the staker is currently in Challenge
error ChallengedStaker();
/// @dev Thrown when all the stakers are not staked
error NotAllStaked();
/// @dev Thrown when a staker's assertion is a descendant of the firstUnresolved assertion
error StakerStakedOnTarget();
/// @dev Thrown when there are staker's present on the assertion
error StakersPresent();
/// @dev Thrown when there are zero stakers
error NoStaker();
/// @dev Thrown when slot is not blank in initialize step
error RedundantInitialized();
/// @dev Thrown when function is called with a zero address argument
error ZeroAddress();
/// @return The AssertionMap contract that tracks created assertions.
function assertions() external view returns (AssertionMap);
/**
 * @param addr User address.
 * @return True if address is staked, else False.
 */
function isStaked(address addr) external view returns (bool);
/**
 * @return The current required stake amount.
 */
function currentRequiredStake() external view returns (uint256);
/**
 * @return confirmedInboxSize size of inbox confirmed
 */
function confirmedInboxSize() external view returns (uint256);
/**
 * @notice Deposits stake on staker's current assertion (or the last confirmed assertion if not currently staked).
 * @notice currently use MNT to stake; stakeAmount Token amount to deposit. Must be > than defined threshold if this is a new stake.
 */
function stake(uint256 stakeAmount, address operator) external;
/**
 * @notice Withdraws stakeAmount from staker's stake by if assertion it is staked on is confirmed.
 * @param stakeAmount Token amount to withdraw. Must be <= sender's current stake minus the current required stake.
 */
function unstake(uint256 stakeAmount) external;
/**
 * @notice Removes stakerAddress from the set of stakers and withdraws the full stake amount to stakerAddress.
 * This can be called by anyone since it is currently necessary to keep the chain progressing.
 * @param stakerAddress Address of staker for which to unstake.
 */
function removeStake(address stakerAddress) external;
/**
 * @notice Advances msg.sender's existing stake to assertionID.
 * @param assertionID ID of assertion to advance stake to. Currently this must be a child of the current assertion.
 * TODO: generalize to arbitrary descendants.
 */
function advanceStake(uint256 assertionID) external;
/**
 * @notice Withdraws all of msg.sender's withdrawable funds.
 */
function withdraw() external;
/**
 * @notice Creates a new DA representing the rollup state after executing a block of transactions (sequenced in SequencerInbox).
 * Block is represented by all transactions in range [prevInboxSize, inboxSize]. The latest staked DA of the sender
 * is considered to be the predecessor. Moves sender stake onto the new DA.
 *
 * The new DA stores the hash of the parameters: vmHash
 *
 * @param vmHash New VM hash.
 * @param inboxSize Size of inbox corresponding to assertion (number of transactions).
 */
function createAssertion(
bytes32 vmHash,
uint256 inboxSize
) external;
/**
 *
 * @notice create assertion with scc state batch
 *
 * @param vmHash New VM hash.
 * @param inboxSize Size of inbox corresponding to assertion (number of transactions).
 * @param _batch Batch of state roots.
 * @param _shouldStartAtElement Index of the element at which this batch should start.
 * @param _signature tss group signature of state batches.
 */
function createAssertionWithStateBatch(
bytes32 vmHash,
uint256 inboxSize,
bytes32[] calldata _batch,
uint256 _shouldStartAtElement,
bytes calldata _signature
) external;
/**
 * @notice Initiates a dispute between a defender and challenger on an unconfirmed DA.
 * @param players Defender (first) and challenger (second) addresses. Must be staked on DAs on different branches.
 * @param assertionIDs Assertion IDs of the players engaged in the challenge. The first ID should be the earlier-created and is the one being challenged.
 * @return Newly created challenge contract address.
 */
function challengeAssertion(address[2] calldata players, uint256[2] calldata assertionIDs)
external
returns (address);
/**
 * @notice Confirms first unresolved assertion. Assertion is confirmed if and only if:
 * (1) there is at least one staker, and
 * (2) challenge period has passed, and
 * (3) predecessor has been confirmed, and
 * (4) all stakers are staked on the assertion.
 */
function confirmFirstUnresolvedAssertion() external;
/**
 * @notice Rejects first unresolved assertion. Assertion is rejected if and only if:
 * (1) all of the following are true:
 * (a) challenge period has passed, and
 * (b) at least one staker exists, and
 * (c) no staker remains staked on the assertion (all have been destroyed).
 * OR
 * (2) predecessor has been rejected
 */
function rejectFirstUnresolvedAssertion() external;
// NOTE(review): the stakerAddress param docs below appear stale — this function
// takes a batch header, not a stakerAddress; confirm against the implementation.
//* @param stakerAddress Address of a staker staked on a different branch to the first unresolved assertion.
//* If the first unresolved assertion's parent is confirmed, this parameter is used to establish that a staker exists
//* on a different branch of the assertion chain. This parameter is ignored when the parent of the first unresolved
//* assertion is not the last confirmed assertion.
function rejectLatestCreatedAssertionWithBatch(Lib_BVMCodec.ChainBatchHeader memory _batchHeader) external;
/**
 * @notice Completes ongoing challenge. Callback, called by a challenge contract.
 * @param winner Address of winning staker.
 * @param loser Address of losing staker.
 */
function completeChallenge(address winner, address loser) external;
/**
 * Emit event to notify sequencers to roll back.
 * @param _shouldRollBack roll back to should start.
 * @param _shouldStartAtElement Index of the element at which this batch should start
 * @param _signature signature of rollback message
 */
function rollbackL2Chain(uint256 _shouldRollBack, uint256 _shouldStartAtElement, bytes memory _signature) external;
}
|
<?php
declare(strict_types=1);
//controller qui créer un endpoint api platform qui ne soit pas lié à une entité (je voudrais faire un endpoint /resolve-pos qui lorsqu'on lui envoie une adresse, renvoie la geoposition (j'utilise l'api d'openstreetmap),
namespace App\Controller;
use Symfony\Component\HttpFoundation\JsonResponse;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\Routing\Annotation\Route;
use Symfony\Component\HttpKernel\Attribute\AsController;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
use Doctrine\ORM\EntityNotFoundException;
#[AsController]
class ResolvePosController
{
    public function __construct()
    {
    }

    /**
     * Resolves a street address (GET parameter `address`) to a latitude/longitude
     * pair using the OpenStreetMap Nominatim search API.
     *
     * Responses:
     *  - 200 with {latitude, longitude} taken from the first Nominatim match
     *  - 404 when the address parameter is missing/empty or no match is found
     *  - 403 when the upstream HTTP request fails
     */
    public function __invoke(Request $request): JsonResponse
    {
        // Bug fix: the original read $request->query->all()['address'], which
        // triggers an undefined-index warning when the parameter is absent.
        // InputBag::get() with a default is the safe equivalent.
        $address = $request->query->get('address', '');
        if (empty($address)) {
            return new JsonResponse(['error' => 'Address not found'], 404);
        }
        $url = sprintf('https://nominatim.openstreetmap.org/search?q=%s&format=json', urlencode($address));
        // Nominatim's usage policy requires a User-Agent identifying the application.
        $options = [
            'http' => [
                'header' => 'User-Agent: Platiny/1.0',
            ],
        ];
        $context = stream_context_create($options);
        $response = file_get_contents($url, false, $context);
        if ($response === false) {
            return new JsonResponse(['error' => 'Access denied'], 403);
        }
        $data = json_decode($response, true);
        if (empty($data)) {
            return new JsonResponse(['error' => 'Address not found'], 404);
        }
        $result = [
            'latitude' => $data[0]['lat'],
            'longitude' => $data[0]['lon'],
        ];
        return new JsonResponse($result);
    }
}
|
#include <unistd.h>
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <ctype.h>
#include <time.h>
#include "BIOS/OS.h"
#include "Hardware/Roscoe.h"
#include "Shared/Version.h"
#include "Shared/rtc.h"
#include "Shared/Interrupt.h"
// This module supports the Dallas DS12887
#define RTC_REG(x) *((volatile uint8_t *) (ROSCOE_RTC + (x)))
// Register definitions
// Time registers
#define RTC_TIME_SECONDS 0x00
#define RTC_TIME_MINUTES 0x02
#define RTC_TIME_HOURS 0x04
// Day of week
#define RTC_DAY_OF_WEEK 0x06
// Date
#define RTC_DATE_DAY 0x07
#define RTC_DATE_MONTH 0x08
#define RTC_DATE_YEAR 0x09
// Control registers
#define RTC_CTRL_A 0x0a
// Bit fields for RTC_CTRL_A
#define RTC_CTRL_A_UIP 0x80
#define RTC_CTRL_A_DV2 0x40
#define RTC_CTRL_A_DV1 0x20
#define RTC_CTRL_A_DV0 0x10
#define RTC_CTRL_A_RS3 0x08
#define RTC_CTRL_A_RS2 0x04
#define RTC_CTRL_A_RS1 0x02
#define RTC_CTRL_A_RS0 0x01
#define RTC_CTRL_B 0x0b
// Bit fields for RTC_CTRL_B
#define RTC_CTRL_B_SET 0x80
#define RTC_CTRL_B_PIE 0x40
#define RTC_CTRL_B_AIE 0x20
#define RTC_CTRL_B_UIE 0x10
#define RTC_CTRL_B_SQWE 0x08
#define RTC_CTRL_B_DM 0x04
#define RTC_CTRL_B_24_12 0x02
#define RTC_CTRL_B_DSE 0x01
#define RTC_CTRL_C 0x0c
#define RTC_CTRL_D 0x0d
// Bit fields for RTC_CTRL_D
#define RTC_CTRL_D_VRT 0x80
// Needed for get_fattime() function prototype
#include "Shared/FatFS/source/ff.h"
// # Of power-on half seconds
static volatile uint64_t sg_u64PowerOnHalfSeconds;
// Current time_t
static volatile time_t sg_eCurrentTime;
static bool sg_bHalfSecond;
// Interrupt handler to increase power on seconds
// Fires at 2Hz (square wave rate configured in RTCInit()). Increments the
// half-second counter on every tick and the wall-clock time_t on every
// second tick.
static __attribute__ ((interrupt)) void RTC2HzInterruptHandler(void)
{
volatile uint8_t u8Value;
// Clear the interrupt
// Reading control register C acknowledges the pending RTC interrupt;
// the value itself is intentionally unused (volatile keeps the read).
u8Value = RTC_REG(RTC_CTRL_C);
++sg_u64PowerOnHalfSeconds;
// If we're on an even half second count, increment the second count
if (0 == (sg_u64PowerOnHalfSeconds & 1))
{
sg_eCurrentTime++;
}
}
// Returns the number of half-second RTC ticks elapsed since power-on,
// truncated to 32 bits.
uint32_t RTCGetPowerOnHalfSeconds(void)
{
	uint32_t u32HalfSeconds;

	u32HalfSeconds = (uint32_t) sg_u64PowerOnHalfSeconds;
	return u32HalfSeconds;
}
// Returns the number of whole seconds elapsed since power-on
// (half-second tick count divided by two, truncated to 32 bits).
uint32_t RTCGetPowerOnSeconds(void)
{
	// Division by 2 on the unsigned counter is equivalent to the
	// original right-shift by one.
	return (uint32_t) (sg_u64PowerOnHalfSeconds / 2);
}
// Hardware read of the time
// Reads the DS12887 date/time registers and converts them to a time_t.
// Assumes the RTC is in binary (DM) and 24-hour mode, as programmed by
// RTCInit() — values are used unconverted, not BCD-decoded.
static time_t RTCGetTimeHardware(void)
{
struct tm sTime;
time_t eTime;
ZERO_STRUCT(sTime);
// Wait for UIP to be 0 so we can get a coherent shot of data
// (the chip sets UIP while an internal update is in progress)
while (RTC_REG(RTC_CTRL_A) & RTC_CTRL_A_UIP);
sTime.tm_sec = RTC_REG(RTC_TIME_SECONDS); // Seconds 0-59
sTime.tm_min = RTC_REG(RTC_TIME_MINUTES); // Minutes = 0-59
sTime.tm_hour = RTC_REG(RTC_TIME_HOURS); // hour = 0-23
sTime.tm_mday = RTC_REG(RTC_DATE_DAY); // mday = 1-31
sTime.tm_mon = RTC_REG(RTC_DATE_MONTH) - 1; // month = 0-11
// Chip stores year as an offset from YEAR_BASELINE; tm_year is years since 1900
sTime.tm_year = ((RTC_REG(RTC_DATE_YEAR) + YEAR_BASELINE) - 1900); // year = 1900
// Turn it in to time(0)
return(mktime(&sTime));
}
// Used by time(0)
// Newlib reentrant gettimeofday hook: reports the interrupt-maintained
// current time with zero microseconds.
// NOTE(review): POSIX gettimeofday() returns 0 on success / -1 on error;
// this returns the current time_t instead — confirm callers (newlib's
// time()) tolerate this before changing it.
int _gettimeofday_r (struct _reent *,
struct timeval *__tp,
void *__tzp)
{
if (__tp)
{
__tp->tv_sec = sg_eCurrentTime;
__tp->tv_usec = 0;
}
return(sg_eCurrentTime);
}
// Sets the time
// Writes eTime into the DS12887 date/time registers and updates the
// software copy of the current time. Values are written in binary
// (chip is configured for binary/24-hour mode by RTCInit()).
void RTCSetTime(time_t eTime)
{
struct tm *psTime;
// Convert to its various bits
psTime = localtime(&eTime);
assert(psTime);
// Lock the set bit to keep the clock from rolling over while it's being updated
RTC_REG(RTC_CTRL_B) = RTC_REG(RTC_CTRL_B) | RTC_CTRL_B_SET;
RTC_REG(RTC_TIME_SECONDS) = psTime->tm_sec; // Seconds 0-59
RTC_REG(RTC_TIME_MINUTES) = psTime->tm_min; // Minutes = 0-59
RTC_REG(RTC_TIME_HOURS) = psTime->tm_hour; // hour = 0-23
RTC_REG(RTC_DATE_DAY) = psTime->tm_mday; // mday = 1-31
RTC_REG(RTC_DATE_MONTH) = psTime->tm_mon + 1; // month = 0-11
// Chip stores year as an offset from YEAR_BASELINE
RTC_REG(RTC_DATE_YEAR) = (psTime->tm_year + 1900) - YEAR_BASELINE; // Year
// Unlock the SET bit
RTC_REG(RTC_CTRL_B) = RTC_REG(RTC_CTRL_B) & ~RTC_CTRL_B_SET;
sg_eCurrentTime = eTime;
}
// # Of ticks (trips through the timer loop) per 125ms
#define TICKS_TO_HZ_CONSTANT 1730
// Returns the CPU's speed (in hz). Do not call this if RTCInit() didn't work
// Benchmarks the CPU by counting timer-loop passes while the RTC runs at
// 8hz, then restores the 2hz rate, resynchronizes the clock, and rounds
// the result to the nearest MHz.
EStatus RTCGetCPUSpeed(uint32_t *pu32CPUSpeedHz)
{
EStatus eStatus = ESTATUS_OK;
uint32_t u32CountPerTick;
uint64_t u64OldPowerOnHalfSeconds;
// Go get the benchmark for this CPU based on RTC interrupts
// Save the tick counter so the benchmark's extra interrupts don't skew it
u64OldPowerOnHalfSeconds = sg_u64PowerOnHalfSeconds;
// Set RTC to 8hz
RTC_REG(RTC_CTRL_A) = (RTC_CTRL_A_DV1 | RTC_CTRL_A_RS3 | RTC_CTRL_A_RS2);
// Now go figure out how many passes we can do
u32CountPerTick = RTCGetTickCounts();
// Restore the RTC to original square wave operation (2hz)
RTC_REG(RTC_CTRL_A) = (RTC_CTRL_A_DV1 | RTC_CTRL_A_RS3 | RTC_CTRL_A_RS2 | RTC_CTRL_A_RS1 | RTC_CTRL_A_RS0);
// Stop interrupts on the RTC temporarily
eStatus = InterruptMaskSet(INTVECT_IRQ1A_RTC,
true);
ERR_GOTO();
// If our RTC is earlier than our build time minus a day,
// then set it to the BIOS's build time
if (RTCGetTimeHardware() < (g_sImageVersion.u64BuildTimestamp - 86400))
{
RTCSetTime(g_sImageVersion.u64BuildTimestamp);
}
// Set the initial time
sg_eCurrentTime = RTCGetTimeHardware();
// Restore power on half seconds
sg_u64PowerOnHalfSeconds = u64OldPowerOnHalfSeconds;
// Restart interrupts on the RTC
eStatus = InterruptMaskSet(INTVECT_IRQ1A_RTC,
false);
ERR_GOTO();
if (pu32CPUSpeedHz)
{
// NOTE(review): the 64-bit product is truncated when stored through the
// uint32_t pointer — fine for CPU speeds below ~4.29GHz; confirm.
*pu32CPUSpeedHz = (uint64_t) (((uint64_t) (u32CountPerTick + 1) * (uint64_t) TICKS_TO_HZ_CONSTANT));
// Round to nearest MHz
*pu32CPUSpeedHz /= 1000000;
*pu32CPUSpeedHz *= 1000000;
}
errorExit:
return(eStatus);
}
// Init the RTC
// Probes and configures the DS12887: programs the oscillator/rate bits,
// verifies registers A/B/D read back as expected (chip-presence check),
// corrects an obviously-stale clock, hooks the 2hz interrupt, and waits
// for the first tick to prove interrupts are flowing.
EStatus RTCInit(void)
{
EStatus eStatus = ESTATUS_OK;
struct tm sTime;
uint32_t u32Loop;
// No update in process, DV1=1 to start the clock, and square wave output
// rate at 500ms
RTC_REG(RTC_CTRL_A) = (RTC_CTRL_A_DV1 | RTC_CTRL_A_RS3 | RTC_CTRL_A_RS2 | RTC_CTRL_A_RS1 | RTC_CTRL_A_RS0);
if (RTC_REG(RTC_CTRL_A) != (RTC_CTRL_A_DV1 | RTC_CTRL_A_RS3 | RTC_CTRL_A_RS2 | RTC_CTRL_A_RS1 | RTC_CTRL_A_RS0))
{
printf("Failed RTC reg A - expected 0x%.2x, got 0x%.2x\n", (RTC_CTRL_A_DV1 | RTC_CTRL_A_RS3 | RTC_CTRL_A_RS2 | RTC_CTRL_A_RS1 | RTC_CTRL_A_RS0), RTC_REG(RTC_CTRL_A));
eStatus = ESTATUS_RTC_NOT_PRESENT;
goto errorExit;
}
// Enable periodic interrupt, enable square wave output, binary date/time (not BCD),
// and 24 hour mode RTC.
RTC_REG(RTC_CTRL_B) = (RTC_CTRL_B_PIE | RTC_CTRL_B_SQWE | RTC_CTRL_B_DM | RTC_CTRL_B_24_12);
if (RTC_REG(RTC_CTRL_B) != (RTC_CTRL_B_PIE | RTC_CTRL_B_SQWE | RTC_CTRL_B_DM | RTC_CTRL_B_24_12))
{
printf("Failed RTC reg B - expected 0x%.2x, got 0x%.2x\n", (RTC_CTRL_B_PIE | RTC_CTRL_B_SQWE | RTC_CTRL_B_DM | RTC_CTRL_B_24_12), RTC_REG(RTC_CTRL_B));
eStatus = ESTATUS_RTC_NOT_PRESENT;
goto errorExit;
}
// Take a look at control register D. VRT should be set, all others are clear
if (RTC_REG(RTC_CTRL_D) != RTC_CTRL_D_VRT)
{
printf("Failed RTC reg D - expected 0x%.2x, got 0x%.2x\n", RTC_CTRL_D_VRT, RTC_REG(RTC_CTRL_D));
eStatus = ESTATUS_RTC_NOT_PRESENT;
goto errorExit;
}
// All good! Let's see if the clock is reasonable
ZERO_STRUCT(sTime);
// If our RTC is earlier than our build time minus a day,
// then set it to the BIOS's build time
if (RTCGetTimeHardware() < (g_sImageVersion.u64BuildTimestamp - 86400))
{
RTCSetTime(g_sImageVersion.u64BuildTimestamp);
}
// Set the initial time
sg_eCurrentTime = RTCGetTimeHardware();
// Hook up the power on half seconds timer
eStatus = InterruptHook(INTVECT_IRQ1A_RTC,
RTC2HzInterruptHandler);
ERR_GOTO();
// Now unmask the interrupt
eStatus = InterruptMaskSet(INTVECT_IRQ1A_RTC,
false);
ERR_GOTO();
// Now we loop and wait for an interrupt to occur
u32Loop = 1000000;
while (u32Loop)
{
if (sg_u64PowerOnHalfSeconds)
{
eStatus = ESTATUS_OK;
break;
}
u32Loop--;
}
if (0 == u32Loop)
{
// This means we haven't seen an interrupt from the RTC
// NOTE(review): the failure status below is commented out, so a timeout
// still returns ESTATUS_OK — confirm whether this is intentional.
// eStatus = ESTATUS_NO_INTERRUPTS;
goto errorExit;
}
errorExit:
return(eStatus);
}
// Gets the current time/date and returns it in the following format
// (FatFS DOS-timestamp packing expected by ff.c):
//
// Bits 31-25 - Year origin from 1980 (e.g. 37 for 2017)
// Bits 24-21 - Month (1-12)
// Bits 20-16 - Day (1-31)
// Bits 15-11 - Hour (0-23)
// Bits 10-5 - Minute (0-59)
// Bits 4-0 - Seconds /2 (So 48 seconds would be a value of 24)
DWORD get_fattime(void)
{
struct tm *psTime;
time_t eTime;
eTime = time(0);
psTime = localtime(&eTime);
assert(psTime);
// Shifts bind tighter than |, so each field is positioned before being
// OR'd together; tm_sec >> 1 gives the 2-second resolution field.
return (DWORD)(psTime->tm_year - 80) << 25 |
(DWORD)(psTime->tm_mon + 1) << 21 |
(DWORD)psTime->tm_mday << 16 |
(DWORD)psTime->tm_hour << 11 |
(DWORD)psTime->tm_min << 5 |
(DWORD)psTime->tm_sec >> 1;
}
|
# 信息可视化
信息可视化是一门将数据用视觉的方式来呈现的学科。它可以让我们通过可视化方法更好地理解数据和信息。假设你有一个庞大的数据集或是一个复杂的统计学模型,为了让它更加生动和易懂,我们可以使用一些可视化技术,如饼图、柱状图、折线图、雷达图、热力图、地图等,把数据变成图形,让读者可以一目了然地看出其中的规律和趋势。
首先我们来了解一些基本的可视化技术。饼图可以根据不同数据的占比大小展示出数据的分布情况,例如一个销售排行榜;柱状图则可用于比较数量大小,例如比较同时段不同地区的销售情况;折线图则可用于展示数据随时间变化的趋势,例如展示一段时间内网站流量的变化情况;雷达图则可用于比较多维度的数据之间的关系,例如比较运动员在不同项目中的表现。
为了让可视化更加生动和具有趣味性,我们可以采用一些高级技术。例如,我们可以利用动画技术来展示复杂的数据模型,这样用户在不同的动画效果中可以更深刻地理解数据的本质;我们可以利用虚拟现实技术,使用户可以自由地在三维空间中探索数据的关系,这样用户可以在更加直观的环境下感受数据的内在联系。
总的来说,信息可视化是一项旨在将统计数据、科学数据和社会数据等信息变成视觉现象,以客观、绘画式的形式展现,帮助人们更好地观察数据,分析数据,理解数据以及完整表达数据的学科。
|
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:flutter_screenutil/flutter_screenutil.dart';
import 'package:skin/core/utils/colors_app.dart';
import '../../../../../core/utils/func.dart';
import '../../../../../core/widget/custom_buttom.dart';
import '../../../../../core/widget/custom_text_form_field.dart';
import '../../../../core/routes/app_routes.dart';
import '../../../../core/widget/back_widget.dart';
import '../../../../core/widget/loading_widget.dart';
import '../cubit/login_cubit/login_cubit.dart';
import '../cubit/login_cubit/login_state.dart';
/// Login screen: collects an email address and password, validates them via
/// a [Form], and delegates authentication to [LoginCubit].
///
/// State handling (via [BlocConsumer]):
///  - [LoginErrorState]   -> red snackbar with the error message
///  - [LoginSuccessState] -> green snackbar, then replace route with start screen
///  - [LoginLoadingState] -> full-screen [LoadingWidget]
class LoginScreen extends StatelessWidget {
  const LoginScreen({super.key});
  @override
  Widget build(BuildContext context) {
    return SafeArea(
      child: Scaffold(
        backgroundColor: AppColors.blue,
        body: BlocConsumer<LoginCubit, LoginState>(listener: (context, state) {
          if (state is LoginErrorState) {
            // On failed login, surface the error message in a red snackbar.
            showSnackBar(context, state.errorMs, Colors.red);
          } else if (state is LoginSuccessState) {
            showSnackBar(context, 'Login Success', Colors.green);
            // On successful login, replace this route with the start screen.
            navigateReplacement(context: context, route: Routes.startScreen);
          }
        }, builder: (context, state) {
          final cubit = BlocProvider.of<LoginCubit>(context);
          return Form(
            key: cubit.loginKey,
            child: state is LoginLoadingState
                ? const LoadingWidget()
                : Padding(
                    padding: const EdgeInsets.all(12.0),
                    child: SingleChildScrollView(
                      child: Column(
                        mainAxisAlignment: MainAxisAlignment.center,
                        crossAxisAlignment: CrossAxisAlignment.center,
                        children: [
                          const Align(
                            alignment: Alignment.topLeft,
                            child: Padding(
                              padding: EdgeInsets.all(8.0),
                              child: BackWidget(),
                            ),
                          ),
                          SizedBox(
                            height: 180.h,
                          ),
                          Text(
                            'تسجيل الدخول',
                            style: TextStyle(
                              fontSize: 35.sp,
                              fontWeight: FontWeight.w500,
                            ),
                          ),
                          SizedBox(
                            height: 25.h,
                          ),
                          // Email field.
                          // NOTE(review): the validator accepts Gmail
                          // addresses only — confirm this is intended.
                          CustomTextFormField(
                            controller: cubit.emailController,
                            hintText: 'البريــــد الإلكــــتروني',
                            validator: (data) {
                              if (data!.isEmpty ||
                                  !data.contains('@gmail.com')) {
                                return "Enter Your Email";
                              }
                              return null;
                            },
                          ),
                          SizedBox(
                            height: 18.h,
                          ),
                          // Password field (obscured).
                          CustomTextFormField(
                            controller: cubit.passwordController,
                            hintText: 'كلمة المـــــــرور',
                            obscureText: true,
                            validator: (data) {
                              if (data!.isEmpty) {
                                return "Enter Your password";
                              }
                              return null;
                            },
                          ),
                          SizedBox(
                            height: 8.h,
                          ),
                          // "Forgot password?" link.
                          GestureDetector(
                            onTap: () {
                              navigate(
                                  context: context,
                                  route: Routes.forgetPassword);
                            },
                            child: Align(
                              alignment: Alignment.centerRight,
                              child: Text(
                                'هل نسيت كلمة المرور ؟',
                                style: TextStyle(
                                  fontSize: 23.sp,
                                  fontWeight: FontWeight.w500,
                                ),
                              ),
                            ),
                          ),
                          SizedBox(
                            height: 25.h,
                          ),
                          // Submit: validate the form, then trigger the cubit's login.
                          CustomButton(
                            height: 60.h,
                            width: 300.w,
                            fontSize: 20.sp,
                            background: AppColors.redDeep,
                            fontWeight: FontWeight.bold,
                            onPressed: () async {
                              if (cubit.loginKey.currentState!.validate()) {
                                await cubit.login();
                              }
                            },
                            text: 'تســـجـيل الدخول',
                          ),
                        ],
                      ),
                    ),
                  ),
          );
        }),
      ),
    );
  }
}
|
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from django.http import Http404
from .models import Game, Gamer
from .forms import GameForm, BorrowForm
# Create your views here.
def index(request):
    """Render the Board Gamer landing page."""
    template = 'board_gamer/index.html'
    return render(request, template)
@login_required
def games(request):
    """Show all games, oldest first."""
    all_games = Game.objects.order_by('date_added')
    return render(request, 'board_gamer/games.html', {'games': all_games})
@login_required
def game(request, game_id):
    """Show a single game.

    Raises Http404 for an unknown id instead of letting Game.DoesNotExist
    propagate as a 500 (consistent with edit_game's use of Http404).
    """
    try:
        game = Game.objects.get(id=game_id)
    except Game.DoesNotExist:
        raise Http404
    context = {'game': game}
    return render(request, 'board_gamer/game.html', context)
@login_required
def new_game(request):
    """Add a new game owned by the current user."""
    if request.method == 'POST':
        # POST data submitted; validate and save with the current user as owner.
        form = GameForm(data=request.POST)
        if form.is_valid():
            created = form.save(commit=False)
            created.owner = request.user
            created.save()
            return redirect('board_gamer:games')
    else:
        # Initial GET: present a blank form.
        form = GameForm()
    # Re-display a blank or invalid form.
    return render(request, 'board_gamer/new_game.html', {'form': form})
@login_required
def edit_game(request, game_id):
    """Edit an existing game; only its owner may do so (404 otherwise)."""
    game = Game.objects.get(id=game_id)
    if game.owner != request.user:
        raise Http404
    if request.method == 'POST':
        # POST data submitted; validate and persist the changes.
        form = GameForm(instance=game, data=request.POST)
        if form.is_valid():
            form.save()
            return redirect('board_gamer:game', game_id=game.id)
    else:
        # Initial GET: pre-fill the form with the current game info.
        form = GameForm(instance=game)
    return render(request, 'board_gamer/edit_game.html', {'game': game, 'form': form})
@login_required
def borrow_game(request, game_id):
    """Borrow an existing game.

    Raises Http404 for an unknown id instead of letting Game.DoesNotExist
    propagate as a 500. Intentionally has no owner check: any logged-in
    user may borrow.
    """
    try:
        game = Game.objects.get(id=game_id)
    except Game.DoesNotExist:
        raise Http404
    if request.method != 'POST':
        # Initial request, pre-fill form with current game info.
        form = BorrowForm(instance=game)
    else:
        # POST data submitted, process data.
        form = BorrowForm(instance=game, data=request.POST)
        if form.is_valid():
            form.save()
            return redirect('board_gamer:game', game_id=game.id)
    context = {'game': game, 'form': form}
    return render(request, 'board_gamer/borrow_game.html', context)
|
<?php
/**
* Fuel is a fast, lightweight, community driven PHP 5.4+ framework.
*
* @package Fuel
* @version 1.8.1
* @author Fuel Development Team
* @license MIT License
* @copyright 2010 - 2018 Fuel Development Team
* @link http://fuelphp.com
*/
namespace Fuel\Core;
class Cache_Storage_Xcache extends \Cache_Storage_Driver
{
	/**
	 * @const string Tag used for opening & closing cache properties
	 */
	const PROPS_TAG = 'Fuel_Cache_Properties';

	/**
	 * @var array driver specific configuration
	 */
	protected $config = array();

	// ---------------------------------------------------------------------

	/**
	 * Validates driver configuration and verifies the XCache extension
	 * is available.
	 *
	 * @param string $identifier cache identifier
	 * @param array  $config     driver configuration
	 * @throws \FuelException when the XCache PHP extension is not loaded
	 */
	public function __construct($identifier, $config)
	{
		parent::__construct($identifier, $config);
		$this->config = isset($config['xcache']) ? $config['xcache'] : array();
		// make sure we have an id
		$this->config['cache_id'] = $this->_validate_config('cache_id', isset($this->config['cache_id'])
			? $this->config['cache_id'] : 'fuel');
		// check for an expiration override
		$this->expiration = $this->_validate_config('expiration', isset($this->config['expiration'])
			? $this->config['expiration'] : $this->expiration);
		// do we have the PHP XCache extension available
		if ( ! function_exists('xcache_set') )
		{
			throw new \FuelException('Your PHP installation doesn\'t have XCache loaded.');
		}
	}

	// ---------------------------------------------------------------------

	/**
	 * Check if other caches or files have been changed since cache creation
	 *
	 * @param array $dependencies identifiers of caches this one depends on
	 * @return bool false when any dependency is missing or newer than this cache
	 */
	public function check_dependencies(array $dependencies)
	{
		foreach($dependencies as $dep)
		{
			// get the section name and identifier
			$sections = explode('.', $dep);
			if (count($sections) > 1)
			{
				$identifier = array_pop($sections);
				$sections = '.'.implode('.', $sections);
			}
			else
			{
				$identifier = $dep;
				$sections = '';
			}
			// get the cache index (xcache_get() returns false on a miss;
			// the isset() below handles that safely)
			$index = xcache_get($this->config['cache_id'].$sections);
			// get the (key, created) pair from the index
			$key = isset($index[$identifier][0]) ? $index[$identifier] : false;
			// missing, or created after this cache?
			if ($key === false or $key[1] > $this->created)
			{
				return false;
			}
		}
		return true;
	}

	/**
	 * Delete Cache
	 */
	public function delete()
	{
		// get the XCache key for the cache identifier
		$key = $this->_get_key(true);
		// delete the key from the xcache store
		$key and xcache_unset($key);
		$this->reset();
	}

	/**
	 * Purge all caches
	 *
	 * @param string $section limit purge to subsection
	 * @return void (despite the historic docblock, no value is returned)
	 */
	public function delete_all($section)
	{
		// determine the section index name
		$section = $this->config['cache_id'].(empty($section) ? '' : '.'.$section);
		// get the directory index
		$index = xcache_get($this->config['cache_id'].'__DIR__');
		if (is_array($index))
		{
			$dirs = array();
			foreach ($index as $dir)
			{
				if (strpos($dir, $section) === 0)
				{
					$dirs[] = $dir;
					$list = xcache_get($dir);
					foreach ($list as $item)
					{
						xcache_unset($item[0]);
					}
					xcache_unset($dir);
				}
			}
			// update the directory index
			$dirs and xcache_set($this->config['cache_id'].'__DIR__', array_diff($index, $dirs));
		}
	}

	// ---------------------------------------------------------------------

	/**
	 * Prepend the cache properties
	 *
	 * @return string payload: serialized properties header followed by contents
	 */
	protected function prep_contents()
	{
		$properties = array(
			'created' => $this->created,
			'expiration' => $this->expiration,
			'dependencies' => $this->dependencies,
			'content_handler' => $this->content_handler,
		);
		$properties = '{{'.static::PROPS_TAG.'}}'.json_encode($properties).'{{/'.static::PROPS_TAG.'}}';
		return $properties.$this->contents;
	}

	/**
	 * Remove the prepended cache properties and save them in class properties
	 *
	 * @param string $payload
	 * @throws \UnexpectedValueException when the properties header is absent or unparsable
	 */
	protected function unprep_contents($payload)
	{
		$properties_end = strpos($payload, '{{/'.static::PROPS_TAG.'}}');
		if ($properties_end === FALSE)
		{
			throw new \UnexpectedValueException('Cache has bad formatting');
		}
		$this->contents = substr($payload, $properties_end + strlen('{{/'.static::PROPS_TAG.'}}'));
		$props = substr(substr($payload, 0, $properties_end), strlen('{{'.static::PROPS_TAG.'}}'));
		$props = json_decode($props, true);
		if ($props === NULL)
		{
			throw new \UnexpectedValueException('Cache properties retrieval failed');
		}
		$this->created = $props['created'];
		// stored expiration is an absolute timestamp; convert back to a relative TTL
		$this->expiration = is_null($props['expiration']) ? null : (int) ($props['expiration'] - time());
		$this->dependencies = $props['dependencies'];
		$this->content_handler = $props['content_handler'];
	}

	/**
	 * Save a cache, this does the generic pre-processing
	 *
	 * @return bool success
	 * @throws \RuntimeException when the xcache store rejects the write
	 */
	protected function _set()
	{
		// get the xcache key for the cache identifier
		$key = $this->_get_key();
		$payload = $this->prep_contents();
		// adjust the expiration, xcache uses a TTL instead of a timestamp
		$expiration = is_null($this->expiration) ? 0 : (int) ($this->expiration - $this->created);
		// write it to the xcache store
		if (xcache_set($key, $payload, $expiration) === false)
		{
			throw new \RuntimeException('Xcache returned failed to write. Check your configuration.');
		}
		// update the index
		$this->_update_index($key);
		return true;
	}

	/**
	 * Load a cache, this does the generic post-processing
	 *
	 * @return bool success
	 */
	protected function _get()
	{
		// get the xcache key for the cache identifier
		$key = $this->_get_key();
		// fetch the cached data from the xcache store
		$payload = xcache_get($key);
		// bug fix: xcache_get() returns false on a miss; don't pass that to
		// unprep_contents(), which expects a string payload
		if ( ! is_string($payload))
		{
			return false;
		}
		try
		{
			$this->unprep_contents($payload);
		}
		catch (\UnexpectedValueException $e)
		{
			return false;
		}
		return true;
	}

	/**
	 * validate a driver config value
	 *
	 * @param string $name name of the config variable to validate
	 * @param mixed $value
	 * @return mixed sanitized value ('fuel' fallback for cache_id, null for bad expiration)
	 */
	private function _validate_config($name, $value)
	{
		switch ($name)
		{
			case 'cache_id':
				if (empty($value) or ! is_string($value))
				{
					$value = 'fuel';
				}
				break;
			case 'expiration':
				if (empty($value) or ! is_numeric($value))
				{
					$value = null;
				}
				break;
			default:
				break;
		}
		return $value;
	}

	/**
	 * get's the xcache key belonging to the cache identifier
	 *
	 * @param bool $remove if true, remove the key retrieved from the index
	 * @return string|bool the key, or false when removing a non-existent entry
	 */
	protected function _get_key($remove = false)
	{
		// get the current index information
		list($identifier, $sections, $index) = $this->_get_index();
		// get the key from the index
		$key = isset($index[$identifier][0]) ? $index[$identifier][0] : false;
		if ($remove === true)
		{
			if ( $key !== false )
			{
				unset($index[$identifier]);
				xcache_set($this->config['cache_id'].$sections, $index);
			}
		}
		else
		{
			// create a new key if needed
			$key === false and $key = $this->_new_key();
		}
		return $key;
	}

	/**
	 * generate a new unique key for the current identifier
	 *
	 * @return string 32-char md5 of the cache_id plus a random-seeded uniqid
	 */
	protected function _new_key()
	{
		$key = '';
		while (strlen($key) < 32)
		{
			$key .= mt_rand(0, mt_getrandmax());
		}
		return md5($this->config['cache_id'].'_'.uniqid($key, TRUE));
	}

	/**
	 * Get the section index
	 *
	 * @return array containing the identifier, the sections, and the section index
	 */
	protected function _get_index()
	{
		// get the section name and identifier
		$sections = explode('.', $this->identifier);
		if (count($sections) > 1)
		{
			$identifier = array_pop($sections);
			$sections = '.'.implode('.', $sections);
		}
		else
		{
			$identifier = $this->identifier;
			$sections = '';
		}
		// bug fix: xcache_get() returns false when the index doesn't exist
		// yet; normalize that to an empty array so callers (_update_index)
		// can safely treat the third element as an array
		$index = xcache_get($this->config['cache_id'].$sections);
		is_array($index) or $index = array();
		return array($identifier, $sections, $index);
	}

	/**
	 * Update the section index
	 *
	 * @param string $key cache key
	 */
	protected function _update_index($key)
	{
		// get the current index information
		list($identifier, $sections, $index) = $this->_get_index();
		// store the key in the index and write the index back
		// (the original wrote the identical data twice: once via this
		// assignment and again through a redundant array_merge())
		$index[$identifier] = array($key, $this->created);
		xcache_set($this->config['cache_id'].$sections, $index);
		// get the directory index
		$index = xcache_get($this->config['cache_id'].'__DIR__');
		if (is_array($index))
		{
			if (!in_array($this->config['cache_id'].$sections, $index))
			{
				$index[] = $this->config['cache_id'].$sections;
			}
		}
		else
		{
			$index = array($this->config['cache_id'].$sections);
		}
		// update the directory index (0 TTL = never expire)
		xcache_set($this->config['cache_id'].'__DIR__', $index, 0);
	}
}
|
import os
import re
import requests
import json
import traceback
import datetime
from datetime import date
from github import Github
# Relevance AI credentials; indexing os.environ raises KeyError at import
# time if either variable is unset, failing fast in CI.
RELEVANCE_API_KEY = os.environ["RELEVANCE_API_KEY"]
RELEVANCE_REGION = os.environ["RELEVANCE_REGION"]
# Working directories: current exports and archived (removed) templates.
os.makedirs("templates", exist_ok=True)
os.makedirs("archive", exist_ok=True)
# Create a date object for the current date
current_date = str(date.today())  # e.g. '2024-01-31'; used in commit messages
current_timestamp = str(datetime.datetime.now())  # used to build a unique branch name
def make_valid_ref_name(name):
    """Turn an arbitrary string into a git-ref-safe component:
    spaces become hyphens, anything outside [A-Za-z0-9_-] is dropped."""
    return re.sub(r"[^\w-]", "", name.replace(" ", "-"))
def clean_filename(f):
    """Build a filesystem-safe '<title>--<studio_id>.json' filename from a
    studio record (dict with 'title' and 'studio_id' keys)."""
    safe_title = re.sub(r"[^\w\-_.]", "_", f["title"]).lower()
    return "{}--{}.json".format(safe_title, f["studio_id"])
def unclean_filename(f):
    """Roughly invert clean_filename: take the part before '--', turn
    '-', '_' and '.' into spaces, and title-case the result."""
    title_part = f.split("--")[0]
    return re.sub(r"[\-_.]", " ", title_part).title()
def create_pr(credential, region, reference="default", datatype="tools"):
    """Mirror public Relevance AI 'tools' or 'agents' into the repo via a PR.

    Fetches the public items for the project encoded in ``credential``
    ("<project_id>:<api_key>"), writes them as JSON files under
    ``templates/<reference>/<datatype>``, archives files whose upstream item
    disappeared, and opens a pull request on a fresh feature branch.
    """
    templates_folder = f"templates/{reference}/{datatype}"
    archive_folder = f"archive/{reference}/{datatype}"
    os.makedirs(templates_folder, exist_ok=True)
    os.makedirs(archive_folder, exist_ok=True)
    if datatype == "tools":
        url = f"https://api-{region}.stack.tryrelevance.com/latest/studios/list"
        response = requests.get(
            url,
            headers={"Authorization" : credential},
            params={
                "page_size" : 9999,
                # Filter is a JSON string with the project id substituted in.
                "filters" : '[{"field":"project","condition":"==","condition_value":"project_id","filter_type":"exact_match"}]'.replace("project_id", credential.split(":")[0])
            }
        )
    elif datatype == "agents":
        url = f"https://api-{region}.stack.tryrelevance.com/latest/agents/list"
        response = requests.get(
            url,
            headers={"Authorization" : credential},
            params={
                "page_size" : 9999,
                # NOTE(review): unlike the "tools" branch this passes a Python
                # list rather than a JSON string; requests form-encodes it
                # differently -- confirm the API accepts this shape.
                "filters" : [{"field":"project","condition":"==","condition_value":credential.split(":")[0],"filter_type":"exact_match"}]
            }
        )
    # Keep only items flagged public.
    list_of_results = [r for r in response.json()['results'] if r['public'] ]
    gh = Github(os.environ["GITHUB_TOKEN"])
    repo = gh.get_repo(os.environ["GITHUB_REPOSITORY"])
    # One feature branch per run, named after the run timestamp.
    new_branch_name = f"feature/{make_valid_ref_name(current_timestamp)}"
    new_branch = repo.create_git_ref(
        ref=f"refs/heads/{new_branch_name}",
        sha=repo.get_branch("main").commit.sha
    )
    # Filenames that should exist after this sync.
    current_list = []
    for obj in list_of_results:
        current_list.append(clean_filename(obj))
    # Check if the file requires archiving
    print(f"Checking for files to archive in {reference}")
    for file in os.listdir(templates_folder):
        if file not in current_list:
            filepath = f"{templates_folder}/{file}"
            archive_filepath = f"{archive_folder}/{file}"
            with open(filepath, "r") as f:
                content = f.read()
            archive_commit_message = f"Archiving | {unclean_filename(file)} | {file.split('--')[1]} | {current_date}"
            try:
                # Move the file both locally and on the remote branch.
                sha = repo.get_contents(filepath).sha
                os.rename(filepath, archive_filepath)
                repo.delete_file(filepath, archive_commit_message, sha, branch=new_branch_name)
                repo.create_file(archive_filepath, archive_commit_message, content, branch=new_branch_name)
            except Exception as e:
                # Best-effort: log and continue with the remaining files.
                traceback.print_exc()
    print(f"Looping through {reference}")
    # Loop through the tools in the cloud
    for i, obj in enumerate(list_of_results):
        # Volatile fields would cause spurious diffs on every run.
        if "metrics" in obj: del obj["metrics"]
        if "update_date_" in obj: del obj["update_date_"]
        file = clean_filename(obj)
        filepath = f"{templates_folder}/{file}"
        file_exists = False
        if os.path.exists(filepath):
            file_exists = True
        # Write locally, then re-read so the committed bytes match disk.
        with open(filepath, "w") as f:
            json.dump(obj, f, indent=4)
        with open(filepath, "r") as f:
            content = f.read()
        if file_exists:
            commit_message = f"Updating | {unclean_filename(file)} | {file.split('--')[1]} | {current_date}"
            try:
                sha = repo.get_contents(filepath).sha
                status = repo.update_file(filepath, commit_message, content, sha, branch=new_branch_name)
            except Exception as e:
                traceback.print_exc()
        else:
            commit_message = f"New | {unclean_filename(file)} | {file.split('--')[1]} | {current_date}"
            repo.create_file(filepath, commit_message, content, branch=new_branch_name)
    print("Making pull request")
    pull_request = repo.create_pull(
        title=f"{reference} | {current_date} changes",
        body=f"{reference} | {current_date} changes",
        head=new_branch_name,
        base="main"
    )
    if pull_request.changed_files > 0:
        commit_message = f"{current_date} changes"
        pull_request.edit(body=commit_message)
    else:
        #close pull request
        pull_request.edit(state="closed")
        # NOTE(review): merging a PR that was just closed will raise; this
        # looks misplaced -- confirm whether merge() belongs in the branch
        # above (i.e. merge PRs that DO have changes).
        pull_request.merge()
# Entry point: mirror public tools first, then agents.
create_pr(RELEVANCE_API_KEY, RELEVANCE_REGION, datatype="tools")
create_pr(RELEVANCE_API_KEY, RELEVANCE_REGION, datatype="agents")
|
import * as THREE from "three";
import { useRef, useEffect } from "react";
import { useControls } from "leva";
export default function ThreeElementCH5() {
  // Wireframe source mesh and a second mesh that mirrors its geometry.
  const circleRef = useRef<THREE.Mesh>(null);
  const cloneRef = useRef<THREE.Mesh>(null);

  // Interactive circle-geometry parameters exposed via the leva panel.
  const controls = useControls({
    radius: { value: 1, min: 0.1, max: 10, step: 0.1 },
    seg: { value: 32, min: 1, max: 100, step: 1 },
    thetaStart: { value: 0, min: 0, max: 360, step: 0.1 },
    thetaLength: { value: 360, min: 0, max: 360, step: 0.1 },
  });

  // Re-share the (re-created) geometry with the clone whenever controls change.
  useEffect(() => {
    if (!cloneRef.current || !circleRef.current) return;
    cloneRef.current.geometry = circleRef.current.geometry;
  }, [controls]);

  const { radius, seg, thetaStart, thetaLength } = controls;

  return (
    <>
      <directionalLight position={[5, 5, 5]} />
      <mesh ref={circleRef} position={[0, 0, 0]}>
        <circleGeometry
          args={[
            radius,
            seg,
            THREE.MathUtils.degToRad(thetaStart),
            THREE.MathUtils.degToRad(thetaLength),
          ]}
        />
        <meshStandardMaterial wireframe />
      </mesh>
      <mesh ref={cloneRef}>
        <meshStandardMaterial color="red" />
      </mesh>
    </>
  );
}
|
package com.dominos.game;
import java.util.ArrayList;
import java.util.List;
class CPlayer {
    // Dominoes currently held by the player.
    private List<CDomino> hand;

    public CPlayer() {
        hand = new ArrayList<>();
    }

    /** Adds a domino to the player's hand. */
    public void addToHand(CDomino domino) {
        hand.add(domino);
    }

    /** Returns true if any domino in hand matches the given pip value. */
    public boolean hasMatchingDomino(int value) {
        for (CDomino domino : hand) {
            if (domino.matches(value)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Removes and returns the first domino matching the given value,
     * or null if the player cannot match.
     */
    public CDomino playDomino(int value) {
        // Index-based loop: removing inside a for-each invalidates the iterator.
        for (int i = 0; i < hand.size(); i++) {
            if (hand.get(i).matches(value)) {
                return hand.remove(i);
            }
        }
        return null; // Player couldn't match any domino
    }

    /** Removes and returns the first domino in hand, or null when empty. */
    public CDomino playFirstDomino() {
        // Simpler and safer than removing inside a for-each loop.
        return hand.isEmpty() ? null : hand.remove(0);
    }

    public int getHandSize() {
        return hand.size();
    }

    public List<CDomino> getHand() {
        return hand;
    }

    @Override
    public String toString() {
        StringBuilder handString = new StringBuilder("Player's hand: [");
        for (int i = 0; i < hand.size(); i++) {
            handString.append(hand.get(i).toString());
            if (i < hand.size() - 1) {
                handString.append(", ");
            }
        }
        return handString.append("]").toString();
    }
}
|
<?php
namespace App\Mail;
use Illuminate\Bus\Queueable;
use Illuminate\Contracts\Queue\ShouldQueue;
use Illuminate\Mail\Mailable;
use Illuminate\Queue\SerializesModels;
class QuoteUserMail extends Mailable
{
    use Queueable, SerializesModels;
    /**
     * Quotation payload passed to the "email.user-quotation" view.
     */
    private $details;
    /**
     * Create a new message instance.
     *
     * @param mixed $detail quotation details rendered in the e-mail body
     * @return void
     */
    public function __construct($detail)
    {
        $this->details = $detail;
    }
    /**
     * Build the message.
     *
     * Renders the "email.user-quotation" view with the stored details.
     *
     * @return $this
     */
    public function build()
    {
        return $this->view('email.user-quotation')->subject('Order Received')->with(['details' => $this->details]);
    }
}
|
<script setup>
//IMPORT
import { onMounted, ref } from 'vue';
import { useUserStore } from '../store/index'
import axios from 'axios'
import { useRouter } from 'vue-router';
//STORE
// Shared socket and username come from the user store.
const userStore = useUserStore();
const socket = userStore.getUserSocket;
const username = userStore.getUsername;
//REF
const isLoading = ref(true)
const fetchError = ref(null)
const listLobby = ref([]);
const router = useRouter();
//FUNCTION
// Join the selected lobby: remember its id in the store, then notify the server.
const test = (username, lobbyId) => {
    userStore.lobbyId = lobbyId
    socket.emit('join-lobby', username, lobbyId)
}
// Load the current lobby list from the backend.
const fetchData = async () => {
    try {
        isLoading.value = true
        const { data } = await axios.get('http://localhost:3000/lobby')
        listLobby.value = [...data]
    } catch (ex) {
        fetchError.value = ex;
        console.error(ex)
    } finally {
        // Fixed: previously only reset on success, so a failed fetch left the
        // page stuck on the loading view and the error view was unreachable.
        isLoading.value = false;
    }
}
//LIFE CYCLE HOOK
onMounted(fetchData)
//SOCKET EVENT
// Server confirmed the join; navigate to the lobby screen.
socket.on('joined', () => {
    router.push('/lobby')
})
</script>
<template>
    <h1>Username {{ username }}</h1>
    <div v-if="isLoading">Caricando i dati dal server...</div>
    <div v-else-if="fetchError">{{ fetchError }}</div>
    <div v-else>
        <button @click="fetchData">Refresh</button>
        <div v-for="(lobbyId, index) in listLobby" :key="index">
            <hr />
            <div>Nome lobby: {{ lobbyId }} | </div>
            <div><button @click="test(username, lobbyId)">Unisciti alla lobby!</button></div>
            <hr />
            <br>
        </div>
    </div>
</template>
|
package com.lory.soufang.config;
import com.lory.soufang.security.AuthProvider;
import com.lory.soufang.security.LoginAuthFailHandler;
import com.lory.soufang.security.LoginUrlEntryPoint;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
@Configuration
@EnableWebSecurity
public class WebSecurityConfig extends WebSecurityConfigurerAdapter {
    /**
     * Password hasher used by the auth provider.
     * NOTE(review): strength 4 is far below BCrypt's default of 10 -- fine for
     * dev speed, confirm before production.
     */
    @Bean
    public BCryptPasswordEncoder bCryptPasswordEncoder(){
        return new BCryptPasswordEncoder(4);
    }
    /** Custom authentication provider performing the credential check. */
    @Bean
    public AuthProvider authenticationProvider(){
        return new AuthProvider();
    }
    /**
     * HTTP security rules: public login/static endpoints, role-guarded
     * admin/user areas, form login, logout handling and a role-aware
     * entry point. Rule order matters -- more specific matchers come first.
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.authorizeRequests()
                .antMatchers("/admin/login").permitAll()
                .antMatchers("/static/**").permitAll()
                .antMatchers("/user/login").permitAll()
                .antMatchers("/admin/**").hasRole("ADMIN")
                .antMatchers("/user/**").hasAnyRole("ADMIN","USER")
                .antMatchers(("/api/user/**")).hasAnyRole("ADMIN","USER")
                .and()
                .formLogin()
                .loginProcessingUrl("/login")
                .failureHandler(loginAuthFailHandler())
                .and()
                .logout()
                .logoutUrl("/logout")
                .logoutSuccessUrl("/logout/page")
                .deleteCookies("JSESSIONID")
                .invalidateHttpSession(true)
                .and()
                .exceptionHandling()
                .authenticationEntryPoint(urlEntryPoint())
                .accessDeniedPage("/403");
        // CSRF disabled; frames allowed from same origin (e.g. H2 console).
        http.csrf().disable();
        http.headers().frameOptions().sameOrigin();
    }
    /** Redirects unauthenticated requests to the appropriate login page. */
    @Bean
    public LoginUrlEntryPoint urlEntryPoint(){
        return new LoginUrlEntryPoint("/user/login");
    }
    @Override
    protected void configure(AuthenticationManagerBuilder auth) throws Exception {
        auth.authenticationProvider(authenticationProvider()).eraseCredentials(true);
    }
    /** Failure handler that routes back to the matching login page. */
    @Bean
    public LoginAuthFailHandler loginAuthFailHandler(){
        return new LoginAuthFailHandler(urlEntryPoint());
    }
}
|
import { NavLink } from "react-router-dom";
import classNames from "../../utilities/ClassNames";
import Tooltip from "../elements/Tooltip";
export default function Link(props: any) {
if (props.small) {
return (
<Tooltip styles="top-2 left-11" placement="x" text={props.text}>
<NavLink
to={props.to}
className={classNames(
props.active ? "text-indigo-500 dark:text-white" : "",
"flex cursor-pointer items-center justify-center gap-4 rounded-md p-2 duration-300"
)}
>
<div
className={classNames(
props.active
? "[&>*]:fill-indigo-200 [&>*]:stroke-indigo-500 dark:[&>*]:fill-indigo-500 dark:[&>*]:stroke-indigo-200"
: ""
)}
>
{props.icon}
</div>
</NavLink>
</Tooltip>
);
} else {
return (
<div className="w-full">
<NavLink
to={props.to}
className={classNames(
props.active ? "text-indigo-500 dark:text-indigo-300" : "",
"flex w-full cursor-pointer items-center gap-3 py-2 duration-300 hover:text-indigo-500 dark:hover:text-indigo-300"
)}
>
<div
className={classNames(
props.active ? "[&>*]:fill-indigo-200 dark:[&>*]:fill-indigo-500" : ""
)}
>
{props.icon}
</div>
<span className="leading-3">{props.text}</span>
</NavLink>
</div>
);
}
}
|
package main
import (
"os"
"time"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
)
func main() {
	// Open (or create) the log file in append mode.
	logFile, err := os.OpenFile("app.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
	if err != nil {
		log.Fatal().Err(err).Msg("无法打开日志文件")
	}
	defer logFile.Close()

	// ConsoleWriter keeps the output human-readable; colors are disabled
	// because the destination is a file, not a terminal.
	consoleWriter := zerolog.ConsoleWriter{
		Out:        logFile,
		TimeFormat: time.RFC3339,
		NoColor:    true,
	}

	// Route the global logger through the file writer.
	log.Logger = log.Output(consoleWriter)

	// Emit a sample log entry.
	log.Info().Msg("这是一条日志消息")
}
|
package mco.persistency
import cats.syntax.applicative._
import mco._
import mco.io.IOInterpreters.FSDsl._
import mco.io.Stubs
import mco.io.files.{IO, Path}
import rapture.json._
import rapture.json.jsonBackends.jawn._
/** Unit tests for JsonStorage, exercised against an in-memory stub IO runner. */
class JsonStorageSpec extends UnitSpec {
  "JsonStorage#apply" should "read file and give result if provided Read value" in {
    val io = storage(Read)
    val result = StubIORunner(initialState).value(io)
    result should equal (Vector(17, 25, 83))
  }
  it should "give empty state if file cannot be parsed" in {
    // Truncated JSON on disk must degrade to an empty state, not fail.
    val state = fs("state.json" -> obj("[17,2".getBytes))
    val io = storage(Read)
    StubIORunner(state).value(io) should be (empty)
  }
  it should "write state to file given Update value" in {
    val io = storage(Update(Vector(), Vector(37, 45, 114)))
    val (state, result) = StubIORunner(initialState).apply(io)
    result should equal (Vector(37, 45, 114))
    deepGet(Path("state.json"))(state) should equal (Some(obj("[37,45,114]".getBytes)))
  }
  "JsonStorage#applyToLeft" should "use JsonStorage#apply on left element" in {
    // The right element must pass through untouched (checked by reference),
    // while the left element behaves exactly like a direct apply.
    val runner = StubIORunner(initialState)
    for (op <- Seq(Read, NoOp, Update(Vector(), Vector(1, 2, 5)))) {
      val obj = new Object
      val applyResult = storage(op)
      val (applyResult2, objResult)= storage.applyToLeft((op, obj))
      objResult should be (obj)
      runner.state(applyResult) should equal (runner.state(applyResult2))
      runner.value(applyResult) should equal (runner.value(applyResult2))
    }
  }
  "JsonStorage.Converters" should "allow to convert Package back and forth" in {
    import JsonStorage.Converters._
    // Round-trip every content kind through the JSON codec.
    for {
      kind <- Seq(ContentKind.Garbage, ContentKind.Mod, ContentKind.Doc)
      pkg = Package("pkg", Set(Content("cnt", kind, isInstalled = false)), isInstalled = false)
    } Json(pkg).as[Package] should equal (pkg)
  }
  it should "map invalid ContentKind to Garbage" in {
    import JsonStorage.Converters._
    Json.parse("\"FUBAR\"").as[ContentKind] should equal (ContentKind.Garbage)
  }
  "JsonStorage.preload" should "create repository with persistency side-effects" in {
    // change/add_/remove on the fake repo append 1, 2, 3 respectively,
    // so the persisted state must end in ...,1,2,3.
    val repo = JsonStorage.preload("test", fakeCompanion, Path("state.json"), "target", Stubs.emptySource)
    val io = repo
      .flatMap(_.change("foo", Package("foo", Set())))
      .flatMap(_.add_("bar"))
      .flatMap(_.remove("baz"))
    val state = StubIORunner(initialState).state(io)
    deepGet(Path("state.json"))(state) should equal (Some(obj("[17,25,83,1,2,3]".getBytes)))
  }
  // Companion stub that ignores the source/target and wraps the given state.
  private def fakeCompanion = new Repository.Companion[IO, Vector[Int]] {
    override def apply(key: String, s: Source[IO], t: String, state: Vector[Int]): IO[Repository[IO, Vector[Int]]] = {
      fakeRepo(state).pure[IO]
    }
  }
  // Minimal repository whose mutating ops append a distinct marker value.
  private def fakeRepo(currentState: Vector[Int]): Repository[IO, Vector[Int]] =
    new Repository[IO, Vector[Int]]
    {
      override def key: String = "test"
      override def state: Vector[Int] = currentState
      override def thumbnail(key: String): Thumbnail[IO] = fail("Not supported")
      override def apply(key: String): Package = fail("No package")
      override def packages: Traversable[Package] = Traversable()
      override def change(oldKey: String, updates: Package): IO[Self] =
        fakeRepo(state :+ 1).pure[IO]
      override def add(f: String): IO[(Package, Self)] =
        (Package(f, Set()), fakeRepo(state :+ 2)).pure[IO]
      override def remove(s: String): IO[Self] =
        fakeRepo(state :+ 3).pure[IO]
      override def canAdd(f: String): IO[Boolean] = false.pure[IO]
    }
  // Fresh storage/initial filesystem fixtures for each test.
  private def storage = new JsonStorage[Vector[Int]](Path("state.json"))
  private def initialState = fs("state.json" -> obj("[17,25,83]".getBytes))
}
|
import * as firebase from 'firebase';
import {
DocumentNotFoundError,
RequiredParameterError,
} from '@mdn-seed/core/src/helpers/errors';
import { IDatabase } from '../types/database.interface';
import { FirebaseConfig } from '../types/firebase-config.interface';
import QueryBuilder, { InstructionType } from '../helpers/query-builder';
/**
 * Firestore-backed implementation of IDatabase.
 *
 * All operations act on the collection most recently selected via
 * `collection()`. Returned documents are plain objects with the Firestore
 * document id merged in as `id`.
 */
export default function makeFirebaseDatabase({
  config,
}: {
  config: FirebaseConfig;
}): IDatabase {
  firebase.initializeApp(config);
  const database = firebase.firestore();
  // Name of the collection all subsequent calls operate on.
  let currentCollection = '';
  return Object.freeze({
    collection,
    exists,
    findById,
    insert,
    list,
    destroy,
    update,
    where,
  });
  /** Select the collection used by all following operations. */
  async function collection(newCollection: string) {
    currentCollection = newCollection;
  }
  /** True when a document with the given id exists in the current collection. */
  async function exists(id?: string): Promise<boolean> {
    return (
      !!id &&
      (await database.collection(currentCollection).doc(id).get()).exists
    );
  }
  /**
   * Fetch a single document by id.
   * Rejects with RequiredParameterError when id is missing and throws
   * DocumentNotFoundError when no document exists.
   */
  async function findById(id?: string) {
    if (!id) {
      return Promise.reject(new RequiredParameterError('id'));
    }
    const doc = await database.collection(currentCollection).doc(id).get();
    if (!doc.exists) {
      throw new DocumentNotFoundError(id);
    }
    return { ...doc.data(), id: doc.id };
  }
  /** List every document in the current collection. */
  async function list() {
    const results = await database.collection(currentCollection).get();
    return results.docs.map((doc) => ({
      ...doc.data(),
      id: doc.id,
    }));
  }
  /**
   * Insert a new document (Firestore assigns the id; any incoming `id`
   * field is stripped first) and return the stored data with its new id.
   */
  async function insert(item: any) {
    // TODO: This should be in a try catch and return an error.
    // Layers outside should be able to handle those and pass them up
    // Each layer outside that getting more context to format them appropriately
    if (item.id) {
      delete item.id;
    }
    const docRef = await database
      .collection(currentCollection)
      .add({ ...item });
    const data = (await docRef.get()).data();
    return { ...data, id: docRef.id };
  }
  /**
   * Write `item` to the document with id `item.id` (merging by default)
   * and return the updated document.
   */
  async function update(item: any, options = { merge: true }) {
    const { merge } = options;
    // doc() is synchronous -- no await needed here.
    const docRef = database.collection(currentCollection).doc(item.id);
    // Fixed: set() was not awaited, so the read below could race the write
    // and return stale data.
    await docRef.set({ ...item }, { merge });
    const updated = (await docRef.get()).data();
    return { ...updated, id: docRef.id };
  }
  /** Delete a document by id; resolves true on success. */
  async function destroy(id?: string) {
    try {
      if (!id) {
        return Promise.reject(new RequiredParameterError('id'));
      }
      await database.collection(currentCollection).doc(id).delete();
      return true;
    } catch (error) {
      console.error('Error Deleting from Firestore :: ', { error });
      return Promise.reject(error);
    }
  }
  // TODO: a generic QueryBuilder-driven query() was prototyped here and
  // removed; if the use-cases stay minimal, explicit queries like where()
  // below are preferable.
  /** Run a single-condition Firestore query against the current collection. */
  async function where(property: string, operator: any, value: any) {
    console.log('WHERE', { property, operator, value });
    return (
      await database
        .collection(currentCollection)
        .where(property, operator, value)
        .get()
    ).docs.map((doc) => ({ ...doc.data(), id: doc.id }));
  }
}
|
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.*;
import java.util.logging.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
/**
 * MapReduce job computing the average response size per client IP from
 * web-server access logs.
 */
public class Average {
    /**
     * Mapper: extracts the client IP from the first token of each line and
     * emits (ip, responseSizeInBytes); a "-" size field counts as 0.
     */
    public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
        // Fixed: previously obtained the logger for an unrelated class
        // (WordCountInMapper), which also failed to compile standalone.
        private final Logger logger = Logger.getLogger(Map.class.getName());
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString().trim();
            String[] tokens = line.split(" ");
            String ipAddressString = getIpAddress(tokens[0]);
            if (ipAddressString != null) {
                // Last token is the response size; "-" means no body was sent.
                String sizeField = tokens[tokens.length - 1];
                int size = sizeField.equals("-") ? 0 : Integer.parseInt(sizeField);
                context.write(new Text(ipAddressString), new IntWritable(size));
            }
        }
    }
    /**
     * Reducer: averages the sizes for each IP directly per key. All values
     * for a key arrive in a single reduce() call, so there is no need to
     * buffer totals in a map until cleanup() as the previous version did.
     * (Output order now follows the shuffle's key order instead of
     * HashMap iteration order; the content is identical.)
     */
    public static class Reduce extends Reducer<Text, IntWritable, Text, DoubleWritable> {
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            long sum = 0;   // long guards against overflow on large logs
            int count = 0;
            for (IntWritable val : values) {
                sum += val.get();
                count++;
            }
            context.write(key, new DoubleWritable((double) sum / count));
        }
    }
    /** Returns the first IPv4 address found in the string, or null. */
    static String getIpAddress(String ipString){
        String IPADDRESS_PATTERN =
                "(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)";
        Pattern pattern = Pattern.compile(IPADDRESS_PATTERN);
        Matcher matcher = pattern.matcher(ipString);
        if (matcher.find()) {
            return matcher.group();
        } else{
            return null;
        }
    }
    /** Parses an int out of a possibly quote-wrapped token. */
    static int getRequestSize(String str)
    {
        return Integer.parseInt(str.replace("\"",""));
    }
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Job.getInstance replaces the deprecated Job(Configuration, String) ctor.
        Job job = Job.getInstance(conf, "Average");
        job.setJarByClass(Average.class);
        //Reducer output
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);
        //Mapper output
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.waitForCompletion(true);
    }
}
|
import { Component, OnInit } from '@angular/core';
import Swal from 'sweetalert2';
import { Course } from '../model/course';
import { Student } from '../model/student';
import { CourseService } from '../service/course.service';
import { StudentService } from '../service/student.service';
@Component({
  selector: 'app-view-student',
  templateUrl: './view-student.component.html',
  styleUrls: ['./view-student.component.css']
})
export class ViewStudentComponent implements OnInit {
  // Students currently shown in the table.
  studentList:Student[]=[];

  constructor(private _studentService:StudentService) { }

  ngOnInit(): void {
    this.getAllStudentDetails();
  }

  // Load every student from the backend into studentList.
  getAllStudentDetails() {
    this._studentService.getAllStudents().subscribe(
      (response: Student[]) => { this.studentList = response; },
      (error) => { console.log(error); }
    );
  }

  // Ask the user to confirm, then delete the record and refresh the table.
  deleteStudent(id: number) {
    const swalWithBootstrapButtons = Swal.mixin({
      customClass: {
        confirmButton: 'btn btn-success',
        cancelButton: 'btn btn-danger'
      },
      buttonsStyling: false
    });
    swalWithBootstrapButtons.fire({
      title: 'Are you sure to delete data?',
      text: "You won't be able to revert this!",
      icon: 'warning',
      showCancelButton: true,
      confirmButtonText: 'Yes, delete it!',
      cancelButtonText: 'No, cancel!',
      reverseButtons: true
    }).then((result) => {
      if (result.isConfirmed) {
        this._studentService.deleteStudent(id).subscribe(
          () => {
            this.getAllStudentDetails();
            swalWithBootstrapButtons.fire(
              'Deleted!',
              'Your record has been deleted.',
              'success'
            );
          },
          (error) => { console.log(error); }
        );
      } else if (
        /* Read more about handling dismissals below */
        result.dismiss === Swal.DismissReason.cancel
      ) {
        swalWithBootstrapButtons.fire(
          'Cancelled',
          'Your imaginary file is safe :)',
          'error'
        );
      }
    });
  }
}
|
package class_;
/**
 * Driver that builds three SungJuk records and prints them as a
 * fixed-width grade table. The per-student print block was triplicated
 * in the original; it is factored into printRow() with identical output.
 */
public class SungJukMain {
    public static void main(String[] args) {
        //데이터 (data)
        SungJuk sj1 = new SungJuk();
        sj1.setData("홍길동", 91, 95, 100);
        SungJuk sj2 = new SungJuk();
        sj2.setData("프로도", 100, 89, 75);
        SungJuk sj3 = new SungJuk();
        sj3.setData("죠르디", 75, 80, 48);

        //출력 (output)
        printSeparator();
        System.out.println("\n이름 국어 영어 수학 총점 평균 학점");
        printSeparator();
        printRow(sj1);
        printRow(sj2);
        printRow(sj3);
        printSeparator();
    }

    /** Prints one student's row, tab-separated, framed by newlines. */
    private static void printRow(SungJuk sj) {
        System.out.print("\n" + sj.getName() + "\t"
                + sj.getKor() + "\t"
                + sj.getEng() + "\t"
                + sj.getMath() + "\t"
                + sj.getTot() + "\t"
                + sj.getAvg() + "\t"
                + sj.getGrade() + "\t\n");
    }

    /** Prints the 60-dash horizontal rule used between sections. */
    private static void printSeparator() {
        for(int i=0; i<60; i++) System.out.print("-");
    }
}
/*
[문제] 성적 처리
- 총점, 평균, 학점을 구하시오
- 평균은 소수이하 2째자리까지 출력
총점 = 국어 + 영어 + 수학
평균 = 총점 / 과목수
학점은 평균이 90 이상이면 'A'
평균이 80 이상이면 'B'
평균이 70 이상이면 'C'
평균이 60 이상이면 'D'
그외는 'F'
클래스명 : SungJuk
필드 : name, kor, eng, math, tot, avg, grade
메소드 : setData(이름, 국어, 영어, 수학)
calc() - 총점, 평균, 학점 계산
getName()
getKor()
getEng()
getMath()
getTot()
getAvg()
getGrade()
클래스명 : SungJukMain
[실행결과]
----------------------------------------------------
이름 국어 영어 수학 총점 평균 학점
----------------------------------------------------
홍길동 90 95 100
----------------------------------------------------
*/
|
package ra.security.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import ra.security.exception.*;
import ra.security.model.domain.EDelivered;
import ra.security.model.dto.response.OrdersResponse;
import ra.security.service.impl.CartService;
import ra.security.service.impl.OrderService;
import javax.servlet.http.HttpSession;
import java.util.List;
@RestController
@RequestMapping("/api/v4/auth/order")
@CrossOrigin("*")
public class OrderController {
    @Autowired
    private OrderService orderService;
    @Autowired
    private CartService cartService;
    /**
     * Place an order for the current user's cart using the given shipment.
     * The authenticated user is read from the "CurrentUser" session attribute.
     */
    @PostMapping("/buy/shipment/{shipmentId}")
    public ResponseEntity<OrdersResponse> order(HttpSession session, @PathVariable Long shipmentId)
            throws Exception {
        return new ResponseEntity<>(orderService.order(session.getAttribute("CurrentUser"), shipmentId), HttpStatus.OK);
    }
    /** Update the delivery status of an existing order. */
    @PutMapping("/changeDelivery/{orderId}")
    public ResponseEntity<OrdersResponse> changeDelivery(@PathVariable Long orderId,
                                                         @RequestBody EDelivered status) throws CustomException {
        return new ResponseEntity<>(orderService.changeDelivery(status, orderId), HttpStatus.OK);
    }
    /** List all orders belonging to the current session user. */
    @GetMapping("/getAll")
    public ResponseEntity<List<OrdersResponse>> getOrder(HttpSession session) {
        return new ResponseEntity<>(orderService.showOrders(session.getAttribute("CurrentUser")), HttpStatus.OK);
    }
}
|
<div class="container">
  <div class="card mat-elevation-z4 my-2">
    <div class="card-header py-2 bg-light text-dark font-weight-bold">
      <span>
        Coach Position Enquiry
      </span>
    </div>
    <form [formGroup]="accomodationAvailabilityForm" class="card-body pt-2 pb-0 text-sm" (submit)="fetchAccomodationAvailability()">
      <div class="row">
        <mat-form-field class="col-md-2 col-sm-4 col-12">
          <input formControlName="inputTrainNo" matInput placeholder="Train No." name="TrainNo" autocomplete="off" required minlength="5" maxlength="5" (keypress)="onlyNumericInput($event)"
            (keyup)="inputTrainNoChangeFn($event)">
          <!-- <mat-hint align="start">Enter Train no.</mat-hint> -->
          <!-- <mat-hint align="end">or Enter PNR no.</mat-hint> -->
          <mat-error *ngIf="inputTrainNo.invalid">{{getInputTrainNoErrorMessage()}}</mat-error>
        </mat-form-field>
        <mat-form-field class="col-md-4 col-sm-8 col-12">
          <input formControlName="inputTrainName" matInput placeholder="Train Name">
          <!-- <mat-hint align="start">Enter Train no.</mat-hint> -->
          <!-- <mat-hint align="end">or Enter PNR no.</mat-hint> -->
          <!-- <mat-error *ngIf="inputTrainNo.invalid">{{getInputTrainNoErrorMessage()}}</mat-error> -->
        </mat-form-field>
        <mat-form-field class="col-md-3 col-sm-6 col-12">
          <mat-select placeholder="Station" required>
            <!-- <mat-option>Select Source</mat-option> -->
            <mat-option *ngFor="let stn of stnList" [value]="stn.stnCode">
              {{stn.stnName}} ({{stn.stnCode}})
            </mat-option>
          </mat-select>
          <!-- <mat-error *ngIf="animalControl.hasError('required')">Please choose an animal</mat-error>
          <mat-hint>{{animalControl.value?.sound}}</mat-hint> -->
        </mat-form-field>
        <mat-form-field class="col-md-3 col-sm-6 col-12 mx-0">
          <input matInput [matDatepicker]="picker" name="journeyDate" autocomplete="off" placeholder="Journey date" required>
          <mat-datepicker-toggle matSuffix [for]="picker"></mat-datepicker-toggle>
          <mat-datepicker #picker startView="month" [startAt]="todayDate" disabled="false"></mat-datepicker>
        </mat-form-field>
        <mat-select placeholder="Class" required multiple class="col-md-3 col-sm-6 col-12 mx-0">
          <mat-option value="">All Class</mat-option>
          <mat-option value="1A">AC 1 Tier</mat-option>
          <mat-option value="2A">AC 2 Tier</mat-option>
          <mat-option value="3A">AC 3 Tier</mat-option>
          <mat-option value="3E">AC 3 Economy</mat-option>
          <mat-option value="CC">AC Chair Car</mat-option>
          <!-- Fixed: Sleeper previously reused value "1A" (same as AC 1 Tier),
               so selecting Sleeper submitted the wrong class code. -->
          <mat-option value="SL">Sleeper</mat-option>
        </mat-select>
        <div class="col-md-3 col-sm-6 col-12 px-3 mb-3">
          <button *ngIf="!accomodationAvailabilityFetched" mat-raised-button color="primary" class="w-100 font-weight-bold" type="submit">Submit</button>
          <button *ngIf="accomodationAvailabilityFetched" mat-raised-button color="primary" class="w-100 font-weight-bold" (click)="accomodationAvailabilityFetched=false">Reset</button>
        </div>
      </div>
    </form>
  </div>
  <!-- Results: coach-rake graphic plus the positions table. -->
  <div *ngIf="accomodationAvailabilityFetched" class="card mat-elevation-z4 table-responsive text-sm my-3 text-center">
    <div class="card mat-elevation-z4 p-5">
      <table>
        <tbody>
          <tr>
            <td>
              <img id="engg" src="../assets/images/coaches/eng.jpg" draggable="false" width="60" height="60">
            </td>
            <td *ngFor="let avl of fetchedAccomodationAvailability; let i = index;">
              <div class="text-center">
                #{{i + 1 | number: '2.0-0'}}<br>
                <img src="../assets/images/coaches/{{avl.coach_id}}.jpg" width="60" height="60" >
                <br>{{avl.class}}
              </div>
            </td>
          </tr>
        </tbody></table>
    </div>
    <table class="table table-bordered my-0">
      <thead class="text-dark bg-light">
        <tr>
          <th scope="col">#</th>
          <td scope="col">Class</td>
          <td scope="col">Coach ID</td>
          <td scope="col">Coach Position</td>
        </tr>
      </thead>
      <tbody class="text-dark">
        <tr *ngFor="let avl of fetchedAccomodationAvailability; let i = index;">
          <td scope="row" class="text-primary">{{i + 1 | number: '2.0-0'}}</td>
          <td>{{avl.class}}</td>
          <td>{{avl.coach_id}}</td>
          <!-- <td>{{avl.coach_position}}</td> -->
          <td>{{i + 1 | number: '2.0-0'}}</td>
        </tr>
      </tbody>
    </table>
  </div>
</div>
|
/**
@page HASH_SHA1MD5 HASH digest calculation using SHA1 and MD5 example
@verbatim
******************************************************************************
* @file HASH/HASH_SHA1MD5/readme.txt
* @author MCD Application Team
* @brief Description of the digest calculation using SHA1 and MD5 example.
******************************************************************************
* @attention
*
* Copyright (c) 2019 STMicroelectronics.
* All rights reserved.
*
* This software is licensed under terms that can be found in the LICENSE file
* in the root directory of this software component.
* If no LICENSE file comes with this software, it is provided AS-IS.
*
******************************************************************************
@endverbatim
@par Example Description
This example shows how to use the HASH peripheral to hash data with SHA-1 and MD5 algorithms.
For this example, DMA is not used for data transfer, the CPU is using HASH peripheral in
polling mode.
The SHA-1 message digest result is a 160-bit long data and the MD5 message digest result
is a 128-bit long data.
The expected HASH digests (for SHA1 and MD5) are already computed using an online
HASH tool. Those values are compared to those computed by the HASH peripheral.
When SHA1 (respectively MD5) digest computation is successful, LED_GREEN (resp. LED_BLUE) is turned on.
In case of digest computation or initialization issue, LED_RED is turned on.
@note The instruction cache (ICACHE) must be enabled by software to get a 0 wait-state execution
from Flash memory and external memories, and reach the maximum performance.
@par Keywords
System, Security, HASH, SHA1, MD5, digest
@par Directory contents
- HASH/HASH_SHA1MD5/Inc/stm32l5xx_hal_conf.h HAL configuration file
- HASH/HASH_SHA1MD5/Inc/stm32l5xx_nucleo_conf.h Board configuration file
- HASH/HASH_SHA1MD5/Inc/stm32l5xx_it.h Interrupt handlers header file
- HASH/HASH_SHA1MD5/Inc/main.h Header for main.c module
- HASH/HASH_SHA1MD5/Src/stm32l5xx_it.c Interrupt handlers
- HASH/HASH_SHA1MD5/Src/main.c Main program
- HASH/HASH_SHA1MD5/Src/stm32l5xx_hal_msp.c HAL MSP module
- HASH/HASH_SHA1MD5/Src/system_stm32l5xx.c STM32L5xx system source file
@par Hardware and Software environment
- This example runs on STM32L552ZETxQ devices.
- This example has been tested with a STM32L552xx embedded on an
NUCLEO-L552ZE-Q board and can be easily tailored to any other supported
device and development board.
@par How to use it?
In order to make the program work, you must do the following :
- Open your preferred toolchain
- Rebuild all files and load your image into target memory
- Run the example
*/
|
import React from "react";
import ReactDOM from "react-dom/client";
import "@fontsource-variable/hanken-grotesk";
import "./index.css";
import { Provider } from "react-redux";
import { store } from "./redux/store";
import Home from "./pages/home";
import { createBrowserRouter, RouterProvider } from "react-router-dom";
import Rockets from "./pages/Rockets";
import Profile from "./pages/profile";
import Root from "./pages/root";
import { ErrorPage } from "./pages/error-page";
import Missions from "./pages/Missions";
// Client-side route table. Root renders the shared layout chrome and an
// <Outlet/> for the child routes below; ErrorPage is shown for routing errors.
const router = createBrowserRouter([
  {
    path: "/",
    element: <Root />,
    errorElement: <ErrorPage />,
    children: [
      {
        path: "/",
        element: <Home />,
      },
      {
        path: "/rockets",
        element: <Rockets />,
      },
      {
        path: "/missions",
        element: <Missions />,
      },
      {
        path: "/profile",
        element: <Profile />,
      },
    ],
  },
]);
// Mount the app: the Redux store provider wraps the router so every page
// can read/dispatch to the store. StrictMode double-invokes effects in dev.
ReactDOM.createRoot(document.getElementById("root")!).render(
  <React.StrictMode>
    <Provider store={store}>
      <RouterProvider router={router} />
    </Provider>
  </React.StrictMode>,
);
|
import { useEffect } from "react"
import { useNavigate } from "react-router-dom"
// Reusable function that, when the component mounts, reads the auth token
// from local storage and redirects to the login page if it is missing.
// Because it uses React "hooks" (functions React provides that only work
// inside components — useState, useEffect, useNavigate, all starting with
// "use"), its own name must also start with "use":
// the function is therefore itself a custom hook.
/**
 * Custom hook: on mount, redirect to /login when no JWT is stored.
 * Call it at the top of any page component that requires authentication.
 */
export const useVerifyIfUserIsLogged = () => {
    // navigate lets us redirect the user programmatically
    const navigate = useNavigate()
    useEffect(() => {
        // Read the token from local storage
        const token = localStorage.getItem("jwt")
        // No token -> the user is not logged in, send them to the login page
        if (!token) {
            navigate("/login")
        }
        // TODO(review): ideally, when a token exists, decode it (jwt-decode)
        // and validate its payload; redirect as well when it is malformed.
        // FIX: the original effect had no dependency array, so the check ran
        // on every re-render; [navigate] runs it once per mount.
    }, [navigate])
}
|
import{Component} from 'react'
import {GiAirBalloon, GiPartyPopper} from 'react-icons/gi'
import {FaSearch} from 'react-icons/fa'
import {Link} from 'react-router-dom'
// Error boundary: catches render-time errors anywhere in its child tree and
// shows a friendly "lost" fallback page instead of a blank screen.
class ErrorBoundary extends Component {
  // Whether a rendering error has been caught in the child tree.
  state = {
    hasError: false
  };
  // React lifecycle: flip the flag so the next render shows the fallback UI.
  static getDerivedStateFromError(error) {
    return { hasError: true };
  }
  componentDidCatch(error, errorInfo) {
    // You can log the error or perform any other actions here
    console.error(error, errorInfo);
  }
  // Renders the fallback page while hasError is set, otherwise the children.
  // NOTE(review): the "Take me home" / "Search" buttons have no onClick or
  // Link — presumably they should navigate to '/' and a search page; confirm.
  render() {
    if (this.state.hasError) {
      // You can render an error fallback component here
      return <>
        <main className={` sm:w-[calc(100% - 32rem)] mt-20 w-[calc(100% - 16rem)] sm:ml-16 ml-4 mr-4 sm:mr-16 min-h-screen`}>
          <div className={` gap-x-10 grid grid-cols-1 gap-y-16 grid-rows-1 sm:grid-cols-2 `}>
            <div>
              <h3 className='text-[2em] text-neutral-400 font-semibold'>
                Uh oh... <span className='block'>
                  I think we're lost.
                </span>
              </h3>
              <p className='text-lg '>
                The page you're looking for could not be found.
              </p>
              <button className='bg-green-500 rounded mt-6 p-2 hover:bg-green-600 text-white '>
                Take me home
              </button>
              <button className='inline-block rounded ml-8 p-2 bg-blue-300 outline-2 outline outline-neutral-300 hover:outline-green-500'>
                <FaSearch className='inline-block' /> Search
              </button>
              <ul className='mt-5'>
                <li className='text-green-500 p-1'>
                  <Link to={'/'}>Browse cryptocurrency ecosystems</Link>
                </li>
                <li className='text-green-500 p-1'>
                  <Link to={'/nft'}>Browse NFTs
                  </Link>
                </li>
                <li className='text-green-500 p-1'>
                  <Link to={'/exchanges'}>Browse exchanges
                  </Link>
                </li>
                <li className='text-green-500 p-1'>
                  <Link to={'/learn'}>
                    Learn cryptocurrency
                  </Link>
                </li>
              </ul>
            </div>
            <div className='animate-balloon'>
              <GiAirBalloon className='sm:text-[400px] text-[200px] text-red-300'/>
            </div>
          </div>
          <p className='text-white'>
            Need help?<Link className='text-green-500' to={'/contact'}> Contact us</Link>
          </p>
        </main>
      </>
    }
    return this.props.children;
  }
}
export default ErrorBoundary
|
import 'package:flutter/material.dart';
import '../components/styles.dart';
import '/pages/forgot_password.dart';
import '/pages/register.dart';
import '/pages/verification.dart';
import '/widget/elevated_button.dart';
import '/widget/text_btn.dart';
/// Sign-in screen: email/password card plus social-login and sign-up links.
class Login extends StatefulWidget {
  /// Named-route identifier for this page.
  static const String id = 'Login';
  const Login({Key? key}) : super(key: key);
  @override
  State<Login> createState() => _LoginState();
}
class _LoginState extends State<Login> {
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      backgroundColor: appColor,
      body: SingleChildScrollView(child: _buildBody()),
    );
  }

  /// Full-height login layout: logo, sign-in card and social/sign-up footer.
  Widget _buildBody() {
    return Container(
      padding: const EdgeInsets.all(24),
      height: MediaQuery.of(context).size.height * 1,
      child: Column(
        mainAxisAlignment: MainAxisAlignment.spaceAround,
        children: [
          logoImg(),
          _signInCard(),
          _footer(),
        ],
      ),
    );
  }

  /// White rounded card holding the email/password fields, the
  /// "Forgot Password?" link and the "Sign In" button.
  Widget _signInCard() {
    return Container(
      decoration: BoxDecoration(
        borderRadius: BorderRadius.circular(26),
        color: Colors.white,
      ),
      padding: const EdgeInsets.all(28),
      child: Column(
        crossAxisAlignment: CrossAxisAlignment.start,
        children: [
          Center(
            child: blackHeading("Sign In"),
          ),
          textField("Email Address"),
          textField("Password"),
          Row(
            mainAxisAlignment: MainAxisAlignment.end,
            children: [
              MyTextButton(
                  onPressed: () {
                    Navigator.push(
                        context,
                        MaterialPageRoute(
                            builder: (context) => const ForgotPassword()));
                  },
                  text: 'Forgot Password?',
                  colors: appColor)
            ],
          ),
          const SizedBox(height: 24),
          MyElevatedButton(
              onPressed: () {
                Navigator.push(
                    context,
                    MaterialPageRoute(
                        builder: (context) => const Verification()));
              },
              text: 'Sign In',
              colors: appColor,
              height: 42,
              width: double.infinity),
        ],
      ),
    );
  }

  /// Social login buttons plus the "Sign up" prompt.
  Widget _footer() {
    return Column(
      crossAxisAlignment: CrossAxisAlignment.center,
      mainAxisAlignment: MainAxisAlignment.center,
      children: [
        Row(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            _socialIcon('assets/images/facebook.png'),
            const SizedBox(width: 16),
            _socialIcon('assets/images/google.png'),
          ],
        ),
        const SizedBox(height: 8),
        Row(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            const Text(
              // FIX: apostrophe was missing ("Dont").
              "Don't have an account?",
              style: TextStyle(color: Colors.white, fontSize: 14),
            ),
            MyTextButton(
                onPressed: () {
                  Navigator.push(
                      context,
                      MaterialPageRoute(
                          builder: (context) => const Register()));
                },
                text: 'Sign up',
                colors: Colors.white)
          ],
        )
      ],
    );
  }

  /// Circular white-bordered social-login icon (shared by Facebook/Google).
  /// Extracted to remove the duplicated Container markup.
  Widget _socialIcon(String asset) {
    return Container(
      padding: const EdgeInsets.all(8),
      decoration: BoxDecoration(
          border: Border.all(color: Colors.white),
          borderRadius: const BorderRadius.all(Radius.circular(50))),
      child: Image.asset(
        asset,
        width: 26,
        height: 26,
        color: Colors.white,
      ),
    );
  }

  /// App logo rendered in white.
  logoImg() {
    return Image.asset(
      'assets/images/logo.png',
      width: 100,
      height: 80,
      color: Colors.white,
    );
  }
}
|
/*
Copyright 2020 Set Labs Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
pragma solidity 0.5.7;
pragma experimental "ABIEncoderV2";
import { Ownable } from "openzeppelin-solidity/contracts/ownership/Ownable.sol";
import { SafeMath } from "openzeppelin-solidity/contracts/math/SafeMath.sol";
import { BoundsLibrary } from "set-protocol-contract-utils/contracts/lib/BoundsLibrary.sol";
import { Auction } from "./impl/Auction.sol";
import { ICore } from "../interfaces/ICore.sol";
import { ILiquidator } from "../interfaces/ILiquidator.sol";
import { IOracleWhiteList } from "../interfaces/IOracleWhiteList.sol";
import { ISetToken } from "../interfaces/ISetToken.sol";
import { LinearAuction } from "./impl/LinearAuction.sol";
import { Rebalance } from "../lib/Rebalance.sol";
import { RebalancingLibrary } from "../lib/RebalancingLibrary.sol";
import { TWAPAuction } from "./twap-impl/TWAPAuction.sol";
import { TWAPAuctionGetters } from "./twap-impl/TWAPAuctionGetters.sol";
import { TwoAssetPriceBoundedLinearAuction } from "./impl/TwoAssetPriceBoundedLinearAuction.sol";
/**
* @title TWAPLiquidator
* @author Set Protocol
*
* Contract that holds all the state and functionality required for setting up, returning prices, and tearing
* down TWAP rebalances for RebalancingSetTokens.
*/
contract TWAPLiquidator is
    ILiquidator,
    TWAPAuction,
    TWAPAuctionGetters,
    Ownable
{
    using SafeMath for uint256;
    // Emitted each time a new chunk auction within a TWAP rebalance begins.
    event ChunkAuctionIterated(
        address indexed rebalancingSetToken,
        uint256 orderRemaining,
        uint256 startingCurrentSets
    );
    // Emitted when the owner updates the chunk-size bounds for an asset pair.
    event ChunkSizeBoundUpdated(
        address assetOne,
        address assetTwo,
        uint256 lowerBound,
        uint256 upperBound
    );
    // Core contract used to validate that callers are enabled SetTokens.
    ICore public core;
    // Descriptive name of this liquidator.
    string public name;
    // Maps RebalancingSetToken to its auction state
    mapping(address => TWAPAuction.TWAPState) public auctions;
    // Restricts the caller to a Set currently valid (enabled) in Core.
    modifier onlyValidSet() {
        require(
            core.validSets(msg.sender),
            "TWAPLiquidator: Invalid or disabled proposed SetToken address"
        );
        _;
    }
    /**
     * TWAPLiquidator constructor
     *
     * @param _core                   Core instance
     * @param _oracleWhiteList        Oracle WhiteList instance
     * @param _auctionPeriod          Length of auction in seconds
     * @param _rangeStart             Percentage above FairValue to begin auction at in 18 decimal value
     * @param _rangeEnd               Percentage below FairValue to end auction at in 18 decimal value
     * @param _assetPairVolumeBounds  List of asset pair USD-denominated chunk auction size bounds
     * @param _name                   Descriptive name of Liquidator
     */
    constructor(
        ICore _core,
        IOracleWhiteList _oracleWhiteList,
        uint256 _auctionPeriod,
        uint256 _rangeStart,
        uint256 _rangeEnd,
        TWAPAuction.AssetPairVolumeBounds[] memory _assetPairVolumeBounds,
        string memory _name
    )
        public
        TWAPAuction(
            _oracleWhiteList,
            _auctionPeriod,
            _rangeStart,
            _rangeEnd,
            _assetPairVolumeBounds
        )
    {
        core = _core;
        name = _name;
    }
    /**
     * Initiates a TWAP auction. Can only be called by a SetToken.
     *
     * @param _currentSet                   The Set to rebalance from
     * @param _nextSet                      The Set to rebalance to
     * @param _startingCurrentSetQuantity   The currentSet quantity to rebalance
     * @param _liquidatorData               Bytecode formatted data with TWAPLiquidator-specific arguments
     */
    function startRebalance(
        ISetToken _currentSet,
        ISetToken _nextSet,
        uint256 _startingCurrentSetQuantity,
        bytes calldata _liquidatorData
    )
        external
        onlyValidSet
    {
        // Validates only 2 components are involved and are supported by oracles
        TwoAssetPriceBoundedLinearAuction.validateTwoAssetPriceBoundedAuction(
            _currentSet,
            _nextSet
        );
        // Retrieve the chunk auction size and auction period from liquidator data.
        (
            uint256 chunkAuctionValue,
            uint256 chunkAuctionPeriod
        ) = TWAPAuction.parseLiquidatorData(_liquidatorData);
        // Chunk size must be within bounds and total rebalance length must be below fail auction time
        TWAPAuction.validateLiquidatorData(
            _currentSet,
            _nextSet,
            _startingCurrentSetQuantity,
            chunkAuctionValue,
            chunkAuctionPeriod
        );
        // Initializes TWAP Auction and commits to TWAP state
        TWAPAuction.initializeTWAPAuction(
            auctions[msg.sender],
            _currentSet,
            _nextSet,
            _startingCurrentSetQuantity,
            chunkAuctionValue,
            chunkAuctionPeriod
        );
    }
    /**
     * Reduces the remainingCurrentSet quantity and retrieves the current
     * bid price for the chunk auction. If this auction completes the chunkAuction,
     * the lastChunkAuction parameter is updated.
     * Can only be called by a SetToken during an active auction
     *
     * @param  _quantity              The currentSetQuantity to rebalance
     * @return TokenFlow              Struct with array, inflow, and outflow data
     */
    function placeBid(
        uint256 _quantity
    )
        external
        onlyValidSet
        returns (Rebalance.TokenFlow memory)
    {
        Auction.validateBidQuantity(auction(msg.sender), _quantity);
        Auction.reduceRemainingCurrentSets(auction(msg.sender), _quantity);
        // If the auction is complete, update the chunk auction end time to the present timestamp
        if (!hasBiddableQuantity(auction(msg.sender))) {
            twapAuction(msg.sender).lastChunkAuctionEnd = block.timestamp;
        }
        return getBidPrice(msg.sender, _quantity);
    }
    /**
     * Initiates the next chunk auction. Callable by anybody.
     *
     * @param _set                    Address of the RebalancingSetToken
     */
    function iterateChunkAuction(address _set) external {
        TWAPAuction.TWAPState storage twapAuction = twapAuction(_set);
        validateNextChunkAuction(twapAuction);
        auctionNextChunk(twapAuction);
        emit ChunkAuctionIterated(
            _set,
            twapAuction.orderRemaining,
            twapAuction.chunkAuction.auction.startingCurrentSets
        );
    }
    /**
     * Validates auction completion and clears auction state. Callable only by a SetToken.
     */
    function settleRebalance() external onlyValidSet {
        require(
            !(TWAPAuction.isRebalanceActive(twapAuction(msg.sender))),
            "TWAPLiquidator: Rebalance must be complete"
        );
        clearAuctionState(msg.sender);
    }
    /**
     * Clears auction state.
     */
    function endFailedRebalance() external onlyValidSet {
        clearAuctionState(msg.sender);
    }
    /**
     * Retrieves the current chunk auction price for the particular Set
     *
     * @param _set                    Address of the SetToken
     * @param _quantity               The chunk auction's currentSetQuantity to rebalance
     * @return TokenFlow              Struct with array, inflow, and outflow data
     */
    function getBidPrice(
        address _set,
        uint256 _quantity
    )
        public
        view
        returns (Rebalance.TokenFlow memory)
    {
        return LinearAuction.getTokenFlow(chunkAuction(_set), _quantity);
    }
    /**
     * Admin function to modify chunk sizes for an asset pair.
     * Bounds are stored symmetrically for both (assetOne, assetTwo) orderings.
     *
     * @param _assetOne               Address of the first asset
     * @param _assetTwo               Address of the second asset
     * @param _assetPairVolumeBounds  Asset pair USD-denominated chunk auction size bounds
     */
    function setChunkSizeBounds(
        address _assetOne,
        address _assetTwo,
        BoundsLibrary.Bounds memory _assetPairVolumeBounds
    )
        public
        onlyOwner
    {
        require(
            BoundsLibrary.isValid(_assetPairVolumeBounds),
            "TWAPLiquidator: Bounds invalid"
        );
        chunkSizeWhiteList[_assetOne][_assetTwo] = _assetPairVolumeBounds;
        chunkSizeWhiteList[_assetTwo][_assetOne] = _assetPairVolumeBounds;
        emit ChunkSizeBoundUpdated(
            _assetOne,
            _assetTwo,
            _assetPairVolumeBounds.lower,
            _assetPairVolumeBounds.upper
        );
    }
    /**
     * Returns whether the Set's current chunk auction is considered failed
     * (delegates to LinearAuction.hasAuctionFailed).
     *
     * @param _set                    Address of the RebalancingSetToken
     */
    function hasRebalanceFailed(address _set) external view returns (bool) {
        return LinearAuction.hasAuctionFailed(chunkAuction(_set));
    }
    /**
     * Returns the current chunk auction's pricing parameters in
     * RebalancingLibrary format.
     *
     * @param _set                    Address of the RebalancingSetToken
     */
    function auctionPriceParameters(address _set)
        external
        view
        returns (RebalancingLibrary.AuctionPriceParameters memory)
    {
        return RebalancingLibrary.AuctionPriceParameters({
            auctionStartTime: auction(_set).startTime,
            auctionTimeToPivot: auctionPeriod,
            auctionStartPrice: chunkAuction(_set).startPrice,
            auctionPivotPrice: chunkAuction(_set).endPrice
        });
    }
    /**
     * Returns the total currentSets still to be rebalanced for the Set
     * (delegates to TWAPAuction.calculateTotalSetsRemaining).
     *
     * @param _set                    Address of the RebalancingSetToken
     */
    function getTotalSetsRemaining(address _set) external view returns (uint256) {
        return TWAPAuction.calculateTotalSetsRemaining(twapAuction(_set));
    }
    /**
     * Converts the chunkSize and chunkAuctionPeriod into liquidator data.
     *
     * @param _chunkSize              Currency value of rebalance volume in each chunk (18 decimal)
     * @param _chunkAuctionPeriod     Time between chunk auctions
     * @return bytes                  Bytes encoded liquidator data
     */
    function getLiquidatorData(
        uint256 _chunkSize,
        uint256 _chunkAuctionPeriod
    )
        external
        view
        returns(bytes memory)
    {
        return abi.encode(_chunkSize, _chunkAuctionPeriod);
    }
    // Deletes the TWAP auction state for the given Set.
    function clearAuctionState(address _set) internal {
        delete auctions[_set];
    }
    // Storage accessor: TWAP-level auction state for a Set.
    function twapAuction(address _set) internal view returns(TWAPAuction.TWAPState storage) {
        return auctions[_set];
    }
    // Storage accessor: the currently active chunk auction for a Set.
    function chunkAuction(address _set) internal view returns(LinearAuction.State storage) {
        return twapAuction(_set).chunkAuction;
    }
    // Storage accessor: the base auction setup of the active chunk auction.
    function auction(address _set) internal view returns(Auction.Setup storage) {
        return chunkAuction(_set).auction;
    }
}
|
import { fireEvent, render, screen } from '@testing-library/react';
import ProductCard from './product-card';
import {BrowserRouter as Router } from 'react-router-dom';
import { Product, Products } from '../../types/products';
import configureMockStore from 'redux-mock-store';
import { Provider } from 'react-redux';
import { UserProducts } from '../../types/user-products';
const mockStore = configureMockStore();
// Products the mocked user already has in the cart (USER slice).
const mockUserProducts: UserProducts = [
  { product: {
    id: 10,
    name: 'Sample Product 1',
    description: 'Sample Description 1',
    price: 100,
  }},
] as unknown as UserProducts;
// The product rendered by the card under test.
const product: Product = {
  id: 1,
  name: 'Product Name',
  price: 100,
  previewImg: 'product.jpg',
  previewImg2x: 'product@2x.jpg',
  previewImgWebp: 'product.webp',
  previewImgWebp2x: 'product@2x.webp',
  reviewCount: 5,
} as Product;
// Reviews keyed by product id (DATA slice).
const reviews = {
  1:[
    {
      id: 1,
      author: 'User 1',
      reviewText: 'Good product.',
    },
    {
      id: 2,
      author: 'User 2',
      reviewText: 'Bad product.',
    },
  ]};
// Catalog products (DATA slice).
const mockProducts: Products = [{
  id: 1,
  name: 'Sample Product 1',
  description: 'Sample Description 1',
  price: 100,
},
{
  id: 2,
  name: 'Sample Product 2',
  description: 'Sample Description 2',
  price: 200,
}] as unknown as Products;
describe('ProductCard component', () => {
  // Callback passed as the card's `cb` prop (invoked by the buy button).
  const cbMock = jest.fn();
  test('renders correctly', () => {
    const store = mockStore({
      USER: {
        products: mockUserProducts
      },
      DATA: {
        reviews: reviews,
        products: mockProducts
      },
      MODALVIEW: {
        addItemModalViewStatus: false
      }
    });
    render(
      <Provider store={store}>
        <Router >
          <ProductCard product={product} cb={cbMock}/>
        </Router>
      </Provider>
    );
    // Name appears both as the image alt text and as visible text.
    expect(screen.getByAltText('Product Name')).toBeInTheDocument();
    expect(screen.getByText('Product Name')).toBeInTheDocument();
    expect(screen.getByText(/100/i)).toBeInTheDocument();
  });
  it('calls the cb function when the "Купить" button is clicked', () => {
    const store = mockStore({
      USER: {
        products: mockUserProducts
      },
      DATA: {
        reviews: reviews,
        products: mockProducts
      },
      MODALVIEW: {
        addItemModalViewStatus: false
      }
    });
    render(
      <Provider store={store}>
        <Router >
          <ProductCard product={product} cb={cbMock}/>
        </Router>
      </Provider>
    );
    // "купить" = "buy"; the callback must receive the rendered product.
    const buyButton = screen.getByRole('button', { name: /купить/i });
    fireEvent.click(buyButton);
    expect(cbMock).toHaveBeenCalledWith(product);
  });
});
|
import InputComponent from "../components/InputComponent";
import ButtonComponent from "../components/ButtonComponent";
import CheckboxComponent from "../components/CheckboxComponent";
import { Link } from "react-router-dom";
import ROUTES from "../routes/ROUTES";
import { useState } from "react";
// Login form page: email/password inputs, "remember me" checkbox,
// sign-in button and a link to the registration page.
const LoginPage = () => {
  // Controlled-form state for the login fields.
  // NOTE(review): setInputState is never called and the inputs receive only
  // the current value with no onChange — the form is not actually wired to
  // this state; confirm whether InputComponent manages its own state or a
  // change handler is missing.
  const [inputState, setInputState] = useState({
    email: "",
    password: "",
  });
  return (
    <div className="overflow-auto w-full max-w-sm p-4 bg-lightmode-accent border-gray-200 rounded-lg shadow sm:p-6 md:p-8 dark:bg-gray-800 dark:border-gray-700">
      <form className="space-y-6" action="#">
        <h5 className="text-xl font-medium text-gray-900 dark:text-white text-center">
          Sign in
        </h5>
        <h4 className="text-md font-medium text-gray-900 dark:text-white text-center">
          to continue to Anatolia
        </h4>
        <div>
          <InputComponent
            inputState={inputState.email}
            name={"email"}
            label={"E-mail"}
            id={"email"}
          ></InputComponent>
        </div>
        <div>
          <InputComponent
            inputState={inputState.password}
            name={"password"}
            label={"Password"}
            id={"password"}
          ></InputComponent>
        </div>
        <div className="flex items-start">
          <div className="flex items-start">
            <div className="flex items-center h-5">
              <CheckboxComponent label={"Remember me"} />
            </div>
          </div>
        </div>
        <ButtonComponent label={"Sign in"}></ButtonComponent>
        <div className="flex text-sm font-medium text-gray-500 dark:text-gray-300">
          Not registered?
          <Link to={ROUTES.REGISTER}>
            <div className="ml-1 text-lightmode-pBtn hover:underline dark:text-lightmode-pBtn">
              Create account
            </div>
          </Link>
        </div>
      </form>
    </div>
  );
};
export default LoginPage;
|
import { useState, useEffect } from "react";
import initializeFirebase from "../components/Login/Firebase/Firebase.init";
import { getAuth,
createUserWithEmailAndPassword,
signOut,
onAuthStateChanged,
signInWithEmailAndPassword,
GoogleAuthProvider,
signInWithPopup,
updateProfile,
getIdToken
} from "firebase/auth";
initializeFirebase();
/**
 * Firebase auth hook: exposes the current user, loading/error state, the
 * admin flag and ID token, plus register/login/GoogleSign/logout actions.
 */
const useFirebase = () => {
    const [user, setUser] = useState({});
    const [isLoading, setIsLoading] = useState(true)
    const [authError, setAuthError] = useState('')
    const [admin, setAdmin] = useState(false)
    const [token, setToken] = useState("")
    const auth = getAuth();
    const googleProvider = new GoogleAuthProvider();
    // Create an email/password account, set the display name, persist the
    // user to the backend and navigate home.
    const register = (email, password, name, history) => {
        setIsLoading(true)
        createUserWithEmailAndPassword(auth, email, password)
            .then((userCredential) => {
                // Signed in: reflect the new user locally right away
                const newUser = {email, displayName: name}
                setUser(newUser)
                // Send the display name to Firebase after account creation
                updateProfile(auth.currentUser, {
                    displayName: name
                }).then(() => {
                    // Profile updated
                }).catch((error) => {
                    // Non-fatal: profile update failed
                });
                history.replace('/')
                setAuthError('')
                saveUser(email, name, 'POST')
            })
            .catch((error) => {
                setAuthError(error.message);
            })
            .finally(() => setIsLoading(false));
    };
    // Email/password sign-in; redirects back to the page the user came from.
    const login = (email, password, history, location) => {
        setIsLoading(true)
        signInWithEmailAndPassword(auth, email, password)
            .then((userCredential) => {
                const destination = location?.state?.from || '/'
                history.replace(destination)
                setAuthError('')
            })
            .catch((error) => {
                setAuthError(error.message);
            })
            .finally(() => setIsLoading(false));
    }
    // Google popup sign-in; upserts the user on the backend (PUT).
    const GoogleSign = (location, history) => {
        setIsLoading(true)
        signInWithPopup(auth, googleProvider)
            .then((result) => {
                const user = result.user;
                const destination = location?.state?.from || '/'
                history.replace(destination)
                setAuthError('')
                saveUser(user.email, user.displayName, 'PUT')
            }).catch((error) => {
                setAuthError(error.message);
            }).finally(() => setIsLoading(false));
    }
    const logout = () => {
        setIsLoading(true)
        signOut(auth).then(() => {
            // Sign-out successful.
        }).catch((error) => {
            // An error happened.
        })
        .finally(() => setIsLoading(false));
    }
    // Observe auth state and keep user/token in sync.
    useEffect(() => {
        const unsubscribe = onAuthStateChanged(auth, (user) => {
            if (user) {
                setUser(user)
                getIdToken(user)
                    .then(idToken => {
                        setToken(idToken)
                        console.log(idToken)
                    })
            } else {
                setUser({})
            }
            setIsLoading(false)
        });
        // FIX: the original returned `() => unsubscribe` (the function itself,
        // never invoked), so the auth listener was never detached on unmount.
        return () => unsubscribe();
    },[])
    // Look up the admin flag whenever the signed-in user changes.
    useEffect(() => {
        // FIX: skip the request while no user is signed in; the original
        // fetched ".../users/undefined" on mount.
        if (!user.email) {
            return;
        }
        fetch(`https://fathomless-scrubland-68650.herokuapp.com/users/${user.email}`)
            .then(res => res.json())
            .then(data => setAdmin(data.admin))
    }, [user.email])
    // Persist the user to the backend (POST to create, PUT to upsert).
    const saveUser = (email, displayName, method) => {
        const user = {email, displayName}
        fetch('https://fathomless-scrubland-68650.herokuapp.com/users', {
            method: method,
            headers: {
                'content-type': 'application/json'
            },
            body: JSON.stringify(user)
        })
        .then(res => res.json())
        .then(data => console.log(data))
    }
    return {
        user,
        isLoading,
        authError,
        admin,
        token,
        register,
        login,
        GoogleSign,
        logout,
    };
};
|
<!-- Page title banner -->
<div class="page-header">
  <div class="container">
    <h1>Students</h1>
  </div>
</div>
<div class="container">
  <div class="mt-1 mat-elevation-z8">
    <!-- Sortable students table; column order is driven by studentsTableColumns -->
    <table mat-table matSort [dataSource]="dataSource">
      <ng-container matColumnDef="firstName">
        <th mat-header-cell *matHeaderCellDef mat-sort-header>First Name</th>
        <td mat-cell *matCellDef="let student">
          {{ student.firstName }}
        </td>
      </ng-container>
      <ng-container matColumnDef="lastName">
        <th mat-header-cell *matHeaderCellDef mat-sort-header>Last Name</th>
        <td mat-cell *matCellDef="let student">
          {{ student.lastName }}
        </td>
      </ng-container>
      <ng-container matColumnDef="dateOfBirth">
        <th mat-header-cell *matHeaderCellDef mat-sort-header>Date Of Birth</th>
        <td mat-cell *matCellDef="let student">
          {{ student.dateOfBirth | date: 'dd-MM-yyyy' }}
        </td>
      </ng-container>
      <ng-container matColumnDef="email">
        <th mat-header-cell *matHeaderCellDef mat-sort-header>Email</th>
        <td mat-cell *matCellDef="let student">
          {{ student.email }}
        </td>
      </ng-container>
      <ng-container matColumnDef="mobile">
        <th mat-header-cell *matHeaderCellDef mat-sort-header>Mobile</th>
        <td mat-cell *matCellDef="let student">
          {{ student.mobile }}
        </td>
      </ng-container>
      <ng-container matColumnDef="gender">
        <th mat-header-cell *matHeaderCellDef mat-sort-header>Gender</th>
        <td mat-cell *matCellDef="let student">
          {{ student.gender.name }}
        </td>
      </ng-container>
      <!-- Avatar and action columns are not sortable -->
      <ng-container matColumnDef="avatarURL">
        <th mat-header-cell *matHeaderCellDef>Avatar</th>
        <td mat-cell *matCellDef="let student">
          <img [src]="student.avatarURL" width="50px"/>
        </td>
      </ng-container>
      <ng-container matColumnDef="action">
        <th mat-header-cell *matHeaderCellDef>Action</th>
        <td mat-cell *matCellDef="let student">
          <!-- Navigates to the edit page for this student -->
          <a [routerLink]="student.id">
            <mat-icon color="primary">edit</mat-icon>
          </a>
        </td>
      </ng-container>
      <tr mat-header-row *matHeaderRowDef="studentsTableColumns"></tr>
      <tr mat-row *matRowDef="let row; columns:studentsTableColumns;"></tr>
    </table>
    <mat-paginator length="100" pageSize="10" [pageSizeOptions]="[5, 10, 25, 100]"
                   aria-label="Select page"></mat-paginator>
  </div>
</div>
|
/**
 * Resources used:
 * src1: https://joshcollinsworth.com/blog/build-static-sveltekit-markdown-blog
 * src2: https://bobbyhadz.com/blog/typescript-left-hand-side-of-arithmetic-operation-must-be-type
 */
/**
 * The frontmatter of the `.md` posts
 */
type Frontmatter = {
	title: string
	date: string
	summary: string
}
/**
 * The function that returns a promise which resolves into the frontmatter `Post`.
 */
type Meta = () => Promise<{ metadata: Frontmatter }>
/**
 * Contains the metadata and the file path.
 */
type PostFile = {
	path: string
	meta: Frontmatter
}
/**
 * Endpoint: collects all markdown posts under ../blog, resolves their
 * frontmatter and returns them sorted newest-first.
 */
export const GET: import('@sveltejs/kit').RequestHandler = async () => {
	/**
	 * Sveltekit (or Vite(?)) function that imports files and returns an object
	 * with the relative path as a key and the value as a
	 * function that loads the content as a `Promise`
	 */
	const postFiles = import.meta.glob('../blog/*.md') as unknown as Record<string, Meta>
	const iterablePostFiles = Object.entries(postFiles)
	const posts = await Promise.all(
		iterablePostFiles.map(async ([path, resolver]) => {
			const { metadata } = await resolver()
			// Strip the leading "../" and the ".md" extension to get the route path
			const postPath = path.slice(2, -3)
			return {
				meta: metadata,
				path: postPath,
			}
		}),
	)
	// Newest first (descending by frontmatter date)
	const sortedPosts = posts.sort((a: PostFile, b: PostFile) => {
		const dateB = new Date(b.meta.date)
		const dateA = new Date(a.meta.date)
		return dateB.getTime() - dateA.getTime()
	})
	// NOTE(review): returning `{ body }` is the pre-1.0 SvelteKit endpoint
	// shape; SvelteKit 1.x requires returning a Response (e.g. json(...)) —
	// confirm the framework version before upgrading.
	return {
		body: sortedPosts,
	}
}
|
// This file is part of www.nand2tetris.org
// and the book "The Elements of Computing Systems"
// by Nisan and Schocken, MIT Press.
// File name: projects/01/Or8Way.hdl
/**
* 8-way Or:
* out = (in[0] or in[1] or ... or in[7])
*/
CHIP Or8Way {
    IN in[8];
    OUT out;
    PARTS:
    // Cascade seven 2-input Or gates: each stage folds in the next input
    // bit, so out = 1 iff any of in[0..7] is 1.
    Or (a=in[0], b=in[1], out=out1);
    Or (a=out1, b=in[2], out=out2);
    Or (a=out2, b=in[3], out=out3);
    Or (a=out3, b=in[4], out=out4);
    Or (a=out4, b=in[5], out=out5);
    Or (a=out5, b=in[6], out=out6);
    Or (a=out6, b=in[7], out=out);
}
|
from django.conf import settings
from django.db import models
from django.utils import timezone
class Post(models.Model):
    """Blog post authored by a site user."""
    author = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='posts', on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    text = models.TextField()
    created_date = models.DateTimeField(auto_now_add=True)
    # NOTE(review): default=timezone.now stamps every new post as published
    # at creation, which makes publish() redundant for new rows — confirm
    # this default is intended.
    published_date = models.DateTimeField(blank=True, null=True, default=timezone.now)
    def publish(self):
        """Mark the post as published right now and persist the change."""
        self.published_date = timezone.now()
        self.save()
    def __str__(self):
        return self.title
    def approved_comments(self):
        """Return only this post's comments that have been approved."""
        return self.comments.filter(approved_comment=True)
class Comment(models.Model):
    """Reader comment on a Post; hidden until approved."""
    post = models.ForeignKey('blog.Post', on_delete=models.CASCADE, related_name='comments')
    # Free-text author name (not a FK to a user account).
    author = models.CharField(max_length=200)
    text = models.TextField()
    created_date = models.DateTimeField(auto_now_add=True)
    approved_comment = models.BooleanField(default=False)
    def approve(self):
        """Mark the comment as approved and persist the change."""
        self.approved_comment = True
        self.save()
    def __str__(self):
        return self.text
class Author(models.Model):
    """Standalone author record (name + contact email).

    NOTE(review): not referenced by Post, which links to AUTH_USER_MODEL —
    confirm whether this model is still in use.
    """
    name = models.CharField(max_length=255)
    email = models.EmailField()
    def __str__(self):
        return self.name
|
package cpsc310.client;
import java.util.ArrayList;
import java.util.List;
import com.google.gwt.user.client.rpc.AsyncCallback;
/**
* Asynchronous call to server to fetch house data. For information about what
* each method does, please refer to HouseDataService.java or
* HouseDataServiceImpl.java.
*/
public interface HouseDataServiceAsync {

	// ---- Paged retrieval -------------------------------------------------

	public void getHouses(int start, int range,
			AsyncCallback<List<HouseData>> callback);

	public void getHouses(List<String> list, int start, int range,
			AsyncCallback<List<HouseData>> callback);

	public void searchHouses(String[] userSearchInput, int isSelling,
			AsyncCallback<Void> callback);

	public void getHouseDatabaseLength(AsyncCallback<Integer> callback);

	// ---- Server-side sorting (one method per sortable column) ------------

	public void sortByAddress(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByPostalCode(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByOwner(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByForSale(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByCurrentLandValue(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByCurrentImprovementValue(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByAssessmentYear(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByPreviousLandValue(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByPreviousImprovementValue(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByYearBuilt(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByBigImprovementYear(boolean isSortAscending,
			AsyncCallback<Void> callback);

	public void sortByPrice(boolean isSortAscending,
			AsyncCallback<Void> callback);

	// ---- Mutation and lookup ---------------------------------------------

	// FIX: parameter renamed Owner -> owner to follow Java naming
	// conventions (parameter names are not part of the binary interface).
	public void updateHouse(String owner, int price, boolean isSelling,
			String houseID, double latitude, double longitude,
			String postalCode, AsyncCallback<Void> callback);

	public void getStreetNames(AsyncCallback<List<String>> callback);

	public void refreshIDStore(AsyncCallback<Void> callback);

	public void resetHouse(String houseID, AsyncCallback<Void> callback);

	public void retrieveSingleHouse(int civicNumber, String streetName,
			AsyncCallback<HouseData> callback);

	public void searchHousesForSalePolygon(String[] userSearchInput,
			double[] latitude, double[] longitude, AsyncCallback<Void> callback);

	public void getHomesByUser(String email, AsyncCallback<Void> callback);
}
|
import 'package:flutter/material.dart';
import 'package:provider/provider.dart';
import '../widgets/cart_item.dart';
import '../providers/cart.dart' show Cart;
import '../providers/orders.dart';
/// Shopping-cart screen: shows the running total, an "order" button that
/// converts the cart into an order, and the list of cart items.
class CartScreen extends StatelessWidget {
  /// Route name used with Navigator.pushNamed.
  static const routeName = '/cart';
  @override
  Widget build(BuildContext context) {
    // Rebuilds whenever the Cart provider notifies listeners.
    final cart = Provider.of <Cart> (context);
    return Scaffold(
      appBar: AppBar(
        title: Text('Корзина'), // "Cart"
      ),
      body: Column(
        children: <Widget>[
          Card(
            margin: EdgeInsets.all(15),
            child: Padding(
              padding: EdgeInsets.all(9),
              child: Row(
                mainAxisAlignment: MainAxisAlignment.spaceBetween,
                children: <Widget>[
                  Text('Всего:', style: TextStyle(fontSize: 20),), // "Total:"
                  Spacer(),
                  // Ruble total, 2 decimal places. Note: the leading
                  // backslash in '\Р' is a no-op escape (renders as 'Р').
                  Chip(label: Text('\Р${cart.totalAmount.toStringAsFixed(2)}',
                  style: TextStyle(color: Color.fromARGB(240, 255, 255, 255)),),
                  backgroundColor: Color.fromARGB(240, 0, 0, 0),),
                  TextButton(
                    child: Text('Заказать', // "Order"
                    style: TextStyle(fontSize: 18,color: Color.fromARGB(240, 255, 255, 255)
                    ),
                    ),
                    onPressed: (){
                      // Convert the cart contents into an order, then empty the cart.
                      Provider.of<Orders>(context, listen: false).addOrder(
                        cart.items.values.toList(),
                        cart.totalAmount);
                      cart.clear();
                    }, ),
                ],
              ),
            ),
          ),
          SizedBox(height: 10),
          // One CartItem row per cart entry; the map key is the product id.
          Expanded(child: ListView.builder(itemCount: cart.items.length,
          itemBuilder: (ctx, i) => CartItem(
            cart.items.values.toList()[i].id,
            cart.items.keys.toList()[i],
            cart.items.values.toList()[i].price,
            cart.items.values.toList()[i].quantity,
            cart.items.values.toList()[i].title),),)
        ],
      ),
    );
  }
}
|
from telegram import Update
from config.openai_client import client
from config.jira_connect import login, api_key, adress
from jira import JIRA
import config.promts as promt
import handlers.support_functions as spf
async def chatgpt_reply(update: Update, context):
    """Relay the user's message verbatim to gpt-3.5-turbo and send the
    model's answer (plus the standard closing line) back to the chat.

    Returns spf.CHOOSING so the conversation handler stays in its menu state.
    """
    user_text = update.message.text
    # model request
    completion = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": user_text}],
        max_tokens=1024,
        temperature=0.5,
    )
    # answer + standard closing line appended from the prompt module
    answer = completion.choices[0].message.content.strip() + promt.end_promt
    # forward the answer to Telegram
    await update.message.reply_text(answer)
    print("user:", user_text)
    print("assistant:", answer)
    return spf.CHOOSING
async def generate_excuse(update: Update, context):
    """Prefix the user's message with the funny-excuse prompt, query
    gpt-3.5-turbo, and send the generated excuse back to the chat.

    Returns spf.CHOOSING so the conversation handler stays in its menu state.
    """
    # prompt = canned "funny excuse" preamble + the user's own text
    prompt_text = promt.funny_excus + update.message.text
    # model request
    completion = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt_text}],
        max_tokens=1024,
        temperature=0.5,
    )
    # answer + standard closing line appended from the prompt module
    answer = completion.choices[0].message.content.strip() + promt.end_promt
    # forward the answer to Telegram
    await update.message.reply_text(answer)
    print("user:", prompt_text)
    print("assistant:", answer)
    return spf.CHOOSING
async def decompose_jira(update: Update, context):
    """Fetch a Jira issue's description, ask the model to decompose it, post
    the result as a Jira comment, and echo it back to the Telegram chat.

    The incoming message is expected to contain the Jira issue key.
    Returns spf.CHOOSING so the conversation handler stays in its menu state.
    """
    task_id = update.message.text
    print(task_id)
    text1 = promt.jira_promt
    try:
        # NOTE(review): verify=False disables TLS certificate checks —
        # confirm this is intentional for the target Jira instance.
        jiraOptions = {'server': adress, "verify": False}
        jira = JIRA(options=jiraOptions, basic_auth=(login, api_key))
        text2 = jira.issue(task_id).fields.description
        text = text1 + " " + text2
        print(text)
        # model request
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": text}],
            max_tokens=1024,
            temperature=0.5,
        )
        # post the decomposition back onto the issue, then append the
        # standard closing line for the chat copy
        reply = response.choices[0].message.content.strip()
        jira.add_comment(task_id, reply)
        reply += promt.end_promt
        print("user:", text)
        print("assistant:", reply)
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Any failure (bad issue key, None description,
        # auth error) falls back to the canned error reply.
        reply = promt.jira_error
    # forward the reply to Telegram
    await update.message.reply_text(reply)
    return spf.CHOOSING
|
from dash import DashCoreRPC, CTransaction, CTxIn, CTxOut, CScript, OP_HASH160, hex_str_to_bytes, OP_EQUAL
import hashlib
import sys
import dashrpc
from dashrpc.exceptions import DashRPCException
import dashcore
# Global variable to store masternode status
# Shared, module-level state for the single masternode this script manages;
# mutated in place by the operator/participant actions below.
masternode_status = {
    'total_collateral': 0,    # pooled collateral, in DASH
    'protx_hash': None,       # ProRegTx hash once registration succeeds
    'operator_pubkey': None,
    'voting_pubkey': None,
    'operator_reward': 0,     # operator reward, stored as a percentage
    'participants': [],       # per-participant dicts: pubkey/collateral/address
}
# Dictionary to store participant data: pending pledges keyed by public key,
# each value {'collateral_amount': float}. Cleared once collateral is combined.
participants = {}
# Function to connect to Dash RPC
def connect_to_dash_rpc():
    """Open a connection to a Dash Core RPC server.

    Returns the connected client on success, or None when either the
    connection or the sanity-check `getinfo` call fails.
    """
    try:
        # Initialize your Dash RPC client with connection details
        # NOTE(review): rpcuser/rpcpassword/rpchost are placeholder strings
        # and `your_rpc_port` is an undefined name — this raises NameError
        # until real connection settings are filled in.
        rpc_connection = DashCoreRPC(
            rpcuser='your_rpc_username',
            rpcpassword='your_rpc_password',
            rpcport=your_rpc_port,
            rpchost='your_rpc_host'
        )
        # Test the connection: `getinfo` should report a node version
        info = rpc_connection.getinfo()
        if 'version' in info:
            print("Connected to Dash RPC Server")
            return rpc_connection
        else:
            print("Failed to connect to Dash RPC Server")
            return None
    except Exception as e:
        print(f"Connection error: {str(e)}")
        return None
# Function to create a transaction
def create_transaction(inputs, outputs):
    """Assemble a raw transaction object.

    inputs:  iterable of dicts carrying 'prev_tx' and 'prev_output_index'.
    outputs: mapping of address-hash hex string -> amount; each output is
             wrapped in an OP_HASH160 <hash> OP_EQUAL script.
    Returns the populated CTransaction (unsigned).
    """
    tx = CTransaction()
    for spec in inputs:
        tx.vin.append(CTxIn(spec['prev_tx'], spec['prev_output_index']))
    for addr_hex, value in outputs.items():
        script = CScript([OP_HASH160, hex_str_to_bytes(addr_hex), OP_EQUAL])
        tx.vout.append(CTxOut(value, script))
    return tx
# Function to broadcast a transaction
def broadcast_transaction(tx, rpc_connection):
    """Serialize `tx` and submit it to the node via `sendrawtransaction`.

    Returns True when the node accepts the transaction, False otherwise —
    including on any exception, which is printed rather than re-raised.
    """
    try:
        tx_hex = tx.serialize().hex()
        result = rpc_connection.sendrawtransaction(tx_hex)
        if result:
            print(f"Transaction successfully broadcasted. Transaction ID: {result}")
            return True
        else:
            print("Failed to broadcast the transaction.")
            return False
    except Exception as e:
        print(f"An error occurred: {str(e)}")
        return False
# Function to calculate the protx hash
def calculate_protx_hash(serialized_mn_reg_tx):
    """Double-SHA256 the serialized registration transaction.

    Takes the ProRegTx as a hex string and returns
    sha256(sha256(raw_bytes)) as a lowercase hex digest.
    """
    first_round = hashlib.sha256(bytes.fromhex(serialized_mn_reg_tx))
    return hashlib.sha256(first_round.digest()).hexdigest()
# Function for participants to deposit collateral
def deposit_collateral():
    """Interactively record a participant's collateral pledge.

    Only updates the in-memory `participants` dict — no on-chain transfer
    happens here. Re-entering the same pubkey overwrites the earlier pledge.
    """
    participant_pubkey = input("Enter your public key: ")
    # NOTE(review): float(...) raises ValueError on non-numeric input
    collateral_amount = float(input("Enter your collateral amount (in DASH): "))
    participants[participant_pubkey] = {'collateral_amount': collateral_amount}
    print(f"Collateral of {collateral_amount} DASH deposited for participant {participant_pubkey}")
# Function to check collateral (Participant)
def check_collateral():
    """Print the pooled collateral amount.

    NOTE(review): despite the "Your collateral" wording, this prints the
    masternode-wide `total_collateral`, not the caller's individual pledge.
    """
    global masternode_status
    try:
        print(f"Your collateral: {masternode_status['total_collateral']} DASH")
    except Exception as e:
        print(f"An error occurred: {str(e)}")
# Function to combine participants' collateral by the operator
def combine_participants_collateral():
    """Sum all pledged collateral and move it into the multisig address.

    Relies on script-level globals `required_total_collateral`,
    `multisig_address` and `rpc_connection` (assigned in __main__).
    On a successful broadcast the pending `participants` dict is cleared.
    Returns True on success, False otherwise.
    """
    total_collateral = sum(participant['collateral_amount'] for participant in participants.values())
    if total_collateral >= required_total_collateral:
        # NOTE(review): the transaction is built with an empty input list —
        # the funding side is not implemented here; confirm before real use.
        tx = create_transaction([], {multisig_address: total_collateral})
        if broadcast_transaction(tx, rpc_connection):
            print(f"Combined {total_collateral} collateral from participants into multisig address {multisig_address}")
            participants.clear()
            return True
        else:
            print("Failed to broadcast the transaction.")
            return False
    else:
        print("Total collateral is insufficient for masternode setup.")
        return False
# Function to create a redemption transaction (Participant)
def create_redemption_transaction():
    """Placeholder: prints a success message without building any
    transaction. The real redemption logic is not implemented yet."""
    global masternode_status
    try:
        print("Redemption transaction created successfully.")
    except Exception as e:
        print(f"An error occurred: {str(e)}")
# Function for participants to withdraw their collateral
def withdraw_collateral():
    """Interactively pay a participant's recorded collateral back out.

    The participant is removed from the in-memory registry only after the
    withdrawal transaction broadcasts successfully.
    """
    participant_pubkey = input("Enter your public key: ")
    if participant_pubkey not in participants:
        print("Participant not found.")
        return
    collateral_amount = participants[participant_pubkey]['collateral_amount']
    withdrawal_address = input("Enter the Dash address to receive your collateral: ")
    if collateral_amount > 0:
        # NOTE(review): built with an empty input list, as in
        # combine_participants_collateral — funding side not implemented.
        tx = create_transaction([], {withdrawal_address: collateral_amount})
        if broadcast_transaction(tx, rpc_connection):
            print(f"Withdrawn {collateral_amount} DASH from participant {participant_pubkey}")
            del participants[participant_pubkey]
        else:
            print("Failed to broadcast the withdrawal transaction.")
    else:
        print("No collateral to withdraw for this participant.")
# Function to check confirmations (Participant)
def check_confirmations():
    """Placeholder: prints a fixed message; no RPC confirmation lookup is
    performed yet."""
    global masternode_status
    try:
        print("Confirmations checked.")
    except Exception as e:
        print(f"An error occurred: {str(e)}")
# Function to create Masternode Registration Transaction (Operator)
def create_masternode_registration():
    """Build, broadcast and record a masternode registration (ProRegTx).

    On success, records the protx hash, operator/voting keys and operator
    reward percentage into `masternode_status`.
    """
    global masternode_status
    operator_pubkey = input("Enter your operator public key: ")
    voting_pubkey = input("Enter the voting public key: ")
    # Calculate the required collateral from participants
    # NOTE(review): calculate_required_collateral and calculate_operator_reward
    # are not defined anywhere in this script — these calls raise NameError
    # (outside the try block, so it is not caught). The first result is
    # also never used below.
    total_required_collateral = calculate_required_collateral()
    # Calculate the operator reward
    operator_reward_percentage = calculate_operator_reward()
    try:
        # Create the Masternode Registration Transaction
        # NOTE(review): the transaction is left empty — inputs, outputs and
        # the operator signature still need to be added here.
        mn_reg_tx = dashcore.CTransaction()
        # Add inputs, outputs, and other necessary data to the transaction
        # (This part depends on the specific structure of Dash MN registration transactions)
        # Sign the transaction with the operator's private key (not shown here)
        # Serialize the transaction
        serialized_mn_reg_tx = mn_reg_tx.serialize().hex()
        # Send the transaction to the Dash network
        result = rpc_connection.sendrawtransaction(serialized_mn_reg_tx)
        if result:
            print("Masternode Registration Transaction created and sent successfully.")
            print(f"Transaction ID: {result}")
            # Update masternode status with the registration details
            masternode_status['protx_hash'] = calculate_protx_hash(serialized_mn_reg_tx)
            masternode_status['operator_pubkey'] = operator_pubkey
            masternode_status['voting_pubkey'] = voting_pubkey
            masternode_status['operator_reward'] = operator_reward_percentage
            print("Masternode successfully registered!")
        else:
            print("Failed to send the transaction.")
    except Exception as e:
        print(f"An error occurred: {str(e)}")
# Function to check total operator rewards accumulated (Participant)
def check_operator_rewards():
    """Print the operator reward figure stored in `masternode_status`.

    NOTE(review): the stored value is a percentage elsewhere in this script,
    but it is printed with a DASH unit here — confirm which is intended.
    """
    global masternode_status
    if 'operator_reward' in masternode_status:
        print(f"Total Operator Rewards Accumulated: {masternode_status['operator_reward']} DASH")
    else:
        print("Operator rewards information not available.")
# Function to check participant's masternode status (Participant)
def check_participant_masternode_status():
    """Look up one participant inside the registered masternode and print
    their registration, collateral and confirmation details.

    Scans masternode_status['participants'] for the entered public key; the
    for/else prints "not found" when no entry matched.
    """
    global masternode_status
    if 'protx_hash' in masternode_status and 'participants' in masternode_status:
        participant_pubkey = input("Enter your participant public key: ")
        for participant in masternode_status['participants']:
            if participant['pubkey'] == participant_pubkey:
                try:
                    # On-chain view of the registration for this protx hash
                    mn_info = rpc_connection.protx_info(masternode_status['protx_hash'])
                    if 'proTxHash' in mn_info:
                        print(f"Participant {participant_pubkey} Status:")
                        print(f" - Registered: Yes")
                        print(f" - ProRegTx Hash: {masternode_status['protx_hash']}")
                        print(f" - Collateral: {participant['collateral']} DASH")
                        print(f" - Confirmations: {mn_info['confirmations']}")
                        print(f" - Operator Reward: {masternode_status['operator_reward']}%")
                    else:
                        print(f"Participant {participant_pubkey} Status:")
                        print(f" - Registered: No")
                    # stop scanning once the matching participant was handled
                    break
                except Exception as e:
                    print(f"Error checking participant's masternode status: {str(e)}")
                    break
        else:
            print("Participant not found in masternode.")
    else:
        print("Masternode status not available.")
# Function to check participant's rewards (Participant)
def check_participant_rewards():
    """Print the payment-cycle rewards recorded for one participant.

    Bug fix: the error handler used the non-Python keyword ``catch``
    (``catch Exception as e:``), a SyntaxError that prevented this whole
    module from importing — replaced with ``except``. Also breaks out of the
    participant scan after a successful lookup, matching the behavior of
    check_participant_masternode_status.
    """
    global masternode_status
    if 'protx_hash' in masternode_status and 'participants' in masternode_status:
        participant_pubkey = input("Enter your participant public key: ")
        for participant in masternode_status['participants']:
            if participant['pubkey'] == participant_pubkey:
                try:
                    mn_info = rpc_connection.protx_info(masternode_status['protx_hash'])
                    if 'proTxHash' in mn_info:
                        print(f"Participant {participant_pubkey} Rewards:")
                        rewards = rpc_connection.protx_get_payment_votes(masternode_status['protx_hash'])
                        for reward in rewards:
                            if reward['voterAddress'] == participant['address']:
                                print(f" - Payment Cycle: {reward['cycle']}")
                                print(f" - Payment Amount: {reward['paymentAmount']} DASH")
                                break
                    else:
                        print(f"Participant {participant_pubkey} Rewards:")
                        print(f" - Registered: No")
                    # matching participant handled either way — stop scanning
                    break
                except Exception as e:
                    print(f"Error checking participant's rewards: {str(e)}")
                    break
        else:
            print("Participant not found in masternode.")
    else:
        print("Masternode status not available.")
# Function to redeem participant's rewards (Participant)
def redeem_participant_rewards():
    """Build (and print) a redemption transaction for one participant's
    accumulated reward.

    NOTE(review): `create_redemption_tx` is not defined anywhere in this
    script — reaching that call raises NameError, which the except block
    reports as a generic redemption error.
    """
    global masternode_status
    if 'protx_hash' in masternode_status and 'participants' in masternode_status:
        participant_pubkey = input("Enter your participant public key: ")
        for participant in masternode_status['participants']:
            if participant['pubkey'] == participant_pubkey:
                try:
                    mn_info = rpc_connection.protx_info(masternode_status['protx_hash'])
                    if 'proTxHash' in mn_info:
                        rewards = rpc_connection.protx_get_payment_votes(masternode_status['protx_hash'])
                        for reward in rewards:
                            if reward['voterAddress'] == participant['address']:
                                redemption_tx = create_redemption_tx(
                                    reward['collateralHash'],
                                    reward['collateralIndex'],
                                    participant['pubkey'],
                                    reward['paymentAmount'],
                                    participant['address']
                                )
                                # print the raw hex for the participant to
                                # sign/broadcast; nothing is sent here
                                serialized_redemption_tx = redemption_tx.serialize().hex()
                                print(f"Redemption transaction for participant {participant_pubkey} created successfully:")
                                print(serialized_redemption_tx)
                                break
                        else:
                            print("No rewards available for redemption.")
                            break
                    else:
                        print(f"Participant {participant_pubkey} Rewards:")
                        print(f" - Registered: No")
                        break
                except Exception as e:
                    print(f"Error redeeming participant's rewards: {str(e)}")
                    break
        else:
            print("Participant not found in masternode.")
    else:
        print("Masternode status not available.")
# Function to view the current operator reward percentage (Participant/Operator)
def view_operator_reward_percentage():
    """Print the operator reward percentage from `masternode_status`.

    Prints nothing at all if the key were ever removed (no else branch).
    """
    global masternode_status
    if 'operator_reward' in masternode_status:
        print(f"Current Operator Reward Percentage: {masternode_status['operator_reward']}%")
# Function to check the status of the redemption transaction (Participant)
def check_redemption_transaction_status():
    """Placeholder status check for a redemption transaction.

    NOTE(review): the 'operator_reward' gate looks copy-pasted from the
    rewards helpers, and the collected pubkey/tx-hash inputs are never used —
    the success message is unconditional.
    """
    global masternode_status
    if 'operator_reward' in masternode_status:
        participant_pubkey = input("Enter your participant public key: ")
        redemption_tx_hash = input("Enter the redemption transaction hash: ")
        try:
            # Implement logic to check the status of the redemption transaction
            # For now, we assume the redemption transaction is successful (replace with actual logic)
            print(f"Redemption Transaction {redemption_tx_hash} is successful.")
        except Exception as e:
            print(f"An error occurred: {str(e)}")
    else:
        print("Operator rewards information not available.")
# Function to set operator reward percentage (Operator)
def set_operator_reward():
    """Interactively overwrite the operator reward percentage in memory.

    NOTE(review): no range validation, float(...) raises ValueError on bad
    input, and nothing is updated on-chain — this is local state only.
    """
    global masternode_status
    new_reward_percentage = float(input("Enter the new operator reward percentage: "))
    masternode_status['operator_reward'] = new_reward_percentage
    print(f"Operator reward percentage set to {new_reward_percentage}%.")
# Function to view operator reward percentage (Participant/Operator)
def view_operator_reward():
    """Print the operator reward percentage (same data as
    view_operator_reward_percentage, with slightly different wording)."""
    global masternode_status
    if 'operator_reward' in masternode_status:
        print(f"Operator Reward Percentage: {masternode_status['operator_reward']}%")
# Function to view masternode status (Participant/Operator)
def view_masternode_status():
    """Print whether the masternode's ProRegTx is visible on-chain, plus its
    hash and confirmation count."""
    global masternode_status
    # NOTE(review): 'protx_hash' is always present in the dict (initialized
    # to None), so this branch is taken even before registration; the RPC
    # call below will then be made with a None hash.
    if 'protx_hash' in masternode_status:
        protx_info = rpc_connection.protx_info(masternode_status['protx_hash'])
        if 'proTxHash' in protx_info:
            print("Masternode Status:")
            print(f" - Registered: Yes")
            print(f" - ProRegTx Hash: {masternode_status['protx_hash']}")
            print(f" - Confirmations: {protx_info['confirmations']}")
        else:
            print("Masternode Status:")
            print(f" - Registered: No")
    else:
        print("Masternode status not available.")
# Function to monitor overall masternode status (for the operator)
def monitor_masternode_status():
    """Print a full status report: registration info, reward percentage,
    confirmations, and every participant's key/collateral/address."""
    global masternode_status
    try:
        if 'protx_hash' in masternode_status:
            mn_info = rpc_connection.protx_info(masternode_status['protx_hash'])
            if 'proTxHash' in mn_info:
                print("Masternode Status:")
                print(f" - ProRegTx Hash: {masternode_status['protx_hash']}")
                print(f" - Total Collateral: {masternode_status['total_collateral']} DASH")
                print(f" - Operator Reward Percentage: {masternode_status['operator_reward']}%")
                print(f" - Confirmations: {mn_info['confirmations']}")
                print(f" - Operator Address: {mn_info['operatorAddress']}")
                print(f" - Voting Address: {mn_info['votingAddress']}")
                # local name shadows the module-level `participants` dict;
                # this is the registered list, not the pending pledges
                participants = masternode_status['participants']
                print("Participants:")
                for participant in participants:
                    print(f" - Participant Public Key: {participant['pubkey']}")
                    print(f" - Collateral: {participant['collateral']} DASH")
                    print(f" - Address: {participant['address']}")
            else:
                print("Masternode is not registered.")
        else:
            print("Masternode status not available.")
    except Exception as e:
        print(f"Error monitoring masternode status: {str(e)}")
# Updated Main Menu
def main_menu():
    """Print the interactive option menu, one numbered entry per line."""
    menu_lines = (
        "1. Deposit Collateral (Participant)",
        "2. Check Your Collateral (Participant)",
        "3. Combine Participants' Collateral (Operator)",
        "4. Create Redemption Transaction (Participant)",
        "5. Withdraw Collateral (Participant)",
        "6. Check Confirmations (Participant)",
        "7. Create Masternode Registration (Operator)",
        "8. Display Masternode Status (Operator)",
        "9. Monitor Masternode Status (Operator)",
        "10. Set Operator Reward (Operator)",
        "11. View Operator Reward (Participant/Operator)",
        "12. View Masternode Status (Participant/Operator)",
        "13. Check Participant Masternode Status (Participant)",
        "14. Check Participant Rewards (Participant)",
        "15. Redeem Participant Rewards (Participant)",
        "16. View Operator Reward Percentage (Participant/Operator)",
        "17. Check Redemption Transaction Status (Participant)",
        "18. Check Operator Rewards (Participant)",
        "19. Quit (Q)",
    )
    print("\nChoose an option:")
    for line in menu_lines:
        print(line)
if __name__ == "__main__":
    # One-time setup: RPC connection plus operator-supplied pool parameters.
    # These three names are read as globals by several functions above.
    rpc_connection = connect_to_dash_rpc()
    required_total_collateral = float(input("Enter the required total collateral for masternode setup (in DASH): "))
    multisig_address = input("Enter the multisig address for combining collateral (Operator): ")
    # Menu is printed once; the loop below only shows the short prompt.
    main_menu()
    # Interactive dispatch loop; option 19 / 'q' exits the process.
    while True:
        option = input("Select an option (1-19 or Q to quit): ").strip().lower()
        if option == '1':
            deposit_collateral()
        elif option == '2':
            check_collateral()
        elif option == '3':
            combine_participants_collateral()
        elif option == '4':
            create_redemption_transaction()
        elif option == '5':
            withdraw_collateral()
        elif option == '6':
            check_confirmations()
        elif option == '7':
            create_masternode_registration()
        # NOTE(review): options 8 and 12 both dispatch to
        # view_masternode_status(), though the menu labels them differently.
        elif option == '8':
            view_masternode_status()
        elif option == '9':
            monitor_masternode_status()
        elif option == '10':
            set_operator_reward()
        elif option == '11':
            view_operator_reward()
        elif option == '12':
            view_masternode_status()
        elif option == '13':
            check_participant_masternode_status()
        elif option == '14':
            check_participant_rewards()
        elif option == '15':
            redeem_participant_rewards()
        elif option == '16':
            view_operator_reward_percentage()
        elif option == '17':
            check_redemption_transaction_status()
        elif option == '18':
            check_operator_rewards()
        elif option == '19' or option == 'q':
            sys.exit()
        else:
            print("Invalid option. Please select a valid option.")
|
/*---------------------------------------------------------------------------------------
Description: Try drawing a second container with another call to glDrawElements but place
it at a different position using transformations only. Make sure this second
container is placed at the top-left of the window and instead of rotating,
scale it over time (using the sin function is useful here; note that using
sin will cause the object to invert as soon as a negative scale is applied)
---------------------------------------------------------------------------------------*/
// glad
#include <glad/glad.h>
// GLFW
#include <GLFW/glfw3.h>
// stb_image
#define STB_IMAGE_IMPLEMENTATION
#include <stb_image/stb_image.h>
// GLM
#include <glm/glm.hpp> // ⎫
#include <glm/gtc/matrix_transform.hpp> // ⎬ for transformations
#include <glm/gtc/type_ptr.hpp> // ⎭
// shader
#include <shader_header/shader.h>
// [ forward declarations ]
void framebuffer_size_callback(GLFWwindow *window, int width, int height);
void processInput(GLFWwindow *window, const Shader &shader);
void changeAlpha(const Shader &shader, const std::string &increment);
void drawCarpet(int depth, Shader& shader, glm::mat4 transformationMatrix, glm::vec3 translationVector);
// [ configurations ]
// Mutable program state shared between main(), the input handler and the
// resize callback.
namespace globals
{
    // initial window size
    constexpr int SCR_WIDTH  { 800 };
    constexpr int SCR_HEIGHT { 600 };
    // kept in sync with the framebuffer by framebuffer_size_callback()
    float ASPECT_RATIO { SCR_WIDTH / static_cast<float>(SCR_HEIGHT) };
    glm::vec3 TRANSLATION { glm::vec3(0.0f, 0.0f, 0.0f) };  // WASD pan offset
    float ZOOM { 1.0f };  // '.' zooms in, ',' zooms out (multiplicative)
    int DEPTH { 1 };      // carpet recursion depth, changed with UP/DOWN keys
}
// [ main program ]
int main()
{
// initialize glfw
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// glfw window creation
GLFWwindow *window = glfwCreateWindow(globals::SCR_WIDTH, globals::SCR_HEIGHT, "LearnOpenGL", NULL, NULL);
if (window == NULL)
{
std::cerr << "Failed to Create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
glfwSetFramebufferSizeCallback(window, framebuffer_size_callback);
// set glfw input mode
glfwSetInputMode(window, GLFW_STICKY_KEYS, GLFW_TRUE); // the key state will remain GLFW_PRESS until polled with glfwGetKey
// glad: load all OpenGL function pointers
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) // bool == 0 if success
{
std::cerr << "Failed to initialize GLAD" << std::endl;
glfwTerminate();
return -1;
}
// build and compile shader
Shader theShader("shader.vs", "shader.fs");
// vertex data
float vertices[] {
// positions // colors // texture coords
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, // top-right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // bottom-right
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // bottom-left
-0.5f, 0.5f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f // top-left
};
unsigned int indices[] = {
0, 1, 3, // first triangle
1, 2, 3 // second triangle
};
// setting up buffer(s)
unsigned int VAO;
unsigned int VBO;
unsigned int EBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
// position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// color attribute
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
// texture attribute
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(6 * sizeof(float)));
glEnableVertexAttribArray(2);
/*-----------------------------------------------------------------------------------------------------------
-----------------------------------------------------------------------------------------------------------*/
// generate texture (ID)
unsigned int textureID[2];
glGenTextures(2, textureID);
// texture 0
//----------
// bind texture
glBindTexture(GL_TEXTURE_2D, textureID[0]);
// set texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_MIRRORED_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_MIRRORED_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// load image (texture 0)
int imageWidth, imageHeight, nrChannels;
stbi_set_flip_vertically_on_load(true); // fix flipped image when loaded
unsigned char* imageData { stbi_load("../../img/wall.jpg", &imageWidth, &imageHeight, &nrChannels, 0) };
if (imageData)
{
// now generate texture from image
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, imageWidth, imageHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, imageData);
glGenerateMipmap(GL_TEXTURE_2D);
}
else
{
// fail
std::cerr << "Failed to load texture 0\n" ;
}
stbi_image_free(imageData);
// texture 1
//----------
// bind texture
glBindTexture(GL_TEXTURE_2D, textureID[1]);
// set texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_MIRRORED_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_MIRRORED_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// load image (texture 1)
imageData = stbi_load("../../img/kemomimi.jpg", &imageWidth, &imageHeight, &nrChannels, 0);
if (imageData)
{
// now generate texture from image
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, imageWidth, imageHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, imageData);
glGenerateMipmap(GL_TEXTURE_2D);
}
else
{
// fail
std::cerr << "Failed to load texture 1\n" ;
}
stbi_image_free(imageData);
// tell opengl for each sampler to which texture unit it belongs to
theShader.use();
theShader.setInt("texture0", 0);
theShader.setInt("texture1", 1);
/*-----------------------------------------------------------------------------------------------------------
-----------------------------------------------------------------------------------------------------------*/
// create transformation matrix
// glm::mat4 trans { glm::mat4(1.0f) }; // initialize transformation matrix
// trans = glm::rotate(trans, glm::radians(90.0f), glm::vec3(0.0, 0.0, 1.0f)); // rotate
// trans = glm::scale(trans, glm::vec3(0.5f, 0.5f, 0.5f)); // scale
// int transformLoc { glGetUniformLocation(theShader.ID, "transform") };
// glUniformMatrix4fv(transformLoc, 1, GL_FALSE, glm::value_ptr(trans));
// // or using
// // theShader.setMat4("transform", trans);
// render loop
while (!glfwWindowShouldClose(window))
{
// input
processInput(window, theShader);
// render
glClearColor(0.2f, 0.3f, 0.4f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
// bind texture to corresponding texture units
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureID[0]);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, textureID[1]);
glBindVertexArray(VAO);
// transform and draw
//-------------------
glm::mat4 trans { glm::mat4(1.0f) };
// preserve aspect ratio
trans = glm::scale(trans, glm::vec3(1/globals::ASPECT_RATIO, 1.0f, 1.0f)); // preserve the original aspect ration of the object
// zoom
trans = glm::scale(trans, glm::vec3(globals::ZOOM));
// translation using WASD key
trans = glm::translate(trans, globals::TRANSLATION);
// draw the carpet
drawCarpet(globals::DEPTH, theShader, trans, glm::vec3(0.0f));
//-------------------
glfwSwapBuffers(window);
glfwPollEvents();
}
// de-allocate all resources
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteBuffers(1, &EBO);
// clearing all previously allocated GLFW resources.
glfwTerminate();
return 0;
}
// [ forwarded functions definition ]
// GLFW resize callback: keep the GL viewport and the cached aspect ratio in
// sync with the framebuffer size (the ratio un-stretches the quad in main()).
void framebuffer_size_callback(GLFWwindow *window, int width, int height)
{
    glViewport(0, 0, width, height);
    globals::ASPECT_RATIO = width / static_cast<float>(height);
    // std::cout << "aspect ratio: " << globals::ASPECT_RATIO << '\n';
}
// Poll keyboard state once per frame and update the shader alpha, carpet
// depth, zoom and pan globals. ESC closes the window.
void processInput(GLFWwindow *window, const Shader &shader)
{
    if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
        glfwSetWindowShouldClose(window, true);

    // change alpha: left/right arrows step the "alpha" uniform by 0.01
    if (glfwGetKey(window, GLFW_KEY_RIGHT) == GLFW_PRESS)
        changeAlpha(shader, "inc");
    if (glfwGetKey(window, GLFW_KEY_LEFT) == GLFW_PRESS)
        changeAlpha(shader, "dec");

    // change depth: the static countdowns throttle key repeat so DEPTH
    // changes once per 10 polled frames while the key is held
    if (glfwGetKey(window, GLFW_KEY_UP) == GLFW_PRESS)
    {
        static int waitUP { 10 };
        if (--waitUP <= 0)
        {
            waitUP = 10;
            ++globals::DEPTH;
        }
    }
    if (glfwGetKey(window, GLFW_KEY_DOWN) == GLFW_PRESS)
    {
        static int waitDOWN { 10 };
        if (--waitDOWN <= 0)
        {
            waitDOWN = 10;
            --globals::DEPTH;
        }
    }
    // zoom: '.' zooms in, ',' zooms out (multiplicative steps)
    if (glfwGetKey(window, GLFW_KEY_PERIOD) == GLFW_PRESS)
        globals::ZOOM *= 1.1f;
    if (glfwGetKey(window, GLFW_KEY_COMMA) == GLFW_PRESS)
        globals::ZOOM /= 1.1f;
    // translation: WASD pans the view; the step shrinks as zoom grows so
    // panning speed stays constant on screen
    if (glfwGetKey(window, GLFW_KEY_W) == GLFW_PRESS)
        globals::TRANSLATION += -glm::vec3(0.0f, 0.05f/globals::ZOOM, 0.0f);    // minus sign at front indicate that we want to translate the view not the object itself (basically view translation == -object translation)
    if (glfwGetKey(window, GLFW_KEY_S) == GLFW_PRESS)
        globals::TRANSLATION += -glm::vec3(0.0f, -0.05f/globals::ZOOM, 0.0f);
    if (glfwGetKey(window, GLFW_KEY_D) == GLFW_PRESS)
        globals::TRANSLATION += -glm::vec3(0.05f/globals::ZOOM, 0.0f, 0.0f);
    if (glfwGetKey(window, GLFW_KEY_A) == GLFW_PRESS)
        globals::TRANSLATION += -glm::vec3(-0.05f/globals::ZOOM, 0.0f, 0.0f);
    // BACKSPACE recenters the view
    if (glfwGetKey(window, GLFW_KEY_BACKSPACE) == GLFW_PRESS)
        globals::TRANSLATION = glm::vec3(0.0f, 0.0f, 0.0f);
}
// uniform alpha increment handler (increment: "inc" steps up, "dec" steps
// down); reads the current "alpha" uniform, nudges it by 0.01 and writes it
// back, clamped to [0, 1].
void changeAlpha(const Shader &shader, const std::string &increment)
{
    float alpha {};
    glGetUniformfv(shader.ID, glGetUniformLocation(shader.ID, "alpha"), &alpha);

    if (increment == "inc") alpha += 0.01f;
    if (increment == "dec") alpha -= 0.01f;

    // clamp to the valid alpha range
    if      (alpha > 1.0f) alpha = 1.0f;
    else if (alpha < 0.0f) alpha = 0.0f;

    shader.setFloat("alpha", alpha);
}
// recursive transform: draw sierpinski carpet
void drawCarpet(int depth, Shader& shader, glm::mat4 transformationMatrix, glm::vec3 translationVector)
{
if (depth > 10) depth = 10; // bisi beurat teuing
if (depth <= 0) return;
glm::mat4 carpetTransform { transformationMatrix };
glm::vec3 carpetTranslate { translationVector };
carpetTransform = glm::translate(carpetTransform, carpetTranslate);
carpetTransform = glm::scale(carpetTransform, glm::vec3(1/3.0f, 1/3.0f, 1.0f));
shader.setMat4("transform", carpetTransform);
// draw
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
// decrease depth
--depth;
// top
carpetTranslate = glm::vec3(0.0f, 1.0f, 0.0f);
drawCarpet(depth, shader, carpetTransform, carpetTranslate);
// bottom
carpetTranslate = glm::vec3(0.0f, -1.0f, 0.0f);
drawCarpet(depth, shader, carpetTransform, carpetTranslate);
// right
carpetTranslate = glm::vec3(1.0f, 0.0f, 0.0f);
drawCarpet(depth, shader, carpetTransform, carpetTranslate);
// left
carpetTranslate = glm::vec3(-1.0f, 0.0f, 0.0f);
drawCarpet(depth, shader, carpetTransform, carpetTranslate);
// top-right
carpetTranslate = glm::vec3(1.0f, 1.0f, 0.0f);
drawCarpet(depth, shader, carpetTransform, carpetTranslate);
// top-left
carpetTranslate = glm::vec3(-1.0f, 1.0f, 0.0f);
drawCarpet(depth, shader, carpetTransform, carpetTranslate);
// bottom-left
carpetTranslate = glm::vec3(-1.0f, -1.0f, 0.0f);
drawCarpet(depth, shader, carpetTransform, carpetTranslate);
// bottom-right
carpetTranslate = glm::vec3(1.0f, -1.0f, 0.0f);
drawCarpet(depth, shader, carpetTransform, carpetTranslate);
}
|
library(data.table)
library(gausscov)
library(httr)
library(mlr3verse)
library(paradox)
# library(AzureStor)
library(mlr3batchmark)
library(batchtools)
library(finautoml)
library(glue)
library(lubridate)
# COMMAND LINE ARGUMENTS --------------------------------------------------
# LIVE controls whether data comes from Azure blob storage (TRUE) or from a
# local csv snapshot (FALSE).
if (interactive()) {
  LIVE = FALSE # set this manually when running interactively
} else {
  # Import command line arguments
  args = commandArgs(trailingOnly = TRUE)
  # Ensure there are enough arguments
  if (length(args) < 1) {
    stop("Not enough arguments. Please provide LIVE as TRUE or FALSE.")
  }
  cat(args, sep = "\n")
  # Accept both "1"/"0" and "TRUE"/"FALSE" spellings; the old
  # as.logical(as.integer(...)) produced NA for "TRUE"/"FALSE".
  LIVE = suppressWarnings(as.logical(as.integer(args[1])))
  if (is.na(LIVE)) LIVE = as.logical(args[1])
  if (is.na(LIVE)) stop("LIVE must be TRUE/FALSE or 1/0.")
  cat("Argument 1 is ", LIVE)
}
# PARAMETERS --------------------------------------------------------------
# NOTE: the unconditional `LIVE = FALSE` that used to live here clobbered
# the command-line argument parsed above; LIVE is now set only once, above.
# DATA --------------------------------------------------------------------
# Download data: from the Azure blob store when LIVE, otherwise from a
# local csv snapshot.
if (LIVE) {
  blob_key = readLines('./blob_key.txt')
  endpoint = "https://snpmarketdata.blob.core.windows.net/"
  # library(AzureStor) is not attached above (it is commented out), so all
  # AzureStor functions must be namespace-qualified to work.
  BLOBENDPOINT = AzureStor::storage_endpoint(endpoint, key = blob_key)
  cont = AzureStor::storage_container(BLOBENDPOINT, "padobran")
  blob_files = AzureStor::list_blobs(cont)
  # Blob names end in -YYYYMMDD.csv; pick the most recent snapshot.
  dates = as.Date(gsub(".*-|\\.csv", "", blob_files$name), format = "%Y%m%d")
  last_file = blob_files$name[which.max(dates)]
  dt = AzureStor::storage_read_csv(cont, last_file)
  setDT(dt)
} else {
  dt = fread("bonds-predictors-month-20240611.csv")
}
# Zero the last month's target so the final row survives na.omit and can be
# used for prediction. NOTE(review): the old comment suggests this was meant
# for LIVE only, but it has always run unconditionally — confirm intent.
dt[date == max(date), excess_return_1 := 0]
# Remove rows without maturity information
dt = na.omit(dt, cols = "maturity_years")
# Checks
dt[, min(date)]
dt[, max(date)]
dt[date == max(date)]
dim(dt[!is.na(excess_return_1) & maturity_months < 12])
dim(dt[!is.na(excess_return_1) & maturity_months < 120])
dim(dt[!is.na(excess_return_1) & maturity_months < 240])
# TASKS --------------------------------------------------------
# Task parameters: one task per (prediction horizon, maturity) pair.
cols = colnames(dt)
task_params = expand.grid(
  gsub("excess_return_", "", cols[grep("excess", cols)]),
  dt[, unique(maturity)],
  stringsAsFactors = FALSE
)
colnames(task_params) = c("horizont", "maturity")
# Keep only the 1-month horizon and maturities below 240 months.
task_params = task_params[task_params$horizont == "1", ]
idx = as.integer(gsub("m", "", task_params$maturity)) < 240
task_params = task_params[idx, ]
# Define predictors: every column that is not an identifier, a price/yield
# column, or one of the target columns.
cols = colnames(dt)
non_predictors = c(
  "date", "maturity", "yield", "maturity_months", "maturity_years", "price",
  "price_log", "excess_return_1", "excess_return_3", "excess_return_6",
  "excess_return_12")
predictors = setdiff(cols, non_predictors)
# Build one mlr3 regression task per (maturity, horizon) combination.
# date/maturity are kept out of the feature set (id columns only).
id_cols = c("date", "maturity")
tasks = lapply(1:nrow(task_params), function(i) {
  # i = 11
  horizont_ = task_params[i, "horizont"]
  mat_ = task_params[i, "maturity"]
  target_ = paste0("excess_return_", horizont_)
  cols_ = c(id_cols, target_, predictors)
  dt_ = dt[, ..cols_]
  dt_ = dt_[maturity == mat_]
  # Set last target to 0, so we can have it for prediction
  # dt_[.N, excess_return_1 := 0]
  dt_ = na.omit(dt_)
  dt_[, date := as.POSIXct(date, tz = "UTC")]
  tsk_ = as_task_regr(dt_,
                      id = paste(mat_, horizont_, sep = "_"),
                      target = target_)
  # drop id columns from the feature role (they stay in the backend)
  tsk_$col_roles$feature = setdiff(tsk_$col_roles$feature,
                                   id_cols)
  tsk_
})
# CROSS VALIDATION --------------------------------------------------------
# Month-counting helpers for the expanding-window CV below.

# Number of whole months elapsed since 1900-01 for a date-like input.
monnb = function(d) {
  parts <- as.POSIXlt(as.Date(d, origin = "1900-01-01"))
  parts$year * 12 + parts$mon
}
# Whole-month difference between two dates: d2 - d1 in months.
mondf = function(d1, d2) {
  monnb(d2) - monnb(d1)
}
# Build nested expanding-window resamplings for one task.
#
# The train window grows by one month per iteration; the `tune_length`
# months right after it form the inner validation set, and `test_length`
# months after that form the outer test set. `gap_tune` / `gap_test` insert
# embargo months between the sets. Returns a list with `custom_inner` and
# `custom_outer` mlr3 custom resamplings.
nested_cv_expanding = function(task,
                               train_length_start = 221,
                               tune_length = 6,
                               test_length = 1,
                               gap_tune = 0,
                               gap_test = 0) {
  # get year month id data
  # task = tasks[[3]]$clone()
  task_ = task$clone()
  date_ = task_$backend$data(cols = c("date", "..row_id"),
                             rows = 1:task_$nrow)
  stopifnot(all(task_$row_ids == date_$`..row_id`))
  # unique sorted month stamps; group index == position in this vector
  groups_v = date_[, unlist(unique(date))]
  groups_v = sort(groups_v)
  # create custom CV's for inner and outer sampling
  custom_inner = rsmp("custom", id = task_$id)
  custom_outer = rsmp("custom", id = task_$id)
  # util vars: map a set of month stamps back to backend row ids
  get_row_ids = function(mid) unlist(date_[date %in% mid, 2], use.names = FALSE)
  n = task_$nrow
  # create train data: expanding windows starting at train_length_start months
  train_groups = lapply(train_length_start:n, function(i) groups_v[1:i])
  # create tune set: tune_length months after the train window (+ gap)
  tune_groups <- lapply((train_length_start+gap_tune+1):n, function(i) groups_v[i:(i+tune_length-1)])
  # drop windows that run past the end of the sample (NA padded)
  index_keep = vapply(tune_groups, function(x) !any(is.na(x)), FUN.VALUE = logical(1L))
  tune_groups = tune_groups[index_keep]
  # equalize train and tune sets
  train_groups = train_groups[1:length(tune_groups)]
  # create test sets: test_length months after train + tune (+ gaps)
  insample_length = vapply(train_groups, function(x) as.integer(length(x) + gap_tune + tune_length + gap_test),
                           FUN.VALUE = integer(1))
  test_groups <- lapply(insample_length+1, function(i) groups_v[i:(i+test_length-1)])
  index_keep = vapply(test_groups, function(x) !any(is.na(x)), FUN.VALUE = logical(1L))
  test_groups = test_groups[index_keep]
  # equalize train, tune and test sets
  train_groups = train_groups[1:length(test_groups)]
  tune_groups = tune_groups[1:length(test_groups)]
  # make sets: translate month groups into row ids
  train_sets <- lapply(train_groups, get_row_ids)
  tune_sets <- lapply(tune_groups, get_row_ids)
  test_sets <- lapply(test_groups, get_row_ids)
  # sanity checks: each set must start exactly (1 + gap) months after the
  # previous set ends
  test_1 = vapply(seq_along(train_groups), function(i) {
    mondf(
      tail(as.Date(train_groups[[i]], origin = "1970-01-01"), 1),
      head(as.Date(tune_groups[[i]], origin = "1970-01-01"), 1)
    )
  }, FUN.VALUE = numeric(1L))
  stopifnot(all(test_1 == 1 + gap_tune))
  test_2 = vapply(seq_along(train_groups), function(i) {
    mondf(
      tail(as.Date(tune_groups[[i]], origin = "1970-01-01"), 1),
      head(as.Date(test_groups[[i]], origin = "1970-01-01"), 1)
    )
  }, FUN.VALUE = numeric(1L))
  stopifnot(all(test_2 == 1 + gap_test))
  test_3 = vapply(seq_along(train_groups), function(i) {
    unlist(head(test_sets[[i]], 1) - tail(tune_sets[[i]], 1))
  }, FUN.VALUE = numeric(1L))
  stopifnot(all(test_3 == 1 + gap_test))
  # create inner and outer resamplings: outer train = inner train + tune
  custom_inner$instantiate(task_, train_sets, tune_sets)
  inner_sets = lapply(seq_along(train_groups), function(i) {
    c(train_sets[[i]], tune_sets[[i]])
  })
  custom_outer$instantiate(task_, inner_sets, test_sets)
  return(list(custom_inner = custom_inner, custom_outer = custom_outer))
}
# create list of cvs, one nested CV per task; the initial train length is
# chosen so that out-of-sample predictions start at 2001-01 (minus maturity).
cvs = lapply(tasks, function(tsk_) {
  # tsk_ = tasks[[11]]
  print(tsk_$id)
  horizont_ = as.integer(gsub("excess_return_", "", tsk_$target_names))
  maturity_ = as.integer(gsub("m|_.*", "", tsk_$id))
  dates_ = tsk_$backend$data(rows = tsk_$row_ids, cols = "date")
  min_date = dates_[, min(as.Date(date), na.rm = TRUE) %m-% months(1)]
  print(min_date)
  print(dates_[, max(as.Date(date), na.rm = TRUE)])
  train_length = mondf(min_date, as.Date("2001-01-01")) - maturity_
  print(train_length)
  nested_cv_expanding(
    task = tsk_,
    train_length_start = train_length,
    tune_length = 6,
    test_length = 1,
    gap_tune = 0, # horizont_, # TODO: think if here is -1. Without -1 is conservative
    gap_test = 0 # horizont_ # TODO: think if here is -1. Without -1 is conservative
  )
})
# Checks: last outer test set of every CV (seq_along instead of the old
# hardcoded 1:11, which broke whenever the number of tasks changed)
lapply(seq_along(cvs), function(i) cvs[[i]]$custom_outer$test_set(cvs[[i]]$custom_outer$iters))
# visualize CV's (interactive sessions only): one panel per task showing
# which row ids fall into the train / tune / test sets of each fold.
if (interactive()) {
  library(ggplot2)
  library(patchwork)
  # Convert a list of row-id vectors (one per fold) into a long data.table
  # with fold / set / ID columns, ready for plotting.
  prepare_cv_plot = function(x, set = "train") {
    x = lapply(x, function(x) data.table(ID = x))
    x = rbindlist(x, idcol = "fold")
    x[, fold := as.factor(fold)]
    x[, set := as.factor(set)]
    x[, ID := as.numeric(ID)]
  }
  # Plot the first n folds of one nested CV.
  plot_cv = function(cv, n = 5) {
    # cv = cvs[[1]]
    print(cv)
    cv_test_inner = cv$custom_inner
    cv_test_outer = cv$custom_outer
    # prepare train, tune and test folds
    train_sets = cv_test_inner$instance$train[1:n]
    train_sets = prepare_cv_plot(train_sets)
    tune_sets = cv_test_inner$instance$test[1:n]
    tune_sets = prepare_cv_plot(tune_sets, set = "tune")
    test_sets = cv_test_outer$instance$test[1:n]
    test_sets = prepare_cv_plot(test_sets, set = "test")
    # thin train/tune points (every 2nd row) to keep the plot light
    dt_vis = rbind(train_sets[seq(1, nrow(train_sets), 2)],
                   tune_sets[seq(1, nrow(tune_sets), 2)],
                   test_sets[seq(1, nrow(test_sets), 1)])
    # capitalise column names for nicer legend labels
    substr(colnames(dt_vis), 1, 1) = toupper(substr(colnames(dt_vis), 1, 1))
    ggplot(dt_vis, aes(x = Fold, y = ID, color = Set)) +
      geom_point() +
      theme_minimal() +
      coord_flip() +
      labs(x = "", y = '',
           title = paste0(gsub("-.*", "", cv_test_outer$id)))
  }
  plots = lapply(cvs, plot_cv, n = 30)
  wp = wrap_plots(plots)
  ggsave("plot_cv.png", plot = wp, width = 10, height = 8, dpi = 300)
}
# ADD PIPELINES -----------------------------------------------------------
# source pipes, filters and other project-local components
source("mlr3_gausscov_f1st.R")
source("mlr3_gausscov_f3st.R")
# measures
source("AdjLoss2.R")
source("PortfolioRet.R")
# register custom pipeops / filters / measures in the mlr3 dictionaries so
# they can be referenced by key (po("..."), flt("..."), msr("..."))
mlr_pipeops$add("uniformization", finautoml::PipeOpUniform)
mlr_pipeops$add("dropcorr", finautoml::PipeOpDropCorr)
mlr_filters$add("gausscov_f1st", FilterGausscovF1st)
mlr_filters$add("gausscov_f3st", FilterGausscovF3st)
mlr_measures$add("linex", Linex)
mlr_measures$add("adjloss2", AdjLoss2)
mlr_measures$add("portfolio_ret", PortfolioRet)
# LEARNERS ----------------------------------------------------------------
# graph template: raw features unioned with PCA and ICA projections
gr = gunion(list(
  po("nop", id = "nop_union_pca"),
  po("pca", center = FALSE, rank. = 10),
  po("ica", n.comp = 10)
)) %>>% po("featureunion")
# feature filters run in parallel; their selections are unioned afterwards
filters_ = list(
  po("filter", flt("disr"), filter.nfeat = 3),
  po("filter", flt("jmim"), filter.nfeat = 3),
  po("filter", flt("jmi"), filter.nfeat = 3),
  po("filter", flt("mim"), filter.nfeat = 3),
  po("filter", flt("mrmr"), filter.nfeat = 3),
  po("filter", flt("njmim"), filter.nfeat = 3),
  po("filter", flt("cmim"), filter.nfeat = 3),
  po("filter", flt("carscore"), filter.nfeat = 3),
  po("filter", flt("information_gain"), filter.nfeat = 3),
  po("filter", filter = flt("relief"), filter.nfeat = 3),
  po("filter", filter = flt("gausscov_f1st"), p0 = 0.1, filter.cutoff = 0)
)
graph_filters = gunion(filters_) %>>%
  po("featureunion", length(filters_), id = "feature_union_filters")
# preprocessing template shared by all learners: clean-up, scaling branch,
# PCA/ICA union, filters, optional pairwise interactions
graph_template =
  po("removeconstants", id = "removeconstants_1", ratio = 0) %>>%
  po("fixfactors", id = "fixfactors") %>>%
  po("dropcorr", id = "dropcorr", cutoff = 0.99) %>>%
  # scale branch
  po("branch", options = c("uniformization", "scale"), id = "scale_branch") %>>%
  gunion(list(po("uniformization"),
              po("scale")
  )) %>>%
  po("unbranch", id = "scale_unbranch") %>>%
  gr %>>%
  graph_filters %>>%
  # modelmatrix
  po("branch", options = c("nop_interaction", "modelmatrix"), id = "interaction_branch") %>>%
  gunion(list(
    po("nop", id = "nop_interaction"),
    po("modelmatrix", formula = ~ . ^ 2))) %>>%
  po("unbranch", id = "interaction_unbranch") %>>%
  po("removeconstants", id = "removeconstants_3", ratio = 0)
# hyperparameters template (inspect a slice of the parameter set)
as.data.table(graph_template$param_set)[101:120]
# tuning space shared by all learners; the factor levels are mapped to
# numeric cutoffs via the trafo
search_space_template = ps(
  dropcorr.cutoff = p_fct(
    levels = c("0.80", "0.90", "0.95", "0.99"),
    trafo = function(x, param_set) {
      switch(x,
             "0.80" = 0.80,
             "0.90" = 0.90,
             "0.95" = 0.95,
             "0.99" = 0.99)
    }
  ),
  # scaling
  scale_branch.selection = p_fct(levels = c("uniformization", "scale")),
  # interaction
  interaction_branch.selection = p_fct(levels = c("nop_interaction", "modelmatrix"))
)
# random forest graph: shared preprocessing template + ranger learner
graph_rf = graph_template %>>%
  po("learner", learner = lrn("regr.ranger"))
plot(graph_rf)
graph_rf = as_learner(graph_rf)
as.data.table(graph_rf$param_set)[, .(id, class, lower, upper, levels)]
# extend the shared tuning space with ranger hyperparameters
search_space_rf = search_space_template$clone()
search_space_rf$add(
  ps(regr.ranger.max.depth = p_int(1, 15),
     regr.ranger.replace = p_lgl(),
     regr.ranger.mtry.ratio = p_dbl(0.1, 1),
     regr.ranger.num.trees = p_int(10, 2000),
     regr.ranger.splitrule = p_fct(levels = c("variance", "extratrees")))
)
# xgboost graph (uses its own search space without the interaction branch)
graph_xgboost = graph_template %>>%
  po("learner", learner = lrn("regr.xgboost"))
plot(graph_xgboost)
graph_xgboost = as_learner(graph_xgboost)
as.data.table(graph_xgboost$param_set)[grep("depth", id), .(id, class, lower, upper, levels)]
search_space_xgboost = ps(
  dropcorr.cutoff = p_fct(
    levels = c("0.80", "0.90", "0.95", "0.99"),
    trafo = function(x, param_set) {
      switch(x,
             "0.80" = 0.80,
             "0.90" = 0.90,
             "0.95" = 0.95,
             "0.99" = 0.99)
    }
  ),
  # scaling
  scale_branch.selection = p_fct(levels = c("uniformization", "scale")),
  # learner
  regr.xgboost.alpha = p_dbl(0.001, 100, logscale = TRUE),
  regr.xgboost.max_depth = p_int(1, 20),
  regr.xgboost.eta = p_dbl(0.0001, 1, logscale = TRUE),
  regr.xgboost.nrounds = p_int(1, 5000),
  regr.xgboost.subsample = p_dbl(0.1, 1)
)
# glmnet graph
graph_glmnet = graph_template %>>%
  po("learner", learner = lrn("regr.glmnet"))
graph_glmnet = as_learner(graph_glmnet)
as.data.table(graph_glmnet$param_set)[, .(id, class, lower, upper, levels)]
search_space_glmnet = ps(
  dropcorr.cutoff = p_fct(
    levels = c("0.80", "0.90", "0.95", "0.99"),
    trafo = function(x, param_set) {
      switch(x,
             "0.80" = 0.80,
             "0.90" = 0.90,
             "0.95" = 0.95,
             "0.99" = 0.99)
    }
  ),
  # scaling
  scale_branch.selection = p_fct(levels = c("uniformization", "scale")),
  # interaction
  interaction_branch.selection = p_fct(levels = c("nop_interaction", "modelmatrix")),
  # learner
  regr.glmnet.s = p_int(lower = 5, upper = 30),
  regr.glmnet.alpha = p_dbl(lower = 1e-4, upper = 1, logscale = TRUE)
)
# nnet graph (MaxNWts raised so larger hidden layers fit)
graph_nnet = graph_template %>>%
  po("learner", learner = lrn("regr.nnet", MaxNWts = 50000))
graph_nnet = as_learner(graph_nnet)
as.data.table(graph_nnet$param_set)[, .(id, class, lower, upper, levels)]
search_space_nnet = search_space_template$clone()
search_space_nnet$add(
  ps(regr.nnet.size = p_int(lower = 2, upper = 15),
     regr.nnet.decay = p_dbl(lower = 0.0001, upper = 0.1),
     regr.nnet.maxit = p_int(lower = 50, upper = 500))
)
# Threads: limit per-learner parallelism so batch jobs do not oversubscribe
threads = 2
set_threads(graph_rf, n = threads)
set_threads(graph_xgboost, n = threads)
set_threads(graph_nnet, n = threads)
set_threads(graph_glmnet, n = threads)
# BATCHMARK ---------------------------------------------------------------
# Build one benchmark design per (task, outer fold): each design wraps the
# four auto-tuners around a single-iteration custom resampling so every
# fold becomes an independent batchtools job.
designs_l = lapply(seq_along(cvs), function(i) {
  # for (i in 1:length(cvs)) {
  # debug
  # i = 1
  print(i)
  # get cv and task
  cv_ = cvs[[i]]
  task_ = tasks[[i]]
  # get cv inner object
  cv_inner = cv_$custom_inner
  cv_outer = cv_$custom_outer
  cat("Number of iterations fo cv inner is ", cv_inner$iters, "\n")
  # Only last fold for Live (we just need the newest prediction)
  if (LIVE) {
    loop_ind = cv_inner$iters
  } else {
    loop_ind = 1:cv_inner$iters
  }
  designs_cv_l = lapply(loop_ind, function(j) {
    # debug
    # j = 1
    print(cv_inner$id)
    # with new mlr3 version I have to clone
    task_inner = task_$clone()
    task_inner$filter(c(cv_inner$train_set(j), cv_inner$test_set(j)))
    # inner resampling: a single train/tune split for the auto-tuners
    custom_ = rsmp("custom")
    custom_$id = paste0("custom_", cv_inner$iters, "_", j)
    custom_$instantiate(task_inner,
                        list(cv_inner$train_set(j)),
                        list(cv_inner$test_set(j)))
    # objects shared by all autotuners
    measure_ = msr("regr.mse")
    tuner_ = tnr("random_search")
    # tuner_ = tnr("hyperband", eta = 5)
    # tuner_ = tnr("mbo")
    term_evals = 50
    # auto tuner rf
    at_rf = auto_tuner(
      tuner = tuner_,
      learner = graph_rf,
      resampling = custom_,
      measure = measure_,
      search_space = search_space_rf,
      # terminator = trm("none")
      term_evals = term_evals
    )
    # auto tuner xgboost
    at_xgboost = auto_tuner(
      tuner = tuner_,
      learner = graph_xgboost,
      resampling = custom_,
      measure = measure_,
      search_space = search_space_xgboost,
      # terminator = trm("none")
      term_evals = term_evals
    )
    # auto tuner glmnet
    at_glmnet = auto_tuner(
      tuner = tuner_,
      learner = graph_glmnet,
      resampling = custom_,
      measure = measure_,
      search_space = search_space_glmnet,
      # terminator = trm("none")
      term_evals = term_evals
    )
    # auto tuner nnet
    at_nnet = auto_tuner(
      tuner = tuner_,
      learner = graph_nnet,
      resampling = custom_,
      measure = measure_,
      search_space = search_space_nnet,
      # terminator = trm("none")
      term_evals = term_evals
    )
    # outer resampling: single fold, train = inner train + tune, test = outer
    customo_ = rsmp("custom")
    customo_$id = paste0("custom_", cv_inner$iters, "_", j)
    customo_$instantiate(task_, list(cv_outer$train_set(j)), list(cv_outer$test_set(j)))
    # nested CV for one round; the design is the lapply return value
    design = benchmark_grid(
      tasks = task_,
      learners = list(at_rf, at_xgboost, at_glmnet, at_nnet),
      resamplings = customo_
    )
    # populate registry with problems and algorithms to form the jobs
    # print("Batchmark")
    # batchmark(design, reg = reg)
  })
  designs_cv = do.call(rbind, designs_cv_l)
})
designs = do.call(rbind, designs_l)
# experiment dir: live runs get a fresh registry directory every time
if (LIVE) {
  dirname_ = "experiments_live"
  if (dir.exists(dirname_)) system(paste0("rm -r ", dirname_))
} else {
  dirname_ = "experiments"
}
# create registry; `packages` are loaded on every worker
print("Create registry")
packages = c("data.table", "gausscov", "paradox", "mlr3", "mlr3pipelines",
             "mlr3tuning", "mlr3misc", "future", "future.apply",
             "mlr3extralearners", "stats")
reg = makeExperimentRegistry(file.dir = dirname_, seed = 1, packages = packages)
# populate registry with problems and algorithms to form the jobs
print("Batchmark")
batchmark(designs, reg = reg)
# Submit jobs locally (LIVE) or write a PBS array-job script (backtest).
if (LIVE) {
  # load registry
  # reg = loadRegistry("experiments_live", writeable = TRUE)
  # test 1 job
  # result = testJob(1, external = TRUE, reg = reg)
  #    user  system elapsed
  #    0.70    0.72  781.16
  # get nondone jobs
  ids = findNotDone(reg = reg)
  # set up cluster (for local it is parallel)
  cf = makeClusterFunctionsSocket(ncpus = 4L)
  reg$cluster.functions = cf
  saveRegistry(reg = reg)
  # define resources and submit jobs
  resources = list(ncpus = 2, memory = 8000)
  submitJobs(ids = ids$job.id, resources = resources, reg = reg)
} else {
  # save registry
  print("Save registry")
  saveRegistry(reg = reg)
  # Build the PBS script line by line: the shebang must be the very first
  # line of the file and #PBS directives must start at column 0. The old
  # embedded sprintf string began with a newline and carried leading
  # indentation, which breaks both.
  sh_lines = c(
    "#!/bin/bash",
    "#PBS -N ZSEML",
    "#PBS -l ncpus=4",
    "#PBS -l mem=4GB",
    sprintf("#PBS -J 1-%d", nrow(designs)),
    sprintf("#PBS -o %s/logs", dirname_),
    "#PBS -j oe",
    "cd ${PBS_O_WORKDIR}",
    "apptainer run image.sif run_job.R 0"
  )
  writeLines(sh_lines, "jobs.sh")
}
# Inspect individual result (LIVE only): collect predictions from the
# registry, form the ensemble prediction and upload it to Azure.
if (LIVE) {
  # load registry
  reg = loadRegistry("experiments_live", writeable = TRUE)
  # import results
  results_live = reduceResultsBatchmark(reg = reg)
  results_live_dt = as.data.table(results_live)
  head(results_live_dt)
  # Get predictions
  predictions = lapply(results_live_dt$prediction, as.data.table)
  predictions = rbindlist(predictions)
  # Merge task id to get maturity
  ids_ = vapply(results_live_dt$task,
                function(tsk_) tsk_$id,
                FUN.VALUE = character(1))
  predictions = cbind(predictions, ids_)
  # It is ok for 4 folds to have lower row_ids values.
  # This is because one task in a list of tasks have lower number of observations
  # (this is task with 120 maturity or 10 years maturity). It starts from 1971
  # while other starts from 1961.
  # The question is which model or combination of models to use for final prediction.
  # From results in backtest it seems the two best options are:
  # 1. mean across all predictions (all maturities and models)
  # 2. mean of predictions for all models but only for maturity 60
  # Get ensemble prediction (mean) that was best on backtest
  predictions[, mean(response)] # 1)
  predictions[ids_ == "m60_1"][, mean(response)] # 2)
  best_prediction = predictions[, mean(response)]
  # Save best prediction to Azure csv. library(AzureStor) is never attached
  # in this script, so the calls must be namespace-qualified.
  cont = AzureStor::storage_container(BLOBENDPOINT, "qc-live")
  time_ = strftime(Sys.time(), format = "%Y%m%d%H%M%S")
  file_name = glue("tlt_macro_prediction_{time_}.csv")
  AzureStor::storage_write_csv(
    object = data.frame(prediction_tlt = best_prediction),
    container = cont,
    file = file_name
  )
}
|
# Digamma generalized linear model family
# Gordon Smyth 3 July 1998
#
# Constructs a glm family object with the Digamma variance function.
# `link` may be a character string, an unquoted name, or a power-link call.
# NOTE(review): relies on the S-PLUS objects `glm.links` and `make.family`;
# it will not run under base R as written.
Digamma <- function(link = "log")
{
  # Normalise the three accepted spellings of `link` to a character name.
  name.link <- substitute(link)
  if(is.name(name.link))
    if(is.character(link)) # link is character variable
      name.link <- link
    else # link is name without quotes
      link <- name.link <- as.character(name.link)
  else
    if(is.call(name.link)) # power link
      name.link <- deparse(name.link)
  # Resolve the name against the standard S-PLUS link table when present.
  if(match(name.link, dimnames(glm.links)[[2]], F))
    link <- glm.links[, name.link]
  if(!is.null(link$name))
    name.link <- link$name
  # Variance / deviance components of the family object; deviance residuals
  # are signed square roots of the unit deviances.
  var <- list(
    name = "2*[1/theta + trigamma(-theta)]",
    variance = varfun.digamma,
    deviance = function(mu, y, w = 1, residuals = F) {
      devi <- deviance.digamma(y,mu)
      if(residuals)
        sign(y - mu) * sqrt(abs(devi))
      else sum(devi)
    }
  )
  make.family("Digamma", link, var, name.link, "Trigamma")
}
# Cumulant function for the Digamma family (GKS 3 July 98).
# Valid for theta < 0 (the canonical parameter is negative).
kappa.digamma <- function(theta) {
  neg <- -theta
  2 * (theta * (log(neg) - 1) + lgamma(neg))
}
# Mean value function for the Digamma family (GKS 3 July 98):
# mu(theta) = 2 * (log(-theta) - digamma(-theta)), theta < 0.
meanval.digamma <- function(theta) {
  neg <- -theta
  2 * (log(neg) - digamma(neg))
}
# Second derivative of the cumulant function for the Digamma family
# (GKS 3 July 98): kappa''(theta) = 2 * (1/theta + trigamma(-theta)).
d2kappa.digamma <- function(theta) {
  neg <- -theta
  2 * (1/theta + trigamma(neg))
}
canonic.digamma <- function(mu) {
  # Canonical mapping for Digamma family
  # Solve meanval.digamma(theta) = mu for theta
  # GKS 3 July 98
  #
  # Newton iteration on the transformed variable mlmt = -log(-theta);
  # a fixed three iterations are taken as sufficient for convergence.
  #
  # Starting value from -log(-theta) =~ log(mu)
  mlmt <- log(mu)
  theta <- -exp(-mlmt)
  for (i in 1:3) {
    mu1 <- meanval.digamma(theta)    # current mean value at theta
    v <- d2kappa.digamma(theta)      # d mu / d theta (variance function)
    # appears to be d log(mu) / d mlmt via the chain rule — verify
    deriv <- -v/mu1*theta
    mlmt <- mlmt - log(mu1/mu)/deriv # Newton step on the log scale
    theta <- -exp(-mlmt)
  }
  theta
}
# Variance function for the Digamma family (GKS 3 July 98):
# V(mu) = kappa''(theta(mu)) with theta(mu) the canonical parameter.
varfun.digamma <- function(mu) {
  d2kappa.digamma(canonic.digamma(mu))
}
# Unit deviance for the Digamma family (GKS 3 July 98):
# d(y, mu) = 2 * [ y*(theta_y - theta_mu) - (kappa(theta_y) - kappa(theta_mu)) ]
deviance.digamma <- function(y,mu) {
  theta.y <- canonic.digamma(y)
  theta.mu <- canonic.digamma(mu)
  2 * (y * (theta.y - theta.mu) -
       (kappa.digamma(theta.y) - kappa.digamma(theta.mu)))
}
# Returns the iterative-weight expression for a given link/variance pair,
# falling back to the standard glm weight expression when the pair is not
# found in the S-PLUS `glm.weights` table.
glm.weight <- function(link, variance)
{
  # This function fixes a bug in S-Plus 2000 Release 1.
  # It is not required in earlier or later versions of S-Plus.
  # Gordon Smyth, U of Queensland, gks@maths.uq.edu.au
  # 5 Nov 1999.
  #
  default <- expression(w/((sqrt(family$variance(mu)) * family$deriv(mu))^2))
  dnames <- dimnames(glm.weights)
  # unknown link or variance name: use the default expression
  if(!match(link, dnames[[1]], F))
    return(default)
  if(!match(variance, dnames[[2]], F))
    return(default)
  ww <- glm.weights[link, variance]
  # table entries may hold the literal "NULL" meaning "use the default"
  if(as.character(ww) == "NULL")
    default
  else ww
}
|
/* ply-label.c - label control
*
* Copyright (C) 2008 Red Hat, Inc.
* Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*
* Written by: Ray Strode <rstrode@redhat.com>
* Written by: Fabian Vogt <fvogt@suse.com>
*/
#include <assert.h>
#include <endian.h>
#include <stdio.h>
#include <stdint.h>
#include <string.h>
#include <limits.h>
#include <wchar.h>
#include <ft2build.h>
#include FT_FREETYPE_H
#include "ply-logger.h"
#include "ply-pixel-buffer.h"
#include "ply-pixel-display.h"
#include "ply-utils.h"
#include "ply-label-plugin.h"
/* This is used if fontconfig (fc-match) is not available, like in the initrd. */
#define FONT_FALLBACK "/usr/share/fonts/Plymouth.ttf"
#define MONOSPACE_FONT_FALLBACK "/usr/share/fonts/Plymouth-monospace.ttf"
/* This is a little sketchy... It relies on implementation details of the compiler
 * but it makes dealing with the fixed point math freetype uses much more pleasant,
 * imo, so I'm going to roll with it for now until it causes problems.
 *
 * FreeType hands out sizes in 26.6 fixed point (26 integer bits, 6
 * fractional bits). The bit-fields below give named access to the two
 * parts; the field order is flipped for big-endian machines so that
 * `fractional_part` always maps onto the 6 low bits of `as_integer`.
 */
typedef union
{
        struct
        {
#if BYTE_ORDER == LITTLE_ENDIAN
                uint32_t fractional_part : 6;
                uint32_t pixels : 26;
#else
                uint32_t pixels : 26;
                uint32_t fractional_part : 6;
#endif
        } as_pixels_unit;
        struct
        {
#if BYTE_ORDER == LITTLE_ENDIAN
                uint32_t fractional_part : 6;
                uint32_t points : 26;
#else
                uint32_t points : 26;
                uint32_t fractional_part : 6;
#endif
        } as_points_unit;
        uint32_t as_integer; /* raw 26.6 value, as FreeType provides it */
} ply_freetype_unit_t;
/* Private state for one label control. */
struct _ply_label_plugin_control
{
        ply_pixel_display_t   *display;   /* display the label draws on; may be NULL */
        ply_rectangle_t        area;      /* position and measured size (logical pixels) */
        ply_label_alignment_t  alignment;
        long                   width;     /* For alignment (line wrapping?); -1 when unset */

        FT_Library             library;   /* FreeType library handle */
        FT_Face                face;      /* currently loaded font face */
        char                  *font;      /* requested font name; NULL means default */
        char                  *text;      /* plain-text content */
        ply_rich_text_t       *rich_text; /* rich-text content; preferred over text when both set */
        ply_rich_text_span_t   span;      /* portion of rich_text to show */

        ply_array_t           *dimensions_of_lines; /* ply_rectangle_t* per measured line */

        float                  red;       /* label color components, 0.0 - 1.0 */
        float                  green;
        float                  blue;
        float                  alpha;

        uint32_t               scale_factor; /* device pixels per logical pixel */

        uint32_t               is_hidden : 1;
        uint32_t               is_monospaced : 1;
        uint32_t               needs_size_update : 1; /* area is stale, re-measure lazily */
};
/* Whether glyph processing should only measure the text or also paint it. */
typedef enum
{
        PLY_LOAD_GLYPH_ACTION_MEASURE,
        PLY_LOAD_GLYPH_ACTION_RENDER,
} ply_load_glyph_action_t;

ply_label_plugin_interface_t *ply_label_plugin_get_interface (void);

/* forward declarations for helpers defined further down */
static void set_font_for_control (ply_label_plugin_control_t *label,
                                  const char                 *font);
static void load_glyphs (ply_label_plugin_control_t *label,
                         ply_load_glyph_action_t     action,
                         ply_pixel_buffer_t         *pixel_buffer);
static void size_control (ply_label_plugin_control_t *label,
                          bool                        force);
/* Ask fontconfig (fc-match) for the default font file path.
 *
 * Falls back to FONT_FALLBACK when fc-match is unavailable (e.g. in the
 * initrd) or produces no output. The returned string lives in a static
 * buffer that is overwritten on the next call.
 */
static const char *
find_default_font_path (void)
{
        FILE *fp;
        static char fc_match_out[PATH_MAX];

        fp = popen ("/usr/bin/fc-match -f %{file}", "r");
        if (!fp)
                return FONT_FALLBACK;

        /* Treat a read failure as empty output instead of leaving stale
         * data from a previous call in the static buffer. */
        if (fgets (fc_match_out, sizeof(fc_match_out), fp) == NULL)
                fc_match_out[0] = '\0';
        pclose (fp);

        if (strcmp (fc_match_out, "") == 0)
                return FONT_FALLBACK;

        return fc_match_out;
}
/* Ask fontconfig (fc-match) for the default monospace font file path.
 *
 * Falls back to MONOSPACE_FONT_FALLBACK when fc-match is unavailable or
 * produces no output (the old code wrongly fell back to the proportional
 * FONT_FALLBACK in the empty-output case). The returned string lives in a
 * static buffer that is overwritten on the next call.
 */
static const char *
find_default_monospace_font_path (void)
{
        FILE *fp;
        static char fc_match_out[PATH_MAX];

        fp = popen ("/usr/bin/fc-match -f %{file} monospace", "r");
        if (!fp)
                return MONOSPACE_FONT_FALLBACK;

        /* Treat a read failure as empty output instead of leaving stale
         * data from a previous call in the static buffer. */
        if (fgets (fc_match_out, sizeof(fc_match_out), fp) == NULL)
                fc_match_out[0] = '\0';
        pclose (fp);

        if (strcmp (fc_match_out, "") == 0)
                return MONOSPACE_FONT_FALLBACK;

        return fc_match_out;
}
/* Allocate and initialise a label control.
 *
 * Returns NULL on allocation failure or if FreeType fails to initialise;
 * nothing is leaked on the failure paths.
 */
static ply_label_plugin_control_t *
create_control (void)
{
        int error;
        ply_label_plugin_control_t *label;

        label = calloc (1, sizeof(ply_label_plugin_control_t));
        if (label == NULL)
                return NULL;

        label->is_hidden = true;
        label->width = -1;      /* -1 means no explicit wrap width */
        label->text = NULL;
        label->scale_factor = 1;
        label->dimensions_of_lines = ply_array_new (PLY_ARRAY_ELEMENT_TYPE_POINTER);

        error = FT_Init_FreeType (&label->library);
        if (error) {
                /* don't leak the line-dimensions array on failure */
                ply_array_free (label->dimensions_of_lines);
                free (label);
                return NULL;
        }

        set_font_for_control (label, "Sans");

        return label;
}
/* Free every measured line rectangle stored in label->dimensions_of_lines.
 * The array object itself stays alive (emptied) for reuse.
 */
static void
clear_dimensions_of_lines (ply_label_plugin_control_t *label)
{
        ply_rectangle_t **dimensions_of_lines;
        size_t i;

        if (label->dimensions_of_lines == NULL)
                return;

        dimensions_of_lines = (ply_rectangle_t **) ply_array_steal_pointer_elements (label->dimensions_of_lines);
        for (i = 0; dimensions_of_lines[i] != NULL; i++) {
                free (dimensions_of_lines[i]);
        }
        /* the steal call transfers ownership of the NULL-terminated
         * container array too; the old code leaked it */
        free (dimensions_of_lines);
}
/* Release a label control and everything it owns; NULL is a no-op. */
static void
destroy_control (ply_label_plugin_control_t *label)
{
        if (label == NULL)
                return;

        /* line measurements first, then the array that held them */
        clear_dimensions_of_lines (label);
        ply_array_free (label->dimensions_of_lines);

        /* owned strings */
        free (label->font);
        free (label->text);

        /* FreeType objects: tear down the face before the library */
        FT_Done_Face (label->face);
        FT_Done_FreeType (label->library);

        free (label);
}
/* Width of the label in logical pixels; re-measures lazily if stale. */
static long
get_width_of_control (ply_label_plugin_control_t *label)
{
        size_control (label, false);
        return label->area.width;
}

/* Height of the label in logical pixels; re-measures lazily if stale. */
static long
get_height_of_control (ply_label_plugin_control_t *label)
{
        size_control (label, false);
        return label->area.height;
}
/* Decode the first multibyte character of input_text and load its glyph
 * into the face's glyph slot (rendered when action is RENDER).
 *
 * Returns the glyph slot, or NULL if there is no face or loading failed.
 */
static FT_GlyphSlot
load_glyph (ply_label_plugin_control_t *label,
            ply_load_glyph_action_t     action,
            const char                 *input_text)
{
        FT_Error error;
        size_t character_size;
        wchar_t character;
        FT_Int32 load_flags = FT_LOAD_TARGET_LIGHT;

        if (label->face == NULL)
                return NULL;

        character_size = mbrtowc (&character, input_text, PLY_UTF8_CHARACTER_SIZE_MAX, NULL);

        /* mbrtowc reports errors as (size_t) -1 (invalid sequence) and
         * (size_t) -2 (incomplete sequence); character_size is unsigned,
         * so the old `<= 0` test only caught 0.  Fall back to the raw
         * byte in all three failure cases, since `character` is left
         * unset on error. */
        if (character_size == 0 ||
            character_size == (size_t) -1 ||
            character_size == (size_t) -2) {
                character = (wchar_t) *input_text;
                character_size = 1;
        }

        if (action == PLY_LOAD_GLYPH_ACTION_RENDER)
                load_flags |= FT_LOAD_RENDER;

        error = FT_Load_Char (label->face, (FT_ULong) character, load_flags);
        if (error)
                return NULL;

        return label->face->glyph;
}
/* Recompute label->area from the current text unless it is already up to
 * date (pass force to re-measure unconditionally). An empty label
 * collapses to a zero-sized area. */
static void
size_control (ply_label_plugin_control_t *label,
              bool                        force)
{
        if (!label->needs_size_update && !force)
                return;

        bool has_content = label->rich_text != NULL || label->text != NULL;

        if (!has_content) {
                label->area.width = 0;
                label->area.height = 0;
                return;
        }

        load_glyphs (label, PLY_LOAD_GLYPH_ACTION_MEASURE, NULL);
        label->needs_size_update = false;
}
/* Queue a repaint of the label's on-screen area.
 *
 * The dirty rectangle is snapshotted *before* any resize so that the old
 * extents are repainted too (e.g. when the label shrinks).
 */
static void
trigger_redraw (ply_label_plugin_control_t *label,
                bool                        adjust_size)
{
        ply_rectangle_t dirty_area = label->area;

        if (adjust_size)
                size_control (label, true);

        /* nothing to repaint while hidden or detached from a display */
        if (label->is_hidden || label->display == NULL)
                return;

        ply_pixel_display_draw_area (label->display,
                                     dirty_area.x, dirty_area.y,
                                     dirty_area.width, dirty_area.height);
}
/* Alpha-blend one FreeType glyph bitmap (8-bit coverage values) onto the
 * ARGB32 target buffer at (x_start, y_start), tinted with (rs, gs, bs)
 * and scaled by label->alpha. Drawing is clipped to target_size; the
 * destination alpha channel is overwritten rather than blended.
 */
static void
draw_bitmap (ply_label_plugin_control_t *label,
             uint32_t                   *target,
             ply_rectangle_t             target_size,
             FT_Bitmap                  *source,
             FT_Int                      x_start,
             FT_Int                      y_start,
             uint8_t                     rs,
             uint8_t                     gs,
             uint8_t                     bs)
{
        FT_Int x, y, xs, ys;
        FT_Int x_end = MIN (x_start + source->width, target_size.width);
        FT_Int y_end = MIN (y_start + source->rows, target_size.height);

        /* fully outside the target; the unsigned cast also rejects
         * negative start positions (they wrap to huge values) */
        if ((uint32_t) x_start >= target_size.width ||
            (uint32_t) y_start >= target_size.height)
                return;

        uint8_t rd, gd, bd, ad;

        for (y = y_start, ys = 0; y < y_end; ++y, ++ys) {
                for (x = x_start, xs = 0; x < x_end; ++x, ++xs) {
                        /* per-pixel coverage (0..255) scaled by the
                         * label's global alpha */
                        float alpha = label->alpha *
                                      (source->buffer[xs + source->pitch * ys] / 255.0f);
                        float invalpha = 1.0f - alpha;
                        uint32_t dest = target[x + target_size.width * y];

                        /* Separate colors */
                        rd = dest >> 16;
                        gd = dest >> 8;
                        bd = dest;

                        /* Alpha blending */
                        rd = invalpha * rd + alpha * rs;
                        gd = invalpha * gd + alpha * gs;
                        bd = invalpha * bd + alpha * bs;

                        /* Semi-correct: Disregard the target alpha */
                        ad = alpha * 255;

                        target[x + target_size.width * y] =
                                (ad << 24) | (rd << 16) | (gd << 8) | bd;
                }
        }
}
/* Map a terminal palette color to the RGB triple used by the Linux VT.
 * Colors outside the palette fall back to the label's own color.
 * All three output pointers are always written.
 */
static void
look_up_rgb_color_from_terminal_color (ply_label_plugin_control_t *label,
                                       ply_terminal_color_t        color,
                                       uint8_t                    *red,
                                       uint8_t                    *green,
                                       uint8_t                    *blue)
{
        switch (color) {
        case PLY_TERMINAL_COLOR_BLACK:
                *red = 0x00;
                *green = 0x00;
                *blue = 0x00;
                break;
        /* Linux VT Color: 0xaa0000 */
        case PLY_TERMINAL_COLOR_RED:
                *red = 0xaa;
                *green = 0x00;
                *blue = 0x00;
                break;
        /* Linux VT Color: 0x00aa00 */
        case PLY_TERMINAL_COLOR_GREEN:
                *red = 0x00;
                *green = 0xaa;
                *blue = 0x00;
                break;
        /* Linux VT Color: 0xaa5500 */
        case PLY_TERMINAL_COLOR_BROWN:
                *red = 0xaa;
                *green = 0x55;
                *blue = 0x00;
                break;
        /* Linux VT Color: 0x0000aa */
        case PLY_TERMINAL_COLOR_BLUE:
                *red = 0x00;
                *green = 0x00;
                *blue = 0xaa;
                break;
        /* Linux VT Color: 0xaa00aa */
        case PLY_TERMINAL_COLOR_MAGENTA:
                *red = 0xaa;
                *green = 0x00;
                *blue = 0xaa;
                break;
        /* Linux VT Color: 0x00aaaa */
        case PLY_TERMINAL_COLOR_CYAN:
                *red = 0x00;
                *green = 0xaa;
                *blue = 0xaa;
                break;
        /* Linux VT Color: 0xaaaaaa */
        case PLY_TERMINAL_COLOR_WHITE:
                /* the old code broke here without assigning anything,
                 * leaving the outputs uninitialized */
                *red = 0xaa;
                *green = 0xaa;
                *blue = 0xaa;
                break;
        default:
                *red = 255 * label->red;
                *green = 255 * label->green;
                *blue = 255 * label->blue;
                break;
        }
}
/* Adopt the pixel buffer's device scale; when it differs from the current
 * scale, reload the font at the new size and re-measure the label. */
static void
update_scale_factor_from_pixel_buffer (ply_label_plugin_control_t *label,
                                       ply_pixel_buffer_t         *pixel_buffer)
{
        uint32_t device_scale = ply_pixel_buffer_get_device_scale (pixel_buffer);

        if (device_scale == label->scale_factor)
                return;

        label->scale_factor = device_scale;

        /* reload at the new scale; fall back to "Sans" when no font set */
        set_font_for_control (label, label->font != NULL ? label->font : "Sans");
        size_control (label, true);
}
/* Close out the line currently being measured: record its rectangle in
 * label->dimensions_of_lines and grow label->area accordingly.
 *
 * glyph_x holds the pen position (26.6 fixed point) after the line's last
 * glyph; the line height comes from the face's ascender/descender metrics.
 */
static void
finish_measuring_line (ply_label_plugin_control_t *label,
                       ply_freetype_unit_t        *glyph_x,
                       ply_freetype_unit_t        *glyph_y,
                       ply_rectangle_t            *dimensions)
{
        ply_freetype_unit_t line_height;
        ply_rectangle_t *entry;

        if (label->face == NULL)
                return;

        /* ascender + |descender|, both 26.6 (descender is typically negative) */
        line_height.as_integer = label->face->size->metrics.ascender + -label->face->size->metrics.descender;

        /* line rectangle in device pixels */
        dimensions->x = label->area.x * label->scale_factor;
        dimensions->width = glyph_x->as_pixels_unit.pixels - dimensions->x;

        /* label->area is tracked in logical pixels */
        label->area.width = MAX (label->area.width, dimensions->width / label->scale_factor);

        dimensions->height = line_height.as_pixels_unit.pixels;
        label->area.height += dimensions->height / label->scale_factor;

        /* store a copy; freed later by clear_dimensions_of_lines() */
        entry = calloc (1, sizeof(ply_rectangle_t));
        *entry = *dimensions;
        ply_array_add_pointer_element (label->dimensions_of_lines, entry);

        /* advance to where the next line starts */
        dimensions->y += dimensions->height;
}
/* Shift each measured line horizontally according to the label's alignment
 * setting. Left alignment needs no adjustment. */
static void
align_lines (ply_label_plugin_control_t *label)
{
        ply_rectangle_t **lines;
        long width;
        size_t i;

        if (label->alignment == PLY_LABEL_ALIGN_LEFT)
                return;

        if (label->dimensions_of_lines == NULL)
                return;

        /* use the explicit wrap width when one was set, otherwise the
         * measured label width; convert to device pixels */
        if (label->width > 0)
                width = label->width;
        else
                width = label->area.width;
        width *= label->scale_factor;

        lines = (ply_rectangle_t **) ply_array_get_pointer_elements (label->dimensions_of_lines);

        for (i = 0; lines[i] != NULL; i++) {
                if (label->alignment == PLY_LABEL_ALIGN_CENTER)
                        lines[i]->x += (width - lines[i]->width) / 2;
                else if (label->alignment == PLY_LABEL_ALIGN_RIGHT)
                        lines[i]->x += width - lines[i]->width;
        }
}
/* Walk every character of the label's text (rich or plain UTF-8) and either
 * measure line dimensions (PLY_LOAD_GLYPH_ACTION_MEASURE) or draw the glyphs
 * into the pixel buffer (PLY_LOAD_GLYPH_ACTION_RENDER). Rendering requires a
 * prior measuring pass so that per-line dimensions are known.
 *
 * Fix: the utf8 iterator call previously contained the mangled token
 * "¤t_character" where "&current_character" was intended; restored. */
static void
load_glyphs (ply_label_plugin_control_t *label,
             ply_load_glyph_action_t     action,
             ply_pixel_buffer_t         *pixel_buffer)
{
        FT_GlyphSlot glyph = NULL;
        ply_rich_text_iterator_t rich_text_iterator;
        ply_utf8_string_iterator_t utf8_string_iterator;
        uint32_t *target = NULL;
        ply_rectangle_t target_size;
        /* Pen position in 26.6 fixed-point units, starting at the label's
         * origin scaled to device pixels. */
        ply_freetype_unit_t glyph_x = { .as_pixels_unit = { .pixels = label->area.x * label->scale_factor } };
        ply_freetype_unit_t glyph_y = { .as_pixels_unit = { .pixels = label->area.y * label->scale_factor } };
        FT_Error error;
        FT_UInt previous_glyph_index = 0;
        bool is_first_character = true;
        ply_rectangle_t *line_dimensions = NULL;
        ply_rectangle_t **dimensions_of_lines = NULL;
        size_t line_number;

        if (label->rich_text == NULL &&
            label->text == NULL)
                return;

        /* Pick the iterator matching the text representation in use. */
        if (label->rich_text != NULL) {
                ply_rich_text_iterator_initialize (&rich_text_iterator,
                                                   label->rich_text,
                                                   &label->span);
        } else {
                ply_utf8_string_iterator_initialize (&utf8_string_iterator,
                                                     label->text,
                                                     0,
                                                     ply_utf8_string_get_length (label->text, strlen (label->text)));
        }

        if (action == PLY_LOAD_GLYPH_ACTION_MEASURE) {
                /* Start measuring from scratch; line_dimensions lives on the
                 * stack and is copied into the array line by line. */
                clear_dimensions_of_lines (label);
                line_dimensions = alloca (sizeof(ply_rectangle_t));
                line_dimensions->x = label->area.x * label->scale_factor;
                line_dimensions->y = label->area.y * label->scale_factor;
                line_dimensions->width = 0;
                line_dimensions->height = 0;
                label->area.width = 0;
                label->area.height = 0;
        } else if (ply_array_get_size (label->dimensions_of_lines) == 0) {
                /* Rendering without a measuring pass: nothing we can do. */
                return;
        } else {
                dimensions_of_lines = (ply_rectangle_t **) ply_array_get_pointer_elements (label->dimensions_of_lines);
                line_number = 0;
                line_dimensions = dimensions_of_lines[line_number++];
                assert (line_dimensions != NULL);
                glyph_x.as_pixels_unit.pixels = line_dimensions->x;
        }

        if (action == PLY_LOAD_GLYPH_ACTION_RENDER) {
                target = ply_pixel_buffer_get_argb32_data (pixel_buffer);
                ply_pixel_buffer_get_size (pixel_buffer, &target_size);

                if (target_size.height == 0)
                        return;

                target_size.width *= label->scale_factor;
                target_size.height *= label->scale_factor;
        }

        /* Go through each character */
        do {
                bool should_stop;
                const char *current_character;
                uint8_t red, green, blue;
                FT_Int extra_advance = 0, positive_bearing_x = 0;
                ply_rich_text_character_t *rich_text_character;

                if (action == PLY_LOAD_GLYPH_ACTION_RENDER) {
                        /* Default color; rich text may override it below. */
                        red = 255 * label->red;
                        green = 255 * label->green;
                        blue = 255 * label->blue;
                }

                if (label->rich_text != NULL) {
                        should_stop = !ply_rich_text_iterator_next (&rich_text_iterator,
                                                                    &rich_text_character);
                        if (should_stop)
                                break;

                        current_character = rich_text_character->bytes;

                        if (action == PLY_LOAD_GLYPH_ACTION_RENDER) {
                                look_up_rgb_color_from_terminal_color (label,
                                                                       rich_text_character->style.foreground_color,
                                                                       &red,
                                                                       &green,
                                                                       &blue);
                        }
                } else {
                        size_t character_size;

                        should_stop = !ply_utf8_string_iterator_next (&utf8_string_iterator,
                                                                      &current_character,
                                                                      &character_size);
                        if (should_stop)
                                break;
                }

                glyph = load_glyph (label, action, current_character);

                if (glyph == NULL)
                        continue;

                if (is_first_character) {
                        /* Move pen to the first character's base line */
                        glyph_y.as_integer += label->face->size->metrics.ascender;
                }

                if (*current_character == '\n') {
                        /* Line break: finish (measure) or fetch (render) the
                         * line's dimensions and reset the pen. */
                        if (action == PLY_LOAD_GLYPH_ACTION_MEASURE)
                                finish_measuring_line (label, &glyph_x, &glyph_y, line_dimensions);
                        else
                                line_dimensions = dimensions_of_lines[line_number++];

                        glyph_x.as_pixels_unit.pixels = line_dimensions->x;
                        glyph_y.as_pixels_unit.pixels = line_dimensions->y;
                        glyph_y.as_integer += label->face->size->metrics.ascender;
                        continue;
                }

                /* We consider negative left bearing an increment in size,
                 * as we draw full character boxes and don't "go back" in
                 * this plugin. Positive left bearing is treated as usual.
                 * For definitions see
                 * https://freetype.org/freetype2/docs/glyphs/glyphs-3.html
                 */
                if (glyph->bitmap_left < 0)
                        extra_advance = -glyph->bitmap_left;
                else
                        positive_bearing_x = glyph->bitmap_left;

                if (action == PLY_LOAD_GLYPH_ACTION_RENDER) {
                        draw_bitmap (label, target, target_size, &glyph->bitmap,
                                     glyph_x.as_pixels_unit.pixels + positive_bearing_x,
                                     glyph_y.as_pixels_unit.pixels - glyph->bitmap_top,
                                     red,
                                     green,
                                     blue);
                }

                glyph_x.as_integer += glyph->advance.x + extra_advance;

                if (!is_first_character) {
                        FT_Vector kerning_space;

                        /* Apply kerning between the previous and current glyph. */
                        error = FT_Get_Kerning (label->face, previous_glyph_index, glyph->glyph_index, FT_KERNING_DEFAULT, &kerning_space);
                        if (error == 0)
                                glyph_x.as_integer += kerning_space.x;

                        previous_glyph_index = glyph->glyph_index;
                } else {
                        is_first_character = false;
                }
        } while (true);

        if (action == PLY_LOAD_GLYPH_ACTION_MEASURE) {
                if (!is_first_character) {
                        char *text = NULL;

                        /* The last line has no trailing '\n'; close it out here. */
                        finish_measuring_line (label, &glyph_x, &glyph_y, line_dimensions);

                        if (ply_is_tracing ()) {
                                if (label->rich_text != NULL)
                                        text = ply_rich_text_get_string (label->rich_text, &label->span);

                                ply_trace ("Text '%s' has dimensions %ldx%ld", text?: label->text,
                                           line_dimensions->width,
                                           line_dimensions->height);
                                free (text);
                        }
                }

                align_lines (label);
        }
}
/* Render the label into the given pixel buffer, clipped to the damaged
 * rectangle (x, y, width, height). Does nothing when the label is hidden,
 * has no text, or lies entirely outside the damaged area. */
static void
draw_control (ply_label_plugin_control_t *label,
              ply_pixel_buffer_t         *pixel_buffer,
              long                        x,
              long                        y,
              unsigned long               width,
              unsigned long               height)
{
        if (label->is_hidden)
                return;

        if (label->rich_text == NULL && label->text == NULL)
                return;

        update_scale_factor_from_pixel_buffer (label, pixel_buffer);

        /* Check for overlap.
         * TODO: Don't redraw everything if only a part should be drawn! */
        if (label->area.x > x + (long) width ||
            label->area.y > y + (long) height ||
            label->area.x + (long) label->area.width < x ||
            label->area.y + (long) label->area.height < y)
                return;

        load_glyphs (label, PLY_LOAD_GLYPH_ACTION_RENDER, pixel_buffer);
}
/* Change the label's text alignment; no-op when unchanged. Marks the
 * control for re-measurement and forces a redraw. */
static void
set_alignment_for_control (ply_label_plugin_control_t *label,
                           ply_label_alignment_t       alignment)
{
        if (label->alignment == alignment)
                return;

        label->alignment = alignment;
        label->needs_size_update = true;
        trigger_redraw (label, true);
}
/* Change the label's fixed width; no-op when unchanged. Marks the control
 * for re-measurement and forces a redraw. */
static void
set_width_for_control (ply_label_plugin_control_t *label,
                       long                        width)
{
        if (label->width == width)
                return;

        label->width = width;
        label->needs_size_update = true;
        trigger_redraw (label, true);
}
/* Release all text owned by the label: the plain-text buffer, the rich-text
 * reference (resetting the span), and the cached per-line dimensions. */
static void
clear_text (ply_label_plugin_control_t *label)
{
        free (label->text);
        label->text = NULL;

        if (label->rich_text != NULL) {
                ply_rich_text_drop_reference (label->rich_text);
                label->rich_text = NULL;
                label->span.offset = 0;
                label->span.range = 0;
        }

        /* Cached line measurements are stale once the text is gone. */
        clear_dimensions_of_lines (label);
}
/* Replace the label's text with a copy of the given string and schedule a
 * re-measure and redraw.
 *
 * NOTE(review): the guard compares pointers, not contents -- it only skips
 * work when the exact same buffer is passed back. Passing NULL would make
 * strdup() crash; callers presumably never do -- TODO confirm. */
static void
set_text_for_control (ply_label_plugin_control_t *label,
                      const char                 *text)
{
        if (label->text != text) {
                clear_text (label);
                label->text = strdup (text);
                label->needs_size_update = true;
                trigger_redraw (label, true);
        }
}
/* Replace the label's content with a rich-text object (reference counted)
 * restricted to the given span, then schedule a re-measure and redraw. */
static void
set_rich_text_for_control (ply_label_plugin_control_t *label,
                           ply_rich_text_t            *rich_text,
                           ply_rich_text_span_t       *span)
{
        clear_text (label);

        /* Hold our own reference for as long as the label displays it. */
        ply_rich_text_take_reference (rich_text);
        label->rich_text = rich_text;
        label->span = *span;

        label->needs_size_update = true;
        trigger_redraw (label, true);
}
/* Set the label's font from a fontconfig-like spec string.
 *
 * Only two aspects of the spec are honored: whether the family name
 * contains "Mono"/"mono" (switches between the default monospace and
 * proportional face) and a trailing size ("25" in points or "25px" in
 * pixels). Everything else in the spec is ignored. */
static void
set_font_for_control (ply_label_plugin_control_t *label,
                      const char                 *font)
{
        /* Only able to set size and monospaced/nonmonospaced */
        int error = 0;
        char *size_str_after;
        const char *size_str, *font_path;
        char *new_font;
        /* Default: 12pt at 96 DPI, unless the spec says otherwise. */
        ply_freetype_unit_t size = { .as_points_unit = { .points = 12 } };
        int dpi = 96;
        bool size_in_pixels = false;

        label->needs_size_update = true;

        /* Copy first: font may alias label->font (see
         * update_scale_factor_from_pixel_buffer), so free after strdup. */
        new_font = strdup (font);
        free (label->font);
        label->font = new_font;

        /* Swap the FreeType face only when crossing the monospace boundary
         * (or when no face is loaded yet). */
        if (strstr (font, "Mono") || strstr (font, "mono")) {
                if (!label->is_monospaced) {
                        FT_Done_Face (label->face);
                        font_path = find_default_monospace_font_path ();

                        if (font_path != NULL)
                                error = FT_New_Face (label->library, font_path, 0, &label->face);

                        label->is_monospaced = true;
                }
        } else {
                if (label->is_monospaced || label->face == NULL) {
                        FT_Done_Face (label->face);
                        font_path = find_default_font_path ();

                        if (font_path != NULL)
                                error = FT_New_Face (label->library, font_path, 0, &label->face);

                        label->is_monospaced = false;
                }
        }

        if (error != 0) {
                FT_Done_Face (label->face);
                label->face = NULL;
                ply_trace ("Could not load font, error %d", error);
                return;
        }

        /* Format is "Family 1[,Family 2[,..]] [25[px]]" .
         * [] means optional. */
        size_str = strrchr (font, ' ');

        if (size_str) {
                unsigned long parsed_size;
                /* strtoul skips the leading space; if nothing numeric
                 * follows, size_str_after == size_str and the default
                 * size is kept. */
                parsed_size = strtoul (size_str, &size_str_after, 10);

                if (size_str_after != size_str) {
                        if (strcmp (size_str_after, "px") == 0) {
                                size_in_pixels = true;
                                size.as_pixels_unit.pixels = parsed_size;
                        } else {
                                size.as_points_unit.points = parsed_size;
                        }
                }
        }

        if (size_in_pixels)
                FT_Set_Pixel_Sizes (label->face, 0, size.as_pixels_unit.pixels * label->scale_factor);
        else
                FT_Set_Char_Size (label->face, size.as_integer, 0, dpi * label->scale_factor, 0);

        /* Ignore errors, to keep the current size. */

        trigger_redraw (label, true);
}
/* Set the label's text color (components in [0.0, 1.0]) and redraw.
 * No re-measure is needed, so the redraw is not forced (false). */
static void
set_color_for_control (ply_label_plugin_control_t *label,
                       float                       red,
                       float                       green,
                       float                       blue,
                       float                       alpha)
{
        label->red = red;
        label->green = green;
        label->blue = blue;
        label->alpha = alpha;

        trigger_redraw (label, false);
}
/* Show the label on the given display at position (x, y); re-sizes the
 * control if it moved and repaints the previously occupied area.
 * Always returns true. */
static bool
show_control (ply_label_plugin_control_t *label,
              ply_pixel_display_t        *display,
              long                        x,
              long                        y)
{
        ply_rectangle_t dirty_area;
        bool force_resize = false;

        /* Remember the old area so it can be repainted after the move. */
        dirty_area = label->area;
        label->display = display;

        if (label->area.x != x || label->area.y != y) {
                label->area.x = x;
                label->area.y = y;
                force_resize = true;
        }

        label->is_hidden = false;
        size_control (label, force_resize);

        if (!label->is_hidden && label->display != NULL)
                ply_pixel_display_draw_area (label->display,
                                             dirty_area.x, dirty_area.y,
                                             dirty_area.width, dirty_area.height);

        /* NOTE(review): is_hidden is assigned false a second time here;
         * looks redundant unless size_control/draw can flip it -- confirm
         * before removing. */
        label->is_hidden = false;

        return true;
}
/* Hide the label and repaint the area it occupied so it disappears from
 * the display; detaches the label from its display. */
static void
hide_control (ply_label_plugin_control_t *label)
{
        label->is_hidden = true;
        if (label->display != NULL)
                ply_pixel_display_draw_area (label->display,
                                             label->area.x, label->area.y,
                                             label->area.width, label->area.height);

        label->display = NULL;
}
/* Report whether the label is currently hidden. */
static bool
is_control_hidden (ply_label_plugin_control_t *label)
{
        return label->is_hidden;
}
/* Plugin entry point: return the static vtable mapping the generic label
 * interface onto this FreeType-based implementation. */
ply_label_plugin_interface_t *
ply_label_plugin_get_interface (void)
{
        static ply_label_plugin_interface_t plugin_interface =
        {
                .create_control            = create_control,
                .destroy_control           = destroy_control,
                .show_control              = show_control,
                .hide_control              = hide_control,
                .draw_control              = draw_control,
                .is_control_hidden         = is_control_hidden,
                .set_text_for_control      = set_text_for_control,
                .set_rich_text_for_control = set_rich_text_for_control,
                .set_alignment_for_control = set_alignment_for_control,
                .set_width_for_control     = set_width_for_control,
                .set_font_for_control      = set_font_for_control,
                .set_color_for_control     = set_color_for_control,
                .get_width_of_control      = get_width_of_control,
                .get_height_of_control     = get_height_of_control
        };

        return &plugin_interface;
}
/* vim: set ts=4 sw=4 expandtab autoindent cindent cino={.5s, (0: */
|
import pyttsx3
import speech_recognition as sr
import language_tool_python as ltp
import datetime
import wikipedia
import webbrowser as wb
import os
# Initialize the SAPI5 (Windows) text-to-speech engine once at import time.
engine = pyttsx3.init('sapi5')
voices = engine.getProperty('voices')
# Use the first installed voice -- presumably the system default; TODO confirm.
engine.setProperty('voice', voices[0].id)
engine.setProperty('rate', 175)  # speech rate in words per minute
def speak(audio):
    """Speak the given text aloud; blocks until the utterance finishes."""
    engine.say(audio)
    engine.runAndWait()
def greetings():
    """Greet the user based on the current local hour, then introduce the assistant."""
    hour = datetime.datetime.now().hour
    if 3 <= hour < 12:
        greeting = "Good Morning, namastay!!!"
    elif 12 <= hour < 16:
        greeting = "Good Afternoon, namastay!!!"
    else:
        greeting = "Good Evening, namastay!!!"
    speak(greeting)
    speak("I am suhl·maan kahn, How may I help you?")
def takeCommand():
    """Listen on the default microphone and return the recognized phrase.

    Returns:
        str: the text recognized by Google's speech API, or the literal
        string 'None' when recognition fails (callers check for that
        sentinel, so it must stay a string, not the None object).
    """
    r = sr.Recognizer()
    with sr.Microphone() as source:
        print("Listening...")
        r.pause_threshold = 1
        audio = r.listen(source)
    try:
        print("Recognizing...")
        query = r.recognize_google(audio, language='en-in')
        print("\n", query)
    except Exception:
        # Network or recognition errors are expected in normal use; prompt
        # the user to repeat instead of crashing the main loop.
        print("Say that again please.")
        speak("Say that again please.")
        return 'None'
    return query
if __name__ == '__main__':
    greetings()
    chrome_path = 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe %s'

    # Command phrase -> website, checked in declaration order (dicts preserve
    # insertion order), matching the original elif chain's priority.
    sites = {
        'open youtube': 'youtube.com',
        'open whatsapp': 'web.whatsapp.com',
        'open gmail': 'mail.google.com',
        'open google': 'www.google.com',
        'open spotify': 'www.spotify.com',
    }
    # Command phrase -> local movie file.
    movies = {
        'play harry potter part 1': 'F:\\Movies\\Harry Potter And The Sorcerers Stone 2001 [ Bolly4u.trade ] Dual Audio BRip .mkv',
        'play the dark knight': 'F:\\Movies\\The Dark Knight 2008 Dual Audio 720P BluRay[dualdl.net].mkv',
    }

    while True:
        query = takeCommand().lower()

        matched_site = next((url for phrase, url in sites.items() if phrase in query), None)
        matched_movie = next((path for phrase, path in movies.items() if phrase in query), None)

        if 'wikipedia' in query:
            speak("Searching Wikipedia...")
            query = query.replace("wikipedia", "")
            results = wikipedia.summary(query, sentences=2)
            speak("According to Wikipedia,")
            print(results)
            speak(results)
        elif matched_site is not None:
            wb.get(chrome_path).open(matched_site)
        elif matched_movie is not None:
            os.startfile(matched_movie)
        # The three original exit phrases all did the same thing; merged.
        elif any(word in query for word in ('stop', 'exit', 'kam khatm')):
            print("Have a nice day!")
            speak("Have a nice day!")
            exit()
|
"""
File containing functions to handle the parsing of a TOKDOC formatted message.
"""
import datetime
import hashlib
import os
import platform
from dotenv import load_dotenv
from Utilities import constants
def parse_message(message) -> dict:
    """
    Parse a TOKDOC-formatted message (str or bytes) into a dictionary.

    Supported message forms:
        <checksum>\r\n<message_size>\r\n{START}\r\n\r\n{{START METHOD}}\r\nAUTH <email> <password>\r\n{{END METHOD}}\r\n\r\n{{END}}
        <checksum>\r\n<message_size>\r\n{START}\r\n\r\n{{START METHOD}}\r\nEXIT <email> <password>\r\n{{END METHOD}}\r\n\r\n{{END}}
        <checksum>\r\n<message_size>\r\n{START}\r\n\r\n{{START METHOD}}\r\nDATA <method> <ip>:<port> [<file_name>]\r\n{{END METHOD}}\r\n\r\n{{START HEADERS}}\r\n...\r\n{{END HEADERS}}\r\n\r\n{{START FILE}}\r\nFILE_SIZE:<size>\r\n{{END FILE}}\r\n\r\n{{END}}

    AUTH/EXIT messages yield only a 'parameters' entry; DATA messages
    additionally yield 'headers' and 'file_size' entries, e.g.:

        {
            'parameters': {'method_group': 'DATA', 'method': 'UPLOAD',
                           'ip': '127.0.0.1', 'port': 3000, 'file_name': 'tested.png'},
            'headers': {'USER': 'test@test.com', 'ACCESS_KEY': '...',
                        'TIMESTAMP': '...', 'AUTHORIZED': ['test@test.com']},
            'file_size': 239937
        }

    :raises TypeError: if the method group type is not AUTH, EXIT or DATA.
    :return: message dictionary as described above
    """
    text = get_message_string(message)
    group = get_method_group_type(text)

    if group in (constants.AUTH, constants.EXIT):
        return {constants.PARAMETERS_KEY: get_auth_exit_parameters(text)}

    if group == constants.DATA:
        return {
            constants.PARAMETERS_KEY: get_data_parameters(text),
            constants.HEADERS: get_headers(text),
            constants.FILE_SIZE_KEY: get_file_size(text),
        }

    raise TypeError('The method group type "' + group + '" is not supported')
# START TESTING STUFF
def encoded_auth_test() -> bytes:
    """
    :return: bytes -> an encoded AUTH message
    """
    sample = (
        "5a32034ef2da2b10585068273adef5fd602b631b62b8527bb73d7669f2490aae\r\n77 \r\n{START}\r\n\r\n{{"
        "START METHOD}}\r\nAUTH test@test.com test\r\n{{END METHOD}}\r\n\r\n{{END}} "
    )
    return sample.encode()
def encoded_data_test() -> bytes:
    """
    :return: bytes -> an encoded DATA message
    """
    sample = (
        "d59075b5224106ca9b6f9a83ded0cefe4be51f9c1fd5e45f3d76128e60de3a68\r\n314 \r\n{START}\r\n\r\n{{"
        "START METHOD}}\r\nDATA DOWNLOAD 127.0.0.1:3000 image.jpg\r\n{{END METHOD}}\r\n\r\n{{START "
        "HEADERS}}\r\nUSER:john@doe.com\r\nACCESS_KEY:kewuahcopfmw983c2[093mru0cum239rcum2[3pa[29cu,"
        "r\r\nTIMESTAMP:2023-02-22T20:14:31Z\r\n{{END HEADERS}}\r\n\r\n{{START "
        "FILE}}\r\nFILE_SIZE:33\r\njryghfiweufhjwemflnwefkwe\r\n{{END FILE}}\r\n\r\n{END} "
    )
    return sample.encode()
def test_message() -> str:
    """
    Build a complete, checksummed DATA/UPLOAD message for the local test
    image, following the TOKDOC layout documented in parse_message().

    :return: str -> a DATA message
    """
    # The test fixture path differs between Windows and POSIX separators.
    if platform.system() == 'Windows':
        file_size = os.stat(".\\test_data\\test.png").st_size
    else:
        file_size = os.stat('./test_data/test.png').st_size
    # Assemble the body: method line, headers, and file section.
    message = (constants.START +
               constants.CRLF + constants.CRLF +
               constants.START_METHOD +
               constants.CRLF +
               constants.DATA + constants.SPACE + constants.UPLOAD + constants.SPACE + '127.0.0.1:3000' + constants.SPACE + 'tested.png' +
               constants.CRLF +
               constants.END_METHOD +
               constants.CRLF + constants.CRLF +
               constants.START_HEADERS +
               constants.CRLF +
               constants.USER + ':test@test.com' +
               constants.CRLF +
               constants.ACCESS_KEY + ':' + generate_access_key_decoded('test@test.com') +
               constants.CRLF +
               constants.TIMESTAMP + ':' + str(datetime.datetime.utcnow().isoformat()) +
               constants.CRLF +
               constants.AUTHORIZED + ':' + '(test@test.com,test2@test2.com)' +
               constants.CRLF +
               constants.END_HEADERS +
               constants.CRLF + constants.CRLF +
               constants.START_FILE +
               constants.CRLF +
               constants.FILE_SIZE + ":" + str(file_size) +
               constants.CRLF +
               constants.END_FILE +
               constants.CRLF + constants.CRLF +
               constants.END)
    # Prefix the body with its size, right-padded to a fixed 16-char field.
    message_length = len(message)
    message_length = str(message_length) + (16 - len(str(message_length))) * " "
    message = message_length + constants.CRLF + message
    # Finally prefix the sha256 checksum of size-field + body.
    hashed = hashlib.sha256(message.encode()).hexdigest()
    message = hashed + constants.CRLF + message
    return message
def test_file() -> bytes:
    """
    Read the test image used by the protocol tests.

    :return: bytes -> the bytes of a test file
    """
    # Context manager guarantees the handle is closed even if read() raises.
    with open('../test_data/test.png', 'rb') as file:
        return file.read()
# END TESTING STUFF
def get_message_size(message) -> int:
    """
    Read the fixed-width message-size field that follows the checksum.

    :param message: str or bytes message
    :return: int -> message size as provided in the message
    """
    text = get_message_string(message)
    start = constants.CHECKSUM_LENGTH + constants.CRLF_LENGTH
    end = start + constants.MESSAGE_SIZE_LENGTH
    return int(text[start:end].strip())
def get_message_content(message) -> str:
    """
    Strip the checksum and size prefix from a message.

    :param message: str or bytes message
    :return: str -> the message content excluding the checksum and message size
    """
    text = get_message_string(message)
    prefix_length = (constants.CHECKSUM_LENGTH + constants.CRLF_LENGTH
                     + constants.MESSAGE_SIZE_LENGTH + constants.CRLF_LENGTH)
    body_length = constants.CRLF_LENGTH + get_message_size(text)
    return text[prefix_length: prefix_length + body_length]
def get_method_content(message) -> str:
    """
    :param message: str or bytes message
    :return: str -> The content enclosed within the {{START METHOD}}, {{END METHOD}} tags
    """
    content = get_message_content(get_message_string(message))
    begin = content.index(constants.START_METHOD) + len(constants.START_METHOD)
    finish = content.index(constants.END_METHOD)
    return content[begin:finish].strip('\r\n')
def get_auth_exit_parameters(message) -> dict:
    """
    Extract the parameters of an AUTH or EXIT message.

    :param message: str or bytes message
    :raises TypeError: when the message is not AUTH or EXIT
    :return: dict -> {'method': str} for EXIT, or
             {'method': str, 'email': str, 'password': str} for AUTH
    """
    group = get_method_group_type(message)
    if group != constants.AUTH and group != constants.EXIT:
        raise TypeError('The method group type must be "AUTH" or "EXIT" to use this function')

    tokens = get_method_content(get_message_string(message)).split()

    if tokens[0] == constants.EXIT:
        # EXIT requests carry no credentials.
        return {'method': tokens[0]}

    return {
        'method': tokens[0],
        'email': tokens[1],
        'password': tokens[2],
    }
def generate_access_key_decoded(email) -> str:
    """
    Generates an access key for the provided email.

    The key is the hex digest of sha256(SERVER_KEY + email).

    :param email: the user's email address
    :return: str -> access key
    :raises RuntimeError: if SERVER_KEY is not present in the environment
    """
    load_dotenv()
    server_key = os.getenv('SERVER_KEY')
    if server_key is None:
        # Fail with a clear message rather than a confusing TypeError from
        # concatenating None with a string.
        raise RuntimeError('SERVER_KEY is not set in the environment')
    pre_hash = server_key + email
    return hashlib.sha256(pre_hash.encode()).hexdigest()
def get_data_parameters(message) -> dict:
    """
    Extract the parameters of a DATA message's method line.

    :param message: str or bytes message
    :return: dict -> {
        'method_group': str,
        'method': str,
        'ip': str,
        'port': int,
    } plus 'file_name' when the method line names a file.
    """
    tokens = get_method_content(get_message_string(message)).split()
    address = tokens[2].split(':')

    parameters = {
        'method_group': tokens[0],
        'method': tokens[1],
        'ip': address[0],
        'port': int(address[1]),
    }

    # The file name is optional (absent for e.g. LIST requests).
    if len(tokens) > 3:
        parameters['file_name'] = tokens[3]

    return parameters
def get_method_group_type(message) -> str:
    """
    Returns the type of message (AUTH or DATA)

    :param message: str or bytes message
    :return: str
    """
    tokens = get_method_content(get_message_string(message)).split()
    return tokens[0]
def get_method_type(message) -> str:
    """
    Returns the method type (DOWNLOAD, UPLOAD, or LIST)

    :param message: str or bytes message
    :return: str
    """
    tokens = get_method_content(get_message_string(message)).split()
    return tokens[1]
def get_header_content(message) -> str:
    """
    :param message: str or bytes message
    :return: str -> the string contained in the {{START HEADERS}}, {{END HEADERS}} tags
    """
    content = get_message_content(message)
    begin = content.index(constants.START_HEADERS) + len(constants.START_HEADERS)
    finish = content.index(constants.END_HEADERS)
    return content[begin:finish].strip('\r\n')
def get_headers(message) -> dict:
    """
    :param message:
    :return: dict -> containing key value pairs of all the headers
    """
    message = get_message_string(message)
    header_content = get_header_content(message)
    # NOTE(review): split() breaks on any whitespace, so individual header
    # values must not contain spaces -- TODO confirm the protocol forbids them.
    headers_list = header_content.split()
    headers = {}
    for header in headers_list:
        key = header[:header.index(':')]
        if key == constants.AUTHORIZED:
            # Skip ':' plus the opening '(' and drop the trailing ')',
            # leaving a comma-separated list of emails.
            value = header[header.index(':') + 2: -1]
            value = value.replace(' ', '')
            value = value.split(',')
        else:
            value = header[header.index(':') + 1:]
        # Empty values are dropped rather than stored.
        if value:
            headers[key] = value
    return headers
def get_file_size(message) -> int:
    """
    Read the FILE_SIZE value from the {{START FILE}} section.

    :param message: str or bytes message
    :return: int -> file size as specified in the message
    """
    content = get_message_content(get_message_string(message))
    begin = content.index(constants.START_FILE) + len(constants.START_FILE)
    finish = content.index(constants.END_FILE)
    file_section = content[begin:finish].strip('\r\n')
    return int(file_section.split(':')[1])
def get_message_string(message) -> str:
    """
    Normalize a message to ``str``.

    :param message: string or bytes of the message
    :return: the string formatted message
    :raises TypeError: for any other input type
    """
    if isinstance(message, str):
        return message
    if isinstance(message, bytes):
        return message.decode()
    raise TypeError('The message must either be of type bytes or string. You message was type', type(message))
|
/*
Copyright (C) 2019 SUSE LLC
This library is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation; either version 2.1 of the
License, or (at your option) version 3.0 of the License. This library
is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details. You should have received a copy of the GNU
Lesser General Public License along with this library; if not, write
to the Free Software Foundation, Inc., 51 Franklin Street, Fifth
Floor, Boston, MA 02110-1301 USA
*/
/*-/
File: YQCustomStatusItemSelector.h
Author: Stefan Hundhammer <shundhammer@suse.de>
/-*/
#ifndef YQCustomStatusItemSelector_h
#define YQCustomStatusItemSelector_h
#include "YQItemSelector.h"
class YQCustomStatusSelectorItemWidget;
/**
* ItemSelector widget with support for custom status values, not just 0 or 1.
*
* This does not use a standard QCheckBox or QRadioButton, but a QToolButton
* with an icon that is exchanged whenever the status changes.
**/
class YQCustomStatusItemSelector: public YQItemSelector
{
    Q_OBJECT

public:

    /**
     * Constructor.
     *
     * 'customStates' defines the set of selectable status values; their
     * icons are preloaded and cached (see preloadStatusIcons()).
     **/
    YQCustomStatusItemSelector( YWidget                       * parent,
                                const YItemCustomStatusVector & customStates );

    /**
     * Destructor.
     **/
    virtual ~YQCustomStatusItemSelector();

    /**
     * Add an item.
     *
     * Reimplemented from YQItemSelector / YSelectionWidget.
     **/
    virtual void addItem( YItem * item );

    /**
     * Select or deselect an item.
     *
     * Reimplemented from YSelectionWidget.
     **/
    virtual void selectItem( YItem * item, bool selected = true );

    /**
     * Return the status icon for the specified status.
     **/
    const QIcon & statusIcon( int status ) const;

    /**
     * Activate selected item. Can be used in tests to simulate user input.
     *
     * Derived classes are required to implement this.
     **/
    virtual void activateItem( YItem * item );

protected slots:

    /**
     * Cycle the status of an item and report the click to the calling
     * application.
     **/
    void itemClicked( YQCustomStatusSelectorItemWidget * itemWidget );

protected:

    /**
     * Preload and cache the status icons.
     **/
    void preloadStatusIcons();

    /**
     * Update the status indicator (the status icon) for an item.
     *
     * Reimplemented from YItemSelector.
     **/
    virtual void updateCustomStatusIndicator( YItem * item );

    // Data members

    // Icon cache, indexed by status value (filled by preloadStatusIcons()).
    QList<QIcon> _statusIcons;

};      // class YQCustomStatusItemSelector
/**
* Class for the widgets of one YQCustomStatusItemSelector item.
**/
class YQCustomStatusSelectorItemWidget: public YQSelectorItemWidget
{
    Q_OBJECT

public:

    /**
     * Constructor.
     **/
    YQCustomStatusSelectorItemWidget( YQItemSelector * parent,
                                      YItem          * item );

    /**
     * Destructor.
     **/
    virtual ~YQCustomStatusSelectorItemWidget();

    /**
     * Set the correct status icon according to the status of this item.
     **/
    void setStatusIcon();

signals:

    /**
     * Emitted when the user clicks on the heading toggle to change the status
     * or uses a keyboard operation to do the same.
     **/
    void clicked( YQCustomStatusSelectorItemWidget * itemWidget );

protected slots:

    /**
     * Forwarder slot from the heading toggle to this class.
     **/
    void slotClicked();

protected:

    /**
     * Create the appropriate toggle button for this item and connect it to
     * appropriate slots.
     *
     * Reimplemented from YQSelectorItemWidget.
     **/
    virtual QAbstractButton * createHeadingToggle( const std::string & label,
                                                   QWidget           * parent );

    /**
     * Set the correct status icon according to the status of this item.
     **/
    void setStatusIcon( QAbstractButton * toggle );

    /**
     * Return the amount of indentation in pixels for the description text.
     *
     * Reimplemented from YQSelectorItemWidget.
     **/
    virtual int itemDescriptionIndent() const;

};      // class YQCustomStatusSelectorItemWidget
#endif // YQCustomStatusItemSelector_h
|
# Data-Centric Vision-Language Pre-training
[Arxiv](https://arxiv.org/pdf/2305.20087.pdf)

- **At least half of the samples in the well-cleaned dataset (CC3M, refined from 5 billion images with 0.0006 preserved) negatively affect the learned representation!**
- The purpose of this project is to **<span style="color:green">compress existing large-scale vision-language pre-training datasets</span>** without dropping the performance.
We want the community to pay more attention to data.
> This work **is still in progress**, now the compression rate is around 70%-80%.
> However, the data selection strategy is quite simple; we are exploring more solid methods.
> We also focus on refine existing dataset with our toolbox [Image2Paragraph](https://github.com/showlab/Image2Paragraph).
## News
08/17/2023: Code released.
## To do
- Website.
- Show referenced generated_annotation_file.
## 1. Introduction
### 1. Conventional Vision-language Datasets
|Index|Original Dataset| #Original Samples|Reduced-Dataset|#Reduced Samples| Compression Rate|
|--|--|--|--|--|--|
|0|CC3M|2.82M|TL;DR CC3M|0.67M|76.25%|
|1|CC12M|10.8M|TL;DR CC12M|2.4M|77.8%|
|2|YFCC|14.9M|TL;DR YFCC|2.5M|83.33%|
|3|LAION-Sub|40M|TL;DR LAION-Sub|8.04M|79.90%|
### 2. Data-efficient learning methods
"Large-scale" means that the methods are effective when used on datasets that are very large in size.
The "task agnostic" means that the methods can be used regardless of the specific downstream task, and without any prior exposure to the associated data.
| Method | Year | Data Type | Compression Ratio | Task Agnostic | Large-scale | Supervision | Generation/Selection |
|--------------------------|------|---------------|-------------------|---------------|-------------|-------------------|----------------------|
| Dataset Distillation [1] | 2017 | Image | 99%-99.99% | No | No | Class Label | Generation |
| Data Pruning [2] | 2022 | Image | 20%-30% | No | Yes | Class Label | Selection |
| Neural Data Server [3] | 2020 | Multi-modality | 94%-98% | No | Yes | Image-text Pairs | Selection |
| TL;DR (ours) | 2023 | Multi-modality | 75%-90% | Yes | Yes | Image-text Pairs | Generation+Selection |
[1] Wang T et al. Dataset distillation[J]. arXiv preprint arXiv:1811.10959, 2018.
[2] Sorscher B et al. Beyond neural scaling laws: beating power law scaling via data pruning[J]. NeurIPS, 2022.
[3] Yan X, et al. Neural data server: A large-scale search engine for transfer learning data[C]. CVPR. 2020.
## 2. Run
### Step 1. Pre-train Codebook-based Vision-Language Model
The codebook implementation is from VQ-VQE.
Please follow [GETTING_START.md](GETTING_START.md) for data preparation and captioner model training.
### Step 2. Codebook Extractor
```
python codebook_extractor.py
```
### Step 3. Codebook Clustering and Selection
```
python codebook_cluster.py
```
In comparison, use random selection also
```
python random_selection.py
```
### Step4. Fine-tuning VLP Model on Human-cleaned Captioning Dataset
```
python vq_compress_model/train_caption.py
```
### Step5. Generate Training Json
```
python generate_train_json_w_caption.py
```
We show the ITM score distribution as below:

The main reason for the following steps is to raise the matching score. This is not limited to an image captioner; neural data server and other techniques that improve the alignment between visual and text also work.
### Step6. Pre-training and Evaluating on downstream Tasks
Use the generated annotation files to train VLP model in normal way.
## 3. Some Results
#### a. CC3M
|Dataset|Sample|Pretraining Time|COCO TR@1|COCO IR@1|COCO Captioning B@4|NLVR2|
|--|--|--|--|--|--|--|
|CC3M|2.82M|19H|70.9|54.3|36.8|76.2|
|TL;DR CC3M|0.67M|4.7H|72.8|54.8|37.6|78.0|
#### b. CC12M
|Dataset|Sample|Pretraining Time|Flickr TR@1|Flickr IR@1|COCO Captioning B@4|NLVR2|
|--|--|--|--|--|--|--|
|CC12M|10.8M|65H|84.7|75.3|37.5|78.9|
|TL;DR CC12M|2.4M|14H|85.5|76.3|38.1|78.5|
#### c. YFCC
Compression Rate: 83.33%
#### d. LAION-Subset
Compression Rate: 80%
## Acknowledgement
This work is mainly inspired by [Dataset Distillation](https://arxiv.org/abs/1811.10959) and [Data Pruning](https://arxiv.org/abs/2206.14486).
The architecture ablations are mainly based on [blip](https://github.com/salesforce/BLIP) and [ViLT](https://github.com/dandelin/ViLT).
Thanks for these good works.
## Citation
If you find our work helps, please use the following BibTeX entry for citation.
```
@article{wang2023tldr,
title={Too Large; Data Reduction for Vision-Language Pre-Training},
author={Alex Jinpeng Wang, Kevin Qinghong Lin, David Junhao Zhang, Stan Weixian Lei and Mike Zheng Shou },
journal={ICCV},
year={2023}
}
```
|
<!-- This HTML file defines the structure of the Manage Surveys page -->
<h2 class="md-display-1" layout-padding>Manage Surveys</h2>
<div layout="column" ng-hide="Surveys.showResult || Surveys.showQuestion" layout-padding>
    <div layout="row" layout-align="start center">
        <md-button class="md-raised" goto="/newSurvey"> <!-- Pressing this button navigates to the page where new surveys are created -->
            New Survey
        </md-button>
    </div>
    <div class="md-title" layout-padding>Surveys list:</div> <!-- Here is the list of surveys -->
    <div layout="row" layout-wrap>
        <md-card ng-repeat="surv in Surveys.surveys | orderBy: 'surv_name'" layout="column" layout-padding> <!-- the ng-repeat directive repeats the element it is placed on once for each item in the given array -->
            <b>{{surv.surv_name}}</b> <!-- Here the iterated array is Surveys.surveys; each element is exposed as surv -->
            <div layout="row">
                <div layout="column">
                    <md-button class="md-raised font-size-button" ng-click="Surveys.deleteSurvey(surv.ID,$event)"> <!-- this button deletes the selected survey -->
                        Delete
                    </md-button>
                    <md-button class="md-raised font-size-button" ng-click="Surveys.assignUsers($event,surv.ID)"> <!-- this button opens the dialog for assigning users to the selected survey -->
                        Assign Users
                    </md-button>
                </div>
                <div layout="column">
                    <md-button class="md-raised font-size-button" ng-click="Surveys.showQuestions(surv)"> <!-- pressing this button shows the questions contained in the survey -->
                        Show Questions
                    </md-button>
                    <md-button class="md-raised font-size-button" ng-click="Surveys.showResults(surv)"> <!-- pressing this button shows all the answers submitted for the survey -->
                        Show Results
                    </md-button>
                </div>
            </div>
        </md-card>
    </div>
</div>
<div ng-show="Surveys.showResult" layout="column" layout-padding> <!-- This section appears when viewing survey results; the survey list is hidden and this section is shown instead -->
    <div layout="row" layout-align="start center">
        <md-button class="md-raised font-size-button" ng-click="Surveys.showResult = false">Back</md-button>
    </div>
    <div class="md-headline">Results of: <b font-size="24px">{{Surveys.selectedSurvName}}</b></div>
    <div layout="row" layout-wrap>
        <md-card layout="column" ng-repeat="ans in Surveys.selectedAnswers | orderBy:'date'" layout-padding>
            <div>User: {{ans.username}}</div>
            <div>Place: {{ans.place}}</div>
            <div>Date: {{ans.date}}</div>
            <div layout="row">
                <md-button class="md-raised font-size-button" ng-click="Surveys.showThisResult(ans)">Show</md-button>
                <md-button class="md-raised font-size-button" ng-click="Surveys.deleteThisResult(ans.ID,$event)">Delete</md-button>
            </div>
        </md-card>
    </div>
    <md-card layout="column" ng-show="Surveys.showAnswerForm" layout-padding>
        <div>User: {{Surveys.thisResult.username}}</div>
        <div>Description: {{Surveys.thisResult.description}}</div>
        <div>Place: {{Surveys.thisResult.place}}</div>
        <div>Date: {{Surveys.thisResult.date}}</div>
        <div>Questions/answers:</div>
        <div layout="column" ng-repeat="q in Surveys.thisResultSurvey.questions">
            <div layout="row">Question {{$index+1}}: {{q}}</div>
            <div>Answer {{$index + 1}}: {{Surveys.thisResult.answers[$index]}}</div>
        </div>
    </md-card>
</div>
<div ng-show="Surveys.showQuestion" layout="column" layout-padding> <!-- This section appears when viewing the questions contained in a survey; the survey list is hidden and this section is shown instead -->
    <div layout="row" layout-align="start center">
        <md-button class="md-raised font-size-button" ng-click="Surveys.showQuestion = false">Back</md-button>
    </div>
    <div class="md-headline">Questions of: <b style="font-size:24px">{{Surveys.questionsView.surv_name}}</b></div>
    <div class="md-subhead">Description: <b style="font-size:16px">{{Surveys.questionsView.description}}</b></div>
    <div layout="row" layout-wrap>
        <md-card ng-repeat="q in Surveys.questionsView.questions" layout="row" layout-padding>
            <div>Question {{$index + 1}}: {{q}}</div>
        </md-card>
    </div>
</div>
|
import 'package:animate_do/animate_do.dart';
import 'package:card_swiper/card_swiper.dart';
import 'package:cinemapedia/config/router/app_router.dart';
import 'package:cinemapedia/domain/entities/movie.dart';
import 'package:flutter/material.dart';
/// Horizontal auto-playing carousel that shows one [_Slide] per movie.
class MoviesSlideShow extends StatelessWidget {
  /// Movies to render, one slide each.
  final List<Movie> movies;

  const MoviesSlideShow({super.key, required this.movies});

  @override
  Widget build(BuildContext context) {
    final scheme = Theme.of(context).colorScheme;

    // Dot-style page indicator tinted with the app's color scheme.
    final pagination = SwiperPagination(
      margin: const EdgeInsets.only(top: 0),
      builder: DotSwiperPaginationBuilder(
        activeColor: scheme.primary,
        color: scheme.secondary,
      ),
    );

    return SizedBox(
      height: 210,
      width: double.infinity,
      child: Swiper(
        viewportFraction: 0.8,
        scale: 0.9,
        autoplay: true,
        pagination: pagination,
        itemCount: movies.length,
        itemBuilder: (context, index) => _Slide(movie: movies[index]),
      ),
    );
  }
}
/// A single rounded movie backdrop card; tapping it opens the movie page.
class _Slide extends StatelessWidget {
  /// Movie whose backdrop is displayed.
  final Movie movie;

  const _Slide({required this.movie});

  @override
  Widget build(BuildContext context) {
    // Drop shadow rendered below the rounded card.
    const shadow = BoxShadow(
      color: Colors.black45,
      blurRadius: 10,
      offset: Offset(0, 10),
    );

    return Padding(
      padding: const EdgeInsets.only(bottom: 30),
      child: DecoratedBox(
        decoration: BoxDecoration(
          borderRadius: BorderRadius.circular(20),
          boxShadow: const [shadow],
        ),
        child: ClipRRect(
          borderRadius: BorderRadius.circular(20),
          child: GestureDetector(
            // Navigate to the detail page for this movie.
            onTap: () => goToMoviePage(context, movie.id),
            child: FadeInImage(
              fit: BoxFit.cover,
              // Loader GIF shown while the network image downloads.
              placeholder: const AssetImage('assets/images/bottle-loader.gif'),
              image: NetworkImage(movie.backdropPath),
            ),
          ),
        ),
      ),
    );
  }
}
|
## Overview
This application is a comprehensive, interactive candlestick chart renderer for financial data visualization. It is designed for users who require detailed insights into market trends and price movements. The application features real-time data fetching and dynamic interactions such as zooming, panning, and detailed examination of specific data points.
## How to Use
### Setup and Initial Display
#### Starting the Application
Once you launch the application, the initial setup populates the user interface with controls for selecting a financial instrument (e.g., stocks, commodities) and setting the desired time frame for the data.
#### Loading Data
After configuring the settings, clicking the "Submit" button fetches the candlestick data for the specified symbol and time frame, rendering it on the canvas.
### Interacting with the Chart
The application supports several interactive features to explore the candlestick data:
#### Dragging/Panning
- **Action:** Click and hold the left mouse button over the chart, then move the mouse horizontally to drag the chart left or right. This allows you to view different parts of the data timeline.
- **Purpose:** This feature enables users to navigate through the time series data, allowing for an examination of earlier or later data points beyond what is initially visible on the screen.
#### Horizontal Scrolling
- **Action:** Use the horizontal scroll functionality of your mouse or trackpad to move the chart left or right. If your device does not support horizontal scrolling natively, you can also hold down the `Shift` key and use the vertical scroll wheel to achieve the same effect.
- **Purpose:** Provides a convenient and quick way to navigate through the chart without the need to click and drag. This method is especially useful for making smaller, more precise adjustments to the view.
#### Zooming
- **Horizontal Zoom:** Use the mouse wheel while hovering over the chart to zoom in or out. Scrolling up zooms in, and scrolling down zooms out, focusing on the point under the cursor.
- **Vertical Zoom:** Move the cursor to the rightmost edge of the canvas where the price scale is visible, and then use the mouse wheel to zoom in or out on the price axis.
### Automatic Data Fetching
The application detects when you are nearing the edge of the available data and automatically fetches additional data in the direction you are panning or zooming (forward or backward in time). This feature ensures a seamless and continuous data viewing experience.
|
import React, { useEffect, useState } from "react";
import { Route, Routes, useNavigate } from "react-router-dom";
import Home from "./components/Home";
import CreatePolysign from "./components/CreatePolysign";
import { Layout, Menu, Breadcrumb, Button } from "antd";
import { APP_NAME } from "./util/constants";
import History from "./components/History";
import Sign from "./components/Sign";
import logo from "./assets/logo.png";
import "./App.css";
const { Header, Content, Footer } = Layout;
function App() {
const [account, setAccount] = useState();
const [loading ,setLoading] = useState(false);
const login = async () => {
setLoading(true)
const e = window.ethereum
if (!e) {
alert('Metamask must be connected to use Polysign')
return
}
try {
const accs = await e.request({ method: 'eth_requestAccounts' });
console.log('accounts', accs)
setAccount(accs[0])
} catch (e) {
} finally {
setLoading(false)
}
}
const checkConnected = async () => {
const e = window.ethereum
if (!e) {
return
}
const connected = e.isConnected()
console.log('connected', connected)
if (connected) {
await login()
}
}
const logout =
useEffect(() => {
checkConnected()
}, [])
const navigate = useNavigate();
const path = window.location.pathname;
const isSignature = path.startsWith("/sign");
return (
<div className="App">
<Layout className="layout">
<Header>
{/* <div className="logo" /> */}
<Menu
// theme="dark"
mode="horizontal"
defaultSelectedKeys={[]}
>
<Menu.Item key={0}>
<img
src={logo}
className="header-logo pointer"
onClick={() => navigate("/")}
/>
</Menu.Item>
{!isSignature && (
<>
<Menu.Item key={1} onClick={() => navigate("/create")}>
Create esignature request
</Menu.Item>
<Menu.Item key={2} onClick={() => navigate("/history")}>
Lookup
</Menu.Item>
</>
)}
{!account && <span>
<Button type="primary" onClick={login} loading={loading} disabled={loading}>Login with Metamask</Button>
</span> }
{account && <span>
Hello: {account}</span>}
</Menu>
</Header>
<Content style={{ padding: "0 50px" }}>
<div className="container">
<Routes>
<Route path="/" element={<Home />} />
<Route path="/sign/:signId" element={<Sign account={account} />} />
<Route path="/create" element={<CreatePolysign account={account}/>} />
<Route path="/history" element={<History />} />
</Routes>
</div>
</Content>
<Footer style={{ textAlign: "center" }}>
{APP_NAME} ©2022 - A Polygon-powered esignature platform
</Footer>
</Layout>
</div>
);
}
export default App;
|
import React from 'react';
import ReactDOM from 'react-dom/client';
import App from './App';
import { createStore } from 'redux';
import { Provider } from 'react-redux';
import { composeWithDevTools } from 'redux-devtools-extension';
import rootReducer from './modules';

// Create the Redux store from the combined root reducer, with the Redux
// DevTools browser extension enabled for state inspection.
const store = createStore(rootReducer, composeWithDevTools());

const root = ReactDOM.createRoot(document.getElementById('root'));
root.render(
  <React.StrictMode>
    <Provider store={store}>
      <App />
    </Provider>
  </React.StrictMode>
);

// To use the store from React components, wrap the app in a Provider component.
// The Provider component must receive the store via its `store` prop.
import { computed } from 'vue'
import { useMessage } from 'naive-ui'
import { t } from '@/locales'
import { useChatStore } from '@/store'
export function useUsingContext() {
const ms = useMessage()
const chatStore = useChatStore()
const usingContext = computed<boolean>(() => chatStore.usingContext)
function toggleUsingContext() {
chatStore.setUsingContext(!usingContext.value)
if (usingContext.value)
ms.success(t('chat.turnOnContext'))
else
ms.warning(t('chat.turnOffContext'))
}
return {
usingContext,
toggleUsingContext,
}
}
|
import java.io.*;
import java.text.NumberFormat;
import java.util.*;
import freemarker.template.*;
/**
 * Renders an invoice statement either as HTML (via FreeMarker) or as plain
 * text. {@link #generateStatementData} must be called before either print
 * method; both output files are written under build/results/.
 */
public class StatementPrinter {

    /** FreeMarker configuration; built once in the constructor. */
    private final Configuration cfg;

    /** Data model for the statement; null until generateStatementData() runs. */
    private Map<String, Object> statementData;

    public StatementPrinter() {
        cfg = configureFreeMarker();
    }

    /**
     * Renders the statement to build/results/invoice.html.
     *
     * @throws IllegalStateException if generateStatementData() was not called first
     * @throws IOException           if the template cannot be read or the file written
     * @throws TemplateException     if template processing fails
     */
    public void printHTML() throws IOException, TemplateException {
        requireStatementData();
        toHTML(this.statementData);
    }

    /**
     * Renders the statement as plain text, writes it to build/results/invoice.txt
     * (best-effort) and returns it.
     *
     * @throws IllegalStateException if generateStatementData() was not called first
     */
    public String printTXT() throws IOException {
        requireStatementData();
        return toText(this.statementData);
    }

    /** Shared guard: both print methods require the data model to exist. */
    private void requireStatementData() {
        if (this.statementData == null) {
            throw new IllegalStateException("Statement data has not been generated. Call generateStatementData() first.");
        }
    }

    /** Builds the FreeMarker configuration: template dir, encoding, error handling. */
    private Configuration configureFreeMarker() {
        Configuration cfg = new Configuration(Configuration.VERSION_2_3_28);
        try {
            cfg.setDirectoryForTemplateLoading(new File("src/ressources/templates"));
        } catch (IOException e) {
            // Best-effort: keep the (possibly unusable) configuration; template
            // lookup will fail later with a clearer FreeMarker error.
            e.printStackTrace();
        }
        cfg.setDefaultEncoding("UTF-8");
        cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
        cfg.setLogTemplateExceptions(false);
        cfg.setWrapUncheckedExceptions(true);
        cfg.setSQLDateAndTimeTimeZone(TimeZone.getDefault());
        return cfg;
    }

    /** Builds and caches the statement data model from the invoice and plays. */
    public void generateStatementData(Invoice invoice, HashMap<String, Play> plays) {
        this.statementData = invoice.generateDataForStatement(plays);
    }

    private void toHTML(Map<String, Object> root) throws IOException, TemplateException {
        Template temp = cfg.getTemplate("test.ftlh");
        // Bug fix: the original never closed the FileWriter, leaking the file
        // handle and risking loss of buffered output. try-with-resources
        // guarantees the writer is flushed and closed on every path.
        try (Writer out = new FileWriter(new File("build/results/invoice.html"))) {
            temp.process(root, out);
        }
    }

    private String toText(Map<String, Object> root) {
        NumberFormat frmt = NumberFormat.getCurrencyInstance(Locale.US);
        // StringBuilder instead of repeated String concatenation in the loop.
        StringBuilder result = new StringBuilder();
        result.append(String.format("Statement for %s\n", root.get("client")));
        @SuppressWarnings("unchecked")
        List<Map<String, Object>> retrievedPerformancesList = (List<Map<String, Object>>) root.get("performances");
        for (Map<String, Object> performanceData : retrievedPerformancesList) {
            result.append(String.format("  %s: %s (%s seats)\n", performanceData.get("playName"), frmt.format(performanceData.get("price")), performanceData.get("audience")));
        }
        result.append(String.format("Amount owed is %s\n", root.get("totalAmount")));
        result.append(String.format("You earned %s credits\n", root.get("fidelityPoints")));
        if ((Boolean) root.get("promotionApplied")) {
            result.append("A promotion of $15 has been applied to your invoice due to your fidelity points.\n");
        }
        String text = result.toString();
        // Writing the file is best-effort; the text is returned regardless.
        String filePath = "build/results/invoice.txt";
        try (BufferedWriter writer = new BufferedWriter(new FileWriter(filePath))) {
            writer.write(text);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return text;
    }
}
|
import { Component, Input, OnChanges, OnDestroy, OnInit, QueryList, SimpleChanges, ViewChildren } from '@angular/core';
import { MatDialog } from '@angular/material/dialog';
import { MatPaginator } from '@angular/material/paginator';
import { MatSort } from '@angular/material/sort';
import { MatTableDataSource } from '@angular/material/table';
import { Router } from '@angular/router';
import { faArchive, faEdit, faEye, faRecycle } from '@fortawesome/free-solid-svg-icons';
import { Subscription } from 'rxjs';
import { BaseComponent } from 'src/app/shared/components/base.component';
import { UserStoryStatusEnum } from 'src/app/shared/enums/user-story-status.enum';
import { IError } from 'src/app/shared/interfaces/IError';
import { UserStory, UserStoryUpdate } from 'src/app/shared/models/UserStory';
import { ErrorHandlerService } from 'src/app/shared/services/error-handler.service';
import { UserStoryService } from 'src/app/shared/services/userstory.service';
import { UserstoriesCreateDialogComponent } from './userstories-create-dialog/userstories-create-dialog.component';
import { UserstoriesEditDialogComponent } from './userstories-edit-dialog/userstories-edit-dialog.component';
@Component({
  selector: 'app-userstories',
  templateUrl: './userstories.component.html',
  styleUrls: ['./userstories.component.scss']
})
/**
 * Lists a project's user stories in two Material tables (active and archived)
 * and offers create/show/edit/archive/de-archive actions.
 */
export class UserstoriesComponent extends BaseComponent implements OnInit, OnChanges, OnDestroy {
  /** Id of the project whose user stories are displayed. */
  @Input()
  public projectId: string;

  /** Latest user stories received from the service. */
  private userstories: UserStory[] = [];

  /** Active subscription to the user-story stream; replaced on each init(). */
  private subscription: Subscription | null = null;

  displayedColumns: string[] = ['name', 'description', 'status', 'storypoints', 'assignee', 'actions'];
  dataSource = new MatTableDataSource<UserStory>();

  archivedDisplayedColumns: string[] = ['name', 'description', 'storypoints', 'assignee', 'actions'];
  archivedDataSource = new MatTableDataSource<UserStory>();

  // Index 0 of each QueryList belongs to the active table, index 1 to the archived one.
  @ViewChildren(MatPaginator) paginator = new QueryList<MatPaginator>();
  @ViewChildren(MatSort) sort = new QueryList<MatSort>();

  constructor(
    private userstoryService: UserStoryService,
    private dialog: MatDialog,
    private router: Router,
    private errorService: ErrorHandlerService
  ) {
    super();
  }

  ngOnInit(): void {
    this.init();
    this.icons.set('archive', faArchive);
    this.icons.set('deArchive', faRecycle);
    this.icons.set('edit', faEdit);
    this.icons.set('show', faEye);
  }

  ngOnChanges(changes: SimpleChanges): void {
    // Re-load whenever the bound projectId changes.
    this.init();
  }

  ngOnDestroy(): void {
    this.subscription?.unsubscribe();
  }

  /** (Re)subscribes to the project's user stories and feeds both tables. */
  private init(): void {
    this.checkRequiredFields(this.projectId);
    // Bug fix: ngOnChanges re-runs init(); the original overwrote the previous
    // subscription without unsubscribing, leaking one subscription per change.
    this.subscription?.unsubscribe();
    this.subscription = this.userstoryService.getAllFromProject(this.projectId).subscribe((userstories: UserStory[]) => {
      this.userstories = userstories;
      // Split the stream into the active and archived tables.
      this.dataSource.data = this.userstories.filter(story => story.status !== UserStoryStatusEnum.Archived);
      this.archivedDataSource.data = this.userstories.filter(story => story.status === UserStoryStatusEnum.Archived);
      this.dataSource.paginator = this.paginator.toArray()[0];
      this.archivedDataSource.paginator = this.paginator.toArray()[1];
      this.dataSource.sort = this.sort.toArray()[0];
      this.archivedDataSource.sort = this.sort.toArray()[1];
    }, (err: IError) => this.errorService.HandleError(err));
  }

  /** Opens the "create user story" dialog for the current project. */
  createStory(event: Event | undefined): void {
    if (event !== undefined) { event.preventDefault(); }
    this.dialog.open(UserstoriesCreateDialogComponent, {
      width: '300px',
      data: { projectId: this.projectId }
    });
  }

  /** Navigates to the detail page of the given story. */
  showStory(element: UserStory, event: Event | undefined): void {
    if (event !== undefined) { event.preventDefault(); }
    this.router.navigate([`/project/${this.projectId}/story/${element.id}`]);
  }

  /** Opens the edit dialog pre-filled with the given story. */
  editStory(element: UserStory, event: Event | undefined): void {
    if (event !== undefined) { event.preventDefault(); }
    this.dialog.open(UserstoriesEditDialogComponent, {
      width: '300px',
      data: {
        projectId: this.projectId,
        userstory: element
      }
    });
  }

  /** Restores an archived story: Ready if assigned to a sprint, else Backlog. */
  deArchiveStory(element: UserStory, event: Event | undefined): void {
    if (event !== undefined) { event.preventDefault(); }
    this.setStoryStatusProperty(element, (element.sprint !== null) ? UserStoryStatusEnum.Ready : UserStoryStatusEnum.Backlog);
  }

  /** Moves a story into the archived table. */
  archiveStory(element: UserStory, event: Event | undefined): void {
    if (event !== undefined) { event.preventDefault(); }
    this.setStoryStatusProperty(element, UserStoryStatusEnum.Archived);
  }

  /** Sends an update that changes only the story's status field. */
  private setStoryStatusProperty(element: UserStory, status: UserStoryStatusEnum): void {
    const userstory: UserStoryUpdate = {
      name: element.name,
      description: element.description,
      status,
      points: element.points,
      completedAt: element.completedAt,
      projectId: element.projectId,
      creatorId: element.creatorId,
      assigneeId: element.assigneeId,
      sprintId: element.sprintId
    };
    this.userstoryService.update(element.id, userstory);
  }
}
|
<?php
/**
* Sample implementation of the Custom Header feature
* http://codex.wordpress.org/Custom_Headers
*
* You can add an optional custom header image to header.php like so ...
*
* <?php if ( get_header_image() ) : ?>
* <a href="<?php echo esc_url( home_url( '/' ) ); ?>" rel="home">
* <img src="<?php header_image(); ?>" width="<?php echo get_custom_header()->width; ?>" height="<?php echo get_custom_header()->height; ?>" alt="sample">
* </a>
* <?php endif; // End header image check. ?>
*
* @package madara
*/
/**
* Setup the WordPress core custom header feature.
*
* @uses madara_header_style()
* @uses madara_admin_header_style()
* @uses madara_admin_header_image()
*/
function madara_custom_header_setup() {
	// Register WP core custom-header support; the args array is filterable
	// via 'madara_custom_header_args' so child themes can override it.
	add_theme_support( 'custom-header', apply_filters( 'madara_custom_header_args', array(
		'default-image'          => '',       // no default header image
		'default-text-color'     => '000000', // black header text by default
		'width'                  => 1000,
		'height'                 => 250,
		'flex-height'            => true,     // allow non-250px heights
		'wp-head-callback'       => 'madara_header_style',
		'admin-head-callback'    => 'madara_admin_header_style',
		'admin-preview-callback' => 'madara_admin_header_style',
	) ) );
}
add_action( 'after_setup_theme', 'madara_custom_header_setup' );
if ( ! function_exists( 'madara_header_style' ) ) :
	/**
	 * Styles the header image and text displayed on the blog.
	 *
	 * Hides the site title/description when the user chose "blank",
	 * otherwise applies the user's custom header text color.
	 *
	 * @see madara_custom_header_setup().
	 */
	function madara_header_style() {
		$header_text_color = get_header_textcolor();

		// If we get this far, we have custom styles. Let's do this.
		?>
		<style type="text/css">
		<?php
		// Has the text been hidden?
		if ( 'blank' == $header_text_color ) :
			?>
			.site-title,
			.site-description {
				position: absolute;
				clip: rect(1px, 1px, 1px, 1px);
			}
		<?php
		// If the user has set a custom color for the text use that.
		else :
			?>
			.site-title a,
			.site-description {
				/* Bug fix: the original emitted "color: # 000000" — a stray
				   space after '#' made the declaration invalid CSS. Also use
				   esc_attr() (value-context escaping) for the hex value. */
				color: #<?php echo esc_attr( $header_text_color ); ?>;
			}
		<?php endif; ?>
		</style>
		<?php
	}
endif; // madara_header_style
if ( ! function_exists( 'madara_admin_header_style' ) ) :
	/**
	 * Styles the header image displayed on the Appearance > Header admin panel.
	 *
	 * NOTE: most rules below are intentionally empty placeholders for theme
	 * authors to fill in; only the image border reset is active.
	 *
	 * @see madara_custom_header_setup().
	 */
	function madara_admin_header_style() {
		?>
		<style type="text/css">
		.appearance_page_custom-header #headimg {
			border: none;
		}
		#headimg h1,
		#desc {
		}
		#headimg h1 {
		}
		#headimg h1 a {
		}
		#desc {
		}
		#headimg img {
		}
		</style>
		<?php
	}
endif; // madara_admin_header_style
if ( ! function_exists( 'madara_admin_header_image' ) ) :
	/**
	 * Custom header image markup displayed on the Appearance > Header admin panel.
	 *
	 * @see madara_custom_header_setup().
	 */
	function madara_admin_header_image() {
		// Bug fix: the original ran the whole attribute string through
		// esc_html(), which encoded the quotes (style=&quot;...&quot;) and
		// broke the markup. Escape only the color value with esc_attr() and
		// echo the pre-built attribute as-is.
		$style = sprintf( ' style="color:#%s;"', esc_attr( get_header_textcolor() ) );
		?>
		<div id="headimg">
			<h1 class="displaying-header-text">
				<a id="name"<?php echo $style; ?> onclick="return false;" href="<?php echo esc_url( home_url( '/' ) ); ?>"><?php bloginfo( 'name' ); ?></a>
			</h1>
			<div class="displaying-header-text" id="desc"<?php echo $style; ?>><?php bloginfo( 'description' ); ?></div>
			<?php if ( get_header_image() ) : ?>
				<img src="<?php header_image(); ?>" alt="<?php bloginfo( 'description' ); ?>">
			<?php endif; ?>
		</div>
		<?php
	}
endif; // madara_admin_header_image
|
/*
* Copyright 2024 Goldman Sachs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finos.legend.engine.ide.lsp.server.integration;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import net.javacrumbs.jsonunit.JsonAssert;
import net.javacrumbs.jsonunit.core.Option;
import org.eclipse.lsp4j.CodeLens;
import org.eclipse.lsp4j.CodeLensParams;
import org.eclipse.lsp4j.Diagnostic;
import org.eclipse.lsp4j.DiagnosticSeverity;
import org.eclipse.lsp4j.DocumentDiagnosticParams;
import org.eclipse.lsp4j.DocumentDiagnosticReport;
import org.eclipse.lsp4j.Location;
import org.eclipse.lsp4j.Position;
import org.eclipse.lsp4j.PreviousResultId;
import org.eclipse.lsp4j.Range;
import org.eclipse.lsp4j.ReferenceContext;
import org.eclipse.lsp4j.ReferenceParams;
import org.eclipse.lsp4j.SymbolKind;
import org.eclipse.lsp4j.TextDocumentIdentifier;
import org.eclipse.lsp4j.WorkspaceDiagnosticParams;
import org.eclipse.lsp4j.WorkspaceDocumentDiagnosticReport;
import org.eclipse.lsp4j.WorkspaceSymbol;
import org.eclipse.lsp4j.WorkspaceSymbolParams;
import org.eclipse.lsp4j.jsonrpc.ResponseErrorException;
import org.eclipse.lsp4j.jsonrpc.messages.Either;
import org.finos.legend.engine.ide.lsp.extension.LegendEntity;
import org.finos.legend.engine.ide.lsp.extension.text.TextLocation;
import org.finos.legend.engine.ide.lsp.server.request.LegendEntitiesRequest;
import org.finos.legend.engine.ide.lsp.utils.LegendToLSPUtilities;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.RepeatedTest;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
@Timeout(value = 3, unit = TimeUnit.MINUTES)
// all tests should finish but in case of some uncaught deadlock, timeout whole test
public class TestLegendLanguageServerIntegration
{
@RegisterExtension
static LegendLanguageServerIntegrationExtension extension = new LegendLanguageServerIntegrationExtension();
    @Test
    void testUnknownGrammar() throws Exception
    {
        // A file whose section header names a grammar the server does not know.
        String content = "###HelloGrammar\n" +
                "Hello abc::abc\n" +
                "{\n" +
                "  abc: 1\n" +
                "}\n";
        Path pureFile = extension.addToWorkspace("hello.pure", content);
        // Request document diagnostics for that single file.
        DocumentDiagnosticReport diagnosticReport = extension.futureGet(extension.getServer().getTextDocumentService().diagnostic(new DocumentDiagnosticParams(new TextDocumentIdentifier(pureFile.toUri().toString()))));
        Assertions.assertNotNull(diagnosticReport.getRelatedFullDocumentDiagnosticReport());
        Assertions.assertEquals("full", diagnosticReport.getRelatedFullDocumentDiagnosticReport().getKind());
        // Exactly one diagnostic: a parser error flagging the unknown grammar.
        Assertions.assertEquals(1, diagnosticReport.getRelatedFullDocumentDiagnosticReport().getItems().size());
        Diagnostic diagnostic = diagnosticReport.getRelatedFullDocumentDiagnosticReport().getItems().get(0);
        Assertions.assertEquals("Parser", diagnostic.getSource());
        Assertions.assertTrue(diagnostic.getMessage().startsWith("Unknown grammar: HelloGrammar"));
    }
    // repeat to test for race conditions, thread dead-locks, etc
    @RepeatedTest(value = 10, failureThreshold = 1)
    void testWorkspaceSymbols() throws Exception
    {
        // Three classes under abc::, each with a single 'abc' property.
        Path file1Path = extension.addToWorkspace("file1.pure", "###Pure\n" +
                "Class abc::abc\n" +
                "{\n" +
                "  abc: String[1];\n" +
                "}\n" +
                "Class abc::abc2\n" +
                "{\n" +
                "  abc: String[1];\n" +
                "}\n" +
                "Class abc::abc3\n" +
                "{\n" +
                "  abc: String[1];\n" +
                "}\n");
        // Same three class shapes under xyz:: in a second file.
        Path file2Path = extension.addToWorkspace("file2.pure", "###Pure\n" +
                "Class xyz::abc\n" +
                "{\n" +
                "  abc: String[1];\n" +
                "}\n" +
                "Class xyz::abc2\n" +
                "{\n" +
                "  abc: String[1];\n" +
                "}\n" +
                "Class xyz::abc3\n" +
                "{\n" +
                "  abc: String[1];\n" +
                "}\n");
        // An enumeration with values split across two lines (tests member locations).
        Path enumPath = extension.addToWorkspace("enum.pure", "Enum test::model::TestEnumeration\n" +
                "{\n" +
                "  VAL1, VAL2,\n" +
                "  VAL3, VAL4\n" +
                "}\n");
        // Unfiltered workspace symbol query should return everything, including
        // symbols contributed by dependencies (legend-vfs:/dependencies.pure).
        List<? extends WorkspaceSymbol> symbols = extension.futureGet(extension.getServer().getWorkspaceService().symbol(new WorkspaceSymbolParams(""))).getRight();
        Assertions.assertNotNull(symbols);
        // Sort by name so the comparison against the expected list is order-stable.
        symbols.sort(Comparator.comparing(WorkspaceSymbol::getName));
        List<WorkspaceSymbol> expected = List.of(
                createWorkspaceSymbol("abc::abc", SymbolKind.Class, TextLocation.newTextSource(file1Path.toUri().toString(), 1, 0, 4, 0), null, "meta::pure::metamodel::type::Class"),
                createWorkspaceSymbol("abc::abc.abc", SymbolKind.Field, TextLocation.newTextSource(file1Path.toUri().toString(), 3, 2, 3, 16), "abc::abc", "meta::pure::metamodel::function::property::Property"),
                createWorkspaceSymbol("abc::abc2", SymbolKind.Class, TextLocation.newTextSource(file1Path.toUri().toString(), 5, 0, 8, 0), null, "meta::pure::metamodel::type::Class"),
                createWorkspaceSymbol("abc::abc2.abc", SymbolKind.Field, TextLocation.newTextSource(file1Path.toUri().toString(), 7, 2, 7, 16), "abc::abc2", "meta::pure::metamodel::function::property::Property"),
                createWorkspaceSymbol("abc::abc3", SymbolKind.Class, TextLocation.newTextSource(file1Path.toUri().toString(), 9, 0, 12, 0), null, "meta::pure::metamodel::type::Class"),
                createWorkspaceSymbol("abc::abc3.abc", SymbolKind.Field, TextLocation.newTextSource(file1Path.toUri().toString(), 11, 2, 11, 16), "abc::abc3", "meta::pure::metamodel::function::property::Property"),
                createWorkspaceSymbol("test::model::TestEnumeration", SymbolKind.Enum, TextLocation.newTextSource(enumPath.toUri().toString(), 0, 0, 4, 0), null, "meta::pure::metamodel::type::Enumeration"),
                createWorkspaceSymbol("test::model::TestEnumeration.VAL1", SymbolKind.EnumMember, TextLocation.newTextSource(enumPath.toUri().toString(), 2, 2, 2, 5), "test::model::TestEnumeration", "test::model::TestEnumeration"),
                createWorkspaceSymbol("test::model::TestEnumeration.VAL2", SymbolKind.EnumMember, TextLocation.newTextSource(enumPath.toUri().toString(), 2, 8, 2, 11), "test::model::TestEnumeration", "test::model::TestEnumeration"),
                createWorkspaceSymbol("test::model::TestEnumeration.VAL3", SymbolKind.EnumMember, TextLocation.newTextSource(enumPath.toUri().toString(), 3, 2, 3, 5), "test::model::TestEnumeration", "test::model::TestEnumeration"),
                createWorkspaceSymbol("test::model::TestEnumeration.VAL4", SymbolKind.EnumMember, TextLocation.newTextSource(enumPath.toUri().toString(), 3, 8, 3, 11), "test::model::TestEnumeration", "test::model::TestEnumeration"),
                createWorkspaceSymbol("vscodelsp::test::dependency::Employee", SymbolKind.Class, TextLocation.newTextSource("legend-vfs:/dependencies.pure", 3, 0, 7, 0), null, "meta::pure::metamodel::type::Class"),
                createWorkspaceSymbol("vscodelsp::test::dependency::Employee.foobar1", SymbolKind.Field, TextLocation.newTextSource("legend-vfs:/dependencies.pure", 5, 2, 5, 19), "vscodelsp::test::dependency::Employee", "meta::pure::metamodel::function::property::Property"),
                createWorkspaceSymbol("vscodelsp::test::dependency::Employee.foobar2", SymbolKind.Field, TextLocation.newTextSource("legend-vfs:/dependencies.pure", 6, 2, 6, 19), "vscodelsp::test::dependency::Employee", "meta::pure::metamodel::function::property::Property"),
                createWorkspaceSymbol("vscodelsp::test::dependency::StaticConnection", SymbolKind.Struct, TextLocation.newTextSource("legend-vfs:/dependencies.pure", 10, 0, 17, 0), null, "meta::pure::runtime::PackageableConnection"),
                createWorkspaceSymbol("xyz::abc", SymbolKind.Class, TextLocation.newTextSource(file2Path.toUri().toString(), 1, 0, 4, 0), null, "meta::pure::metamodel::type::Class"),
                createWorkspaceSymbol("xyz::abc.abc", SymbolKind.Field, TextLocation.newTextSource(file2Path.toUri().toString(), 3, 2, 3, 16), "xyz::abc", "meta::pure::metamodel::function::property::Property"),
                createWorkspaceSymbol("xyz::abc2", SymbolKind.Class, TextLocation.newTextSource(file2Path.toUri().toString(), 5, 0, 8, 0), null, "meta::pure::metamodel::type::Class"),
                createWorkspaceSymbol("xyz::abc2.abc", SymbolKind.Field, TextLocation.newTextSource(file2Path.toUri().toString(), 7, 2, 7, 16), "xyz::abc2", "meta::pure::metamodel::function::property::Property"),
                createWorkspaceSymbol("xyz::abc3", SymbolKind.Class, TextLocation.newTextSource(file2Path.toUri().toString(), 9, 0, 12, 0), null, "meta::pure::metamodel::type::Class"),
                createWorkspaceSymbol("xyz::abc3.abc", SymbolKind.Field, TextLocation.newTextSource(file2Path.toUri().toString(), 11, 2, 11, 16), "xyz::abc3", "meta::pure::metamodel::function::property::Property")
        );
        // Element-wise comparison (rather than list equality) so a mismatch
        // reports the exact index where actual and expected diverge.
        for (int i = 0; i < Math.max(symbols.size(), expected.size()); i++)
        {
            WorkspaceSymbol expectedSymbol = null;
            if (expected.size() > i)
            {
                expectedSymbol = expected.get(i);
            }
            WorkspaceSymbol actualSymbol = null;
            if (symbols.size() > i)
            {
                actualSymbol = symbols.get(i);
            }
            Assertions.assertEquals(expectedSymbol, actualSymbol, String.format("Symbol at %d are not equal", i));
        }
        // Query filtering: "xyz" should match only the xyz:: classes...
        List<? extends WorkspaceSymbol> symbolsFiltered1 = extension.futureGet(extension.getServer().getWorkspaceService().symbol(new WorkspaceSymbolParams("xyz"))).getRight();
        Set<String> symbolNamesFiltered1 = symbolsFiltered1.stream().map(WorkspaceSymbol::getName).collect(Collectors.toSet());
        Assertions.assertEquals(Set.of("xyz::abc", "xyz::abc2", "xyz::abc3"), symbolNamesFiltered1);
        // ...and "abc2" should match that class name in both packages.
        List<? extends WorkspaceSymbol> symbolsFiltered2 = extension.futureGet(extension.getServer().getWorkspaceService().symbol(new WorkspaceSymbolParams("abc2"))).getRight();
        Set<String> symbolNamesFiltered2 = symbolsFiltered2.stream().map(WorkspaceSymbol::getName).collect(Collectors.toSet());
        Assertions.assertEquals(Set.of("abc::abc2", "xyz::abc2"), symbolNamesFiltered2);
    }
/**
 * Builds the {@link WorkspaceSymbol} the server is expected to report for one element:
 * a concrete {@link Location} (left side of the Either) plus the element's classifier
 * path attached under the "classifier" key of the symbol's JSON data.
 *
 * @param name          fully-qualified symbol name
 * @param kind          LSP symbol kind
 * @param textLocation  source coordinates of the symbol
 * @param containerName enclosing element name, or null for top-level elements
 * @param classifier    Pure classifier path recorded in the symbol data
 */
private WorkspaceSymbol createWorkspaceSymbol(String name, SymbolKind kind, TextLocation textLocation, String containerName, String classifier)
{
    WorkspaceSymbol symbol = new WorkspaceSymbol(
            name,
            kind,
            Either.forLeft(new Location(textLocation.getDocumentId(), LegendToLSPUtilities.toRange(textLocation.getTextInterval()))),
            containerName);
    JsonObject symbolData = new JsonObject();
    symbolData.add("classifier", new JsonPrimitive(classifier));
    symbol.setData(symbolData);
    return symbol;
}
// repeat to test for race conditions, thread dead-locks, etc
// Exercises the LSP pull-diagnostics workflow end-to-end: parse errors, compile
// errors, their fixes, and the previous-result-id caching contract (a report is
// only re-sent when its result id changed since the provided previous id).
@RepeatedTest(value = 10, failureThreshold = 1)
void testWorkspaceDiagnostic() throws Exception
{
// define class
Path pureFile1 = extension.addToWorkspace("file1.pure", "###Pure\n" +
"Class abc::abc\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n");
// extend class
Path pureFile2 = extension.addToWorkspace("file2.pure", "###Pure\n" +
"Class xyz::abc extends abc::abc\n" +
"{\n" +
"  xyz: String[1];\n" +
"}\n");
// no diagnostics as it parses and compiles
assertDiagnostics(Map.of(), List.of());
// creates a parse error on file 2, so only that diagnostics comes back
extension.changeWorkspaceFile(pureFile2, "###Pure\n" +
"Class xyz::abc extends abc::abc\n" +
"{\n" +
"  xyz: String[1\n" +
"}\n");
// diagnostics reported on file
Set<Diagnostic> parseDiagnostic = Set.of(
new Diagnostic(
new Range(new Position(4, 0), new Position(4, 1)),
"Unexpected token '}'. Valid alternatives: [']']",
DiagnosticSeverity.Error,
"Parser"
)
);
List<PreviousResultId> previousResultIds = assertDiagnostics(Map.of(pureFile2, parseDiagnostic), List.of());
// repeating asking for diagnostics using prev result id yield no result
assertDiagnostics(Map.of(), previousResultIds);
// but results are reported if result ids are different
previousResultIds = assertDiagnostics(Map.of(pureFile2, parseDiagnostic), List.of());
// fix parser error
extension.changeWorkspaceFile(pureFile2, "###Pure\n" +
"Class xyz::abc extends abc::abc\n" +
"{\n" +
"  xyz: String[1];\n" +
"}\n");
// the document report diagnostics, but with empty items
previousResultIds = assertDiagnostics(Map.of(pureFile2, Set.of()), previousResultIds);
// no diagnostics now reported with either prev result id or no ids
assertDiagnostics(Map.of(), previousResultIds);
// create compile error on file 2
// (renaming abc::abc to abc::ab in file 1 breaks the 'extends' in file 2)
extension.changeWorkspaceFile(pureFile1, "###Pure\n" +
"Class abc::ab\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n");
// report compile diagnostics on file
Set<Diagnostic> compileDiagnostic = Set.of(
new Diagnostic(
new Range(new Position(1, 23), new Position(1, 31)),
"Can't find type 'abc::abc'",
DiagnosticSeverity.Error,
"Compiler"
)
);
previousResultIds = assertDiagnostics(Map.of(pureFile2, compileDiagnostic), previousResultIds);
// repeating call yield no diagnostic reported
assertDiagnostics(Map.of(), previousResultIds);
// fix compile error
extension.changeWorkspaceFile(pureFile2, "###Pure\n" +
"Class xyz::abc extends abc::ab\n" +
"{\n" +
"  xyz: String[1];\n" +
"}\n");
// the document report diagnostics, but with empty items
previousResultIds = assertDiagnostics(Map.of(pureFile2, Set.of()), previousResultIds);
// no diagnostic if called again
assertDiagnostics(Map.of(), previousResultIds);
// introduce a parse error ('Clas') in both files at once
extension.changeWorkspaceFile(pureFile1, "###Pure\n" +
"Clas abc::ab\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n");
extension.changeWorkspaceFile(pureFile2, "###Pure\n" +
"Clas xyz::abc extends abc::ab\n" +
"{\n" +
"  xyz: String[1];\n" +
"}\n");
// parse error on both files
Map<Path, Set<Diagnostic>> expected = Map.of(
pureFile2, Set.of(new Diagnostic(
new Range(new Position(1, 0), new Position(1, 4)),
"Unexpected token 'Clas'. Valid alternatives: ['Class', 'Association', 'Profile', 'Enum', 'Measure', 'function', 'native', '^']",
DiagnosticSeverity.Error,
"Parser"
)),
pureFile1, Set.of(new Diagnostic(
new Range(new Position(1, 0), new Position(1, 4)),
"Unexpected token 'Clas'. Valid alternatives: ['Class', 'Association', 'Profile', 'Enum', 'Measure', 'function', 'native', '^']",
DiagnosticSeverity.Error,
"Parser"
)));
previousResultIds = assertDiagnostics(expected, previousResultIds);
assertDiagnostics(Map.of(), previousResultIds);
// fix parse errors
extension.changeWorkspaceFile(pureFile1, "###Pure\n" +
"Class abc::ab\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n");
extension.changeWorkspaceFile(pureFile2, "###Pure\n" +
"Class xyz::abc extends abc::ab\n" +
"{\n" +
"  xyz: String[1];\n" +
"}\n");
// report for both files, but empty diagnostics
previousResultIds = assertDiagnostics(Map.of(pureFile2, Set.of(), pureFile1, Set.of()), previousResultIds);
// no diagnostics if called again
assertDiagnostics(Map.of(), previousResultIds);
}
/**
 * Requests workspace diagnostics with the given previous result ids and asserts that the
 * diagnostics reported per document URI equal the expected map (keyed by file path).
 *
 * @param expected expected diagnostics per file; an entry with an empty set means the file
 *                 is reported with no items
 * @param ids      previous result ids to send with the request
 * @return the result ids of the returned reports, usable as "previous" ids on the next call
 */
private static List<PreviousResultId> assertDiagnostics(Map<Path, Set<Diagnostic>> expected, List<PreviousResultId> ids) throws Exception
{
    // normalize the expectation to URI strings so it can be compared with the reports
    Map<String, Set<Diagnostic>> expectedByUri = expected.entrySet()
            .stream()
            .collect(Collectors.toMap(e -> e.getKey().toUri().toString(), Map.Entry::getValue));
    List<WorkspaceDocumentDiagnosticReport> reports = extension.futureGet(extension.getServer().getWorkspaceService().diagnostic(new WorkspaceDiagnosticParams(ids))).getItems();
    Map<String, Set<Diagnostic>> actualByUri = reports
            .stream()
            .collect(Collectors.toMap(
                    report -> report.getWorkspaceFullDocumentDiagnosticReport().getUri(),
                    report -> new HashSet<>(report.getWorkspaceFullDocumentDiagnosticReport().getItems())));
    Assertions.assertEquals(expectedByUri, actualByUri);
    return reports
            .stream()
            .map(report -> new PreviousResultId(report.getWorkspaceFullDocumentDiagnosticReport().getUri(), report.getWorkspaceFullDocumentDiagnosticReport().getResultId()))
            .collect(Collectors.toList());
}
// Verifies that function-activator code lenses ("Publish to Sandbox", "Validate")
// are offered only when an engine server URL is configured via system property.
@Test
void codeLensCommandsFunctionActivator() throws Exception
{
String code1 = "###Pure\n" +
"function model::Hello(name: String[1]): String[1]\n" +
"{\n" +
"  'Hello World! My name is ' + $name + '.';\n" +
"}\n" +
"{\n" +
"  testSuite_1\n" +
"  (\n" +
"    testPass | Hello('John') => 'Hello World! My name is John.';\n" +
"  )\n" +
"}\n";
String code2 = "###Snowflake\n" +
"SnowflakeApp app::pack::MyApp\n" +
"{" +
"  applicationName : 'name';\n" +
"  function : model::Hello(String[1]):String[1];\n" +
"  ownership : Deployment { identifier: 'MyAppOwnership'};\n" +
"}\n";
extension.addToWorkspace("file1.pure", code1);
Path path = extension.addToWorkspace("file2.pure", code2);
extension.assertWorkspaceParseAndCompiles();
String file = path.toUri().toString();
// without a configured server URL no code lens should be produced
List<? extends CodeLens> codeLensWithoutServer = extension.futureGet(extension.getServer().getTextDocumentService().codeLens(new CodeLensParams(new TextDocumentIdentifier(file))));
Assertions.assertTrue(codeLensWithoutServer.isEmpty(), "Expect empty, got: " + codeLensWithoutServer);
try
{
System.setProperty("legend.engine.server.url", "http://localhost/hello");
List<? extends CodeLens> codeLensWithServer = extension.futureGet(extension.getServer().getTextDocumentService().codeLens(new CodeLensParams(new TextDocumentIdentifier(file))));
// sort by title so the assertion order is deterministic
codeLensWithServer.sort(Comparator.comparing(x -> x.getCommand().getTitle()));
Assertions.assertEquals(2, codeLensWithServer.size(), "Expect 2 code lends, got: " + codeLensWithoutServer);
Assertions.assertEquals("Publish to Sandbox", codeLensWithServer.get(0).getCommand().getTitle());
Assertions.assertEquals("Validate", codeLensWithServer.get(1).getCommand().getTitle());
}
finally
{
// always restore global state so other tests are unaffected
System.clearProperty("legend.engine.server.url");
}
}
// Reads the aggregated workspace-dependency source through the legend-vfs
// scheme and verifies that non-VFS URIs are rejected with a descriptive error.
@Test
void virtualFileSystem() throws Exception
{
String content = extension.futureGet(extension.getServer().getLegendLanguageService().loadLegendVirtualFile("legend-vfs:/dependencies.pure"));
Assertions.assertEquals(
"// READ ONLY (sourced from workspace dependencies)\n\n" +
"###Pure\n" +
"Class vscodelsp::test::dependency::Employee\n" +
"{\n" +
"  foobar1: Float[1];\n" +
"  foobar2: Float[1];\n" +
"}\n" +
"\n" +
"###Connection\n" +
"RelationalDatabaseConnection vscodelsp::test::dependency::StaticConnection\n" +
"{\n" +
"  type: H2;\n" +
"  specification: LocalH2\n" +
"  {\n" +
"  };\n" +
"  auth: DefaultH2;\n" +
"}\n" +
"\n",
content
);
// a non legend-vfs scheme must be refused by the virtual filesystem
ResponseErrorException exception = Assertions.assertThrows(ResponseErrorException.class, () -> extension.futureGet(extension.getServer().getLegendLanguageService().loadLegendVirtualFile("file:/dependencies.pure")));
Assertions.assertTrue(exception.getResponseError().getData().toString().contains("Provided URI not managed by Legend Virtual Filesystem: " + "file:/dependencies.pure"));
}
// Verifies the legend/entities request: with no documents specified it returns
// every entity (including those sourced from workspace dependencies — see the
// virtualFileSystem test), and with explicit documents only their entities.
@Test
void entities() throws Exception
{
Path file1Path = extension.addToWorkspace("file1.pure", "###Pure\n" +
"Class abc::abc\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n" +
"Class abc::abc2\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n" +
"Class abc::abc3\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n");
extension.addToWorkspace("file2.pure", "###Pure\n" +
"Class xyz::abc\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n" +
"Class xyz::abc2\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n" +
"Class xyz::abc3\n" +
"{\n" +
"  abc: String[1];\n" +
"}\n");
Path enumPath = extension.addToWorkspace("enum.pure", "Enum test::model::TestEnumeration\n" +
"{\n" +
"  VAL1, VAL2,\n" +
"  VAL3, VAL4\n" +
"}\n");
List<LegendEntity> entities = extension.futureGet(extension.getServer().getLegendLanguageService().entities(new LegendEntitiesRequest()));
// 7 workspace entities + 2 dependency entities (Employee, StaticConnection)
Assertions.assertEquals(9, entities.size());
// sort by path for a deterministic JSON comparison
entities.sort(Comparator.comparing(LegendEntity::getPath));
String json = new Gson().toJson(entities);
JsonAssert.assertJsonEquals(
"[" +
"  {\"path\":\"abc::abc\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"abc\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"abc\",\"type\":\"String\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"abc\"}}," +
"  {\"path\":\"abc::abc2\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"abc2\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"abc\",\"type\":\"String\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"abc\"}}," +
"  {\"path\":\"abc::abc3\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"abc3\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"abc\",\"type\":\"String\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"abc\"}}," +
"  {\"path\":\"test::model::TestEnumeration\",\"classifierPath\":\"meta::pure::metamodel::type::Enumeration\",\"content\":{\"_type\":\"Enumeration\",\"name\":\"TestEnumeration\",\"values\":[{\"value\":\"VAL1\",\"stereotypes\":[],\"taggedValues\":[]},{\"value\":\"VAL2\",\"stereotypes\":[],\"taggedValues\":[]},{\"value\":\"VAL3\",\"stereotypes\":[],\"taggedValues\":[]},{\"value\":\"VAL4\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"taggedValues\":[],\"package\":\"test::model\"}}," +
"  {\"path\":\"vscodelsp::test::dependency::Employee\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"Employee\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"foobar1\",\"type\":\"Float\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]},{\"name\":\"foobar2\",\"type\":\"Float\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"vscodelsp::test::dependency\"}}," +
"  {\"path\":\"vscodelsp::test::dependency::StaticConnection\",\"classifierPath\":\"meta::pure::runtime::PackageableConnection\",\"content\":{\"_type\":\"connection\",\"name\":\"StaticConnection\",\"connectionValue\":{\"_type\":\"RelationalDatabaseConnection\",\"type\":\"H2\",\"postProcessorWithParameter\":[],\"datasourceSpecification\":{\"_type\":\"h2Local\"},\"authenticationStrategy\":{\"_type\":\"h2Default\"},\"databaseType\":\"H2\"},\"package\":\"vscodelsp::test::dependency\"}}," +
"  {\"path\":\"xyz::abc\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"abc\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"abc\",\"type\":\"String\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"xyz\"}}," +
"  {\"path\":\"xyz::abc2\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"abc2\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"abc\",\"type\":\"String\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"xyz\"}}," +
"  {\"path\":\"xyz::abc3\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"abc3\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"abc\",\"type\":\"String\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"xyz\"}}" +
"]",
json,
JsonAssert.when(Option.IGNORING_EXTRA_FIELDS).whenIgnoringPaths("[*].location")
);
// restrict the request to enum.pure and file1.pure only
List<LegendEntity> entitiesPerFile = extension.futureGet(extension.getServer().getLegendLanguageService().entities(
new LegendEntitiesRequest(
List.of(
new TextDocumentIdentifier(enumPath.toUri().toString()),
new TextDocumentIdentifier(file1Path.toUri().toString())
)
)
)
);
Assertions.assertEquals(4, entitiesPerFile.size());
entitiesPerFile.sort(Comparator.comparing(LegendEntity::getPath));
String jsonPerFile = new Gson().toJson(entitiesPerFile);
JsonAssert.assertJsonEquals(
"[" +
"  {\"path\":\"abc::abc\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"abc\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"abc\",\"type\":\"String\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"abc\"}}," +
"  {\"path\":\"abc::abc2\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"abc2\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"abc\",\"type\":\"String\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"abc\"}}," +
"  {\"path\":\"abc::abc3\",\"classifierPath\":\"meta::pure::metamodel::type::Class\",\"content\":{\"_type\":\"class\",\"name\":\"abc3\",\"superTypes\":[],\"originalMilestonedProperties\":[],\"properties\":[{\"name\":\"abc\",\"type\":\"String\",\"multiplicity\":{\"lowerBound\":1.0,\"upperBound\":1.0},\"stereotypes\":[],\"taggedValues\":[]}],\"qualifiedProperties\":[],\"stereotypes\":[],\"taggedValues\":[],\"constraints\":[],\"package\":\"abc\"}}," +
"  {\"path\":\"test::model::TestEnumeration\",\"classifierPath\":\"meta::pure::metamodel::type::Enumeration\",\"content\":{\"_type\":\"Enumeration\",\"name\":\"TestEnumeration\",\"values\":[{\"value\":\"VAL1\",\"stereotypes\":[],\"taggedValues\":[]},{\"value\":\"VAL2\",\"stereotypes\":[],\"taggedValues\":[]},{\"value\":\"VAL3\",\"stereotypes\":[],\"taggedValues\":[]},{\"value\":\"VAL4\",\"stereotypes\":[],\"taggedValues\":[]}],\"stereotypes\":[],\"taggedValues\":[],\"package\":\"test::model\"}}" +
"]",
jsonPerFile,
JsonAssert.when(Option.IGNORING_EXTRA_FIELDS).whenIgnoringPaths("[*].location")
);
}
// Verifies textDocument/references for classes and properties declared in a
// model file, used from a mapping and from a function body — with and without
// the declaration itself included in the results.
@Test
void getDeclarationReferences() throws Exception
{
Path modelPath = extension.addToWorkspace("LegalEntity.pure",
"###Pure\n" +
"Class showcase::model::LegalEntity\n" +
"{\n" +
"  id: String[1];\n" +
"  legalName: String[1];\n" +
"  businessDate: Date[1];\n" +
"}\n" +
"Class showcase::model::LegalEntitySrc\n" +
"{\n" +
"  id: String[1];\n" +
"  legalName: String[1];\n" +
"  businessDate: Date[1];\n" +
"}"
);
Path mappingPath = extension.addToWorkspace("mapping.pure",
"###Mapping\n" +
"Mapping showcase::model::mapping\n" +
"(\n" +
"  showcase::model::LegalEntity : Pure\n" +
"  {\n" +
"    ~src showcase::model::LegalEntitySrc\n" +
"    id : '123',\n" +
"    legalName : $src.legalName,\n" +
"    businessDate : $src.businessDate\n" +
"  }\n" +
")");
Path funcPath = extension.addToWorkspace("myfunc.pure",
"###Pure\n" +
"function showcase::model::myfunc(businessDate: Date[1]): meta::pure::tds::TabularDataSet[1]\n" +
"{\n" +
"  showcase::model::LegalEntity.all($businessDate)->project(\n" +
"    [\n" +
"      x|$x.id,\n" +
"      x|$x.legalName\n" +
"    ],\n" +
"    [\n" +
"      'Id',\n" +
"      'Legal Name'\n" +
"    ]\n" +
"  )->distinct()->take(100);\n" +
"}");
String modelDocumentId = modelPath.toUri().toString();
String mappingDocumentId = mappingPath.toUri().toString();
String functionDocumentId = funcPath.toUri().toString();
this.assertReferences("Usage of class LegalEntity", modelPath, 2, 3, false,
// reference in class mapping definition
TextLocation.newTextSource(mappingDocumentId, 3, 3, 3, 30)
// todo - missing the usage on function given missing source information
);
this.assertReferences("Usage of property LegalEntity.legalName", modelPath, 4, 3, false,
// reference in class mapping property definition
TextLocation.newTextSource(mappingDocumentId, 7, 6, 7, 14),
// usage in function expression
TextLocation.newTextSource(functionDocumentId, 6, 11, 6, 19)
);
this.assertReferences("Usage of property LegalEntity.id (without declaration)", modelPath, 3, 3, false,
// reference in class mapping property definition
TextLocation.newTextSource(mappingDocumentId, 6, 6, 6, 7),
// usage in function expression
TextLocation.newTextSource(functionDocumentId, 5, 11, 5, 12)
);
this.assertReferences("Usage of property LegalEntity.id (with declaration)", modelPath, 3, 3, true,
// reference in class mapping property definition
TextLocation.newTextSource(mappingDocumentId, 6, 6, 6, 7),
// usage in function expression
TextLocation.newTextSource(functionDocumentId, 5, 11, 5, 12),
// the declaration of the property
TextLocation.newTextSource(modelDocumentId, 3, 2, 3, 15)
);
this.assertReferences("Usage of class LegalEntitySrc", modelPath, 8, 3, false,
// reference in the Pure class mapping ~src
TextLocation.newTextSource(mappingDocumentId, 5, 11, 5, 41)
);
this.assertReferences("Usage of class LegalEntitySrc.businessDate", modelPath, 11, 3, false,
// reference in class mapping property right-side expression
TextLocation.newTextSource(mappingDocumentId, 8, 26, 8, 37)
);
}
/**
 * Requests textDocument/references at the given position and asserts the returned
 * locations equal the expected ones. Comparison is order-insensitive: both actual
 * and expected lists are sorted by their string form first.
 *
 * @param description        assertion message identifying the scenario
 * @param document           file in which the queried position lies
 * @param posLine            zero-based line of the position
 * @param posChar            zero-based character of the position
 * @param includeDeclaration whether the declaration itself should be reported
 * @param expectedReferences expected reference locations
 */
private void assertReferences(String description, Path document, int posLine, int posChar, boolean includeDeclaration, TextLocation... expectedReferences) throws Exception
{
    ReferenceParams params = new ReferenceParams(
            new TextDocumentIdentifier(document.toUri().toString()),
            new Position(posLine, posChar),
            new ReferenceContext(includeDeclaration));
    List<? extends Location> actual = extension.futureGet(extension.getServer().getTextDocumentService().references(params));
    Comparator<Location> byText = Comparator.comparing(Location::toString);
    List<Location> expected = Stream.of(expectedReferences)
            .map(ref -> new Location(ref.getDocumentId(), LegendToLSPUtilities.toRange(ref.getTextInterval())))
            .sorted(byText)
            .collect(Collectors.toList());
    actual.sort(byText);
    Assertions.assertEquals(expected, actual, description);
}
}
|
// Copyright (c) - Damien Fontaine <damien.fontaine@lineolia.net>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>
package record
import (
"archive/zip"
"context"
"errors"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"strings"
"time"
"github.com/mongodb/mongo-go-driver/bson/primitive"
"github.com/DamienFontaine/lunarc/datasource/mongo"
"github.com/mongodb/mongo-go-driver/mongo/gridfs"
"github.com/mongodb/mongo-go-driver/mongo/options"
"github.com/mongodb/mongo-go-driver/x/bsonx"
)
//Record audio
//
//Record mirrors one GridFS file document in the "records" bucket: the stored
//wav filename, its transcript metadata, and the Mongo document id.
type Record struct {
//Metadata carries the transcript text and dataset-split assignment.
Metadata Metadata `json:"metadata" bson:"metadata"`
//Filename is the generated wav name (Unix-timestamp based — see Service.Add).
Filename string `json:"filename" bson:"filename"`
//ID is the GridFS file _id; zero until the record has been stored.
ID primitive.ObjectID `json:"id,omitempty" bson:"_id,omitempty"`
}
//Metadata audio text
//
//Metadata is the per-record annotation stored alongside the audio file.
type Metadata struct {
//Text is the transcript of the audio.
Text string `json:"text" bson:"text"`
//Set is the dataset split: "train", "dev", or anything else is treated as
//"test" (see Service.Export).
Set string `json:"set" bson:"set"`
}
//Manager manage records
//
//Manager abstracts storage of audio records so callers can be tested against
//a fake implementation; Service is the Mongo/GridFS-backed one.
type Manager interface {
//Add stores the audio read from reader together with r's metadata.
Add(r Record, reader io.Reader) (Record, error)
//FindAll returns every stored record.
FindAll() ([]Record, error)
//FindPerPage returns one page of records (page number, page size).
FindPerPage(int32, int32) ([]Record, error)
//Delete removes the record with the given hex-encoded ObjectID.
Delete(string) error
//Update changes the dataset split ("set") of a record and returns it.
Update(id string, set string) (Record, error)
//Upload opens a read stream over the stored audio of a record.
Upload(id string) (io.Reader, error)
//Count returns the number of stored records.
Count() (int64, error)
//Get returns the record with the given hex-encoded ObjectID.
Get(id string) (Record, error)
//Export produces a zip archive of all records (see Service.Export).
Export() (io.Reader, error)
}
//Service works with models.Thing
//
//Service is the MongoDB/GridFS-backed implementation of Manager.
type Service struct {
//MongoService provides access to the underlying Mongo database handle.
MongoService mongo.Service
}
//NewService creates a new Service
//
//NewService wires a record Service to the given mongo datasource service.
func NewService(ms mongo.Service) Service {
	return Service{MongoService: ms}
}
//Count records in datasource
//
//Count returns the number of stored records, i.e. the number of documents in
//the "records.files" GridFS metadata collection. The query is bounded by a
//5-second timeout.
func (s *Service) Count() (count int64, err error) {
	collection := s.MongoService.Mongo.Database.Collection("records.files", nil)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	if count, err = collection.Count(ctx, bsonx.Doc{}); err != nil {
		return 0, err
	}
	return count, nil
}
//Add a new Record in DataSource
//
//Add streams the audio from reader into the "records" GridFS bucket, storing
//r's transcript text and dataset split as file metadata. The generated
//filename (Unix-timestamp based, e.g. "1546300800.wav") and the new file id
//are filled into the returned Record.
func (s *Service) Add(r Record, reader io.Reader) (Record, error) {
	bucketOpts := options.GridFSBucket()
	bucketOpts.SetName("records")
	bucket, err := gridfs.NewBucket(s.MongoService.Mongo.Database, bucketOpts)
	if err != nil {
		return r, err
	}
	metadata := bsonx.Doc{}.
		Append("text", bsonx.String(r.Metadata.Text)).
		Append("set", bsonx.String(r.Metadata.Set))
	uploadOpts := options.GridFSUpload()
	uploadOpts.SetMetadata(metadata)
	// Name the file after the upload timestamp.
	r.Filename = fmt.Sprintf("%v.wav", time.Now().Unix())
	if r.ID, err = bucket.UploadFromStream(r.Filename, reader, uploadOpts); err != nil {
		return r, err
	}
	return r, nil
}
//Delete a Record in DataSource
//
//Delete removes the GridFS file (content and metadata chunks) whose
//hex-encoded ObjectID is id from the "records" bucket.
//
//Fix: the error from primitive.ObjectIDFromHex was previously discarded, so a
//malformed id silently attempted to delete the zero ObjectID instead of
//reporting the bad input.
func (s *Service) Delete(id string) error {
	optsBucket := options.GridFSBucket()
	optsBucket.SetName("records")
	bucket, err := gridfs.NewBucket(s.MongoService.Mongo.Database, optsBucket)
	if err != nil {
		return err
	}
	objectID, err := primitive.ObjectIDFromHex(id)
	if err != nil {
		return err
	}
	if err = bucket.Delete(objectID); err != nil {
		return err
	}
	return nil
}
//FindAll Record in DataSource
//
//FindAll returns every record stored in the "records" GridFS bucket, decoding
//each file document into a Record. The cursor is bounded by a 5-second
//timeout and closed before returning.
func (s *Service) FindAll() (r []Record, err error) {
	bucketOpts := options.GridFSBucket()
	bucketOpts.SetName("records")
	bucket, err := gridfs.NewBucket(s.MongoService.Mongo.Database, bucketOpts)
	if err != nil {
		return r, err
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	cursor, err := bucket.Find(ctx, nil)
	if err != nil {
		return r, err
	}
	defer cursor.Close(ctx)
	for cursor.Next(ctx) {
		var rec Record
		if err = cursor.Decode(&rec); err != nil {
			log.Printf("Error: %v", err)
			return r, err
		}
		r = append(r, rec)
	}
	return r, nil
}
//FindPerPage Record in DataSource
//
//FindPerPage returns one page of records from the "records" bucket: it skips
//page*limit documents and returns at most limit of them (page is therefore
//zero-based).
func (s *Service) FindPerPage(page int32, limit int32) (r []Record, err error) {
	bucketOpts := options.GridFSBucket()
	bucketOpts.SetName("records")
	bucket, err := gridfs.NewBucket(s.MongoService.Mongo.Database, bucketOpts)
	if err != nil {
		return r, err
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	findOpts := options.GridFSFind()
	findOpts.SetSkip(page * limit)
	findOpts.SetLimit(limit)
	cursor, err := bucket.Find(ctx, findOpts)
	if err != nil {
		return r, err
	}
	defer cursor.Close(ctx)
	for cursor.Next(ctx) {
		var rec Record
		if err = cursor.Decode(&rec); err != nil {
			log.Printf("Error: %v", err)
			return r, err
		}
		r = append(r, rec)
	}
	return r, nil
}
//Upload a record
//
//Upload opens a download stream over the stored audio identified by the
//hex-encoded ObjectID id and returns it as an io.Reader.
//
//Fix: the error from primitive.ObjectIDFromHex was previously ignored, so a
//malformed id queried GridFS with the zero ObjectID instead of failing fast.
func (s *Service) Upload(id string) (r io.Reader, err error) {
	optsBucket := options.GridFSBucket()
	optsBucket.SetName("records")
	bucket, err := gridfs.NewBucket(s.MongoService.Mongo.Database, optsBucket)
	if err != nil {
		return r, err
	}
	objectID, err := primitive.ObjectIDFromHex(id)
	if err != nil {
		return r, err
	}
	r, err = bucket.OpenDownloadStream(objectID)
	return
}
// Get a Record
//
// Get looks up the record whose hex-encoded ObjectID is id in the "records"
// bucket and decodes it; the zero Record is returned when nothing matches.
//
// Fixes: the errors from primitive.ObjectIDFromHex and bucket.Find were
// previously ignored (a failed Find would have caused a nil-cursor panic in
// the loop), and the cursor was never closed.
func (s *Service) Get(id string) (r Record, err error) {
	optsBucket := options.GridFSBucket()
	optsBucket.SetName("records")
	bucket, err := gridfs.NewBucket(s.MongoService.Mongo.Database, optsBucket)
	if err != nil {
		return r, err
	}
	objectID, err := primitive.ObjectIDFromHex(id)
	if err != nil {
		return r, err
	}
	res, err := bucket.Find(bsonx.Doc{bsonx.Elem{Key: "_id", Value: bsonx.ObjectID(objectID)}})
	if err != nil {
		return r, err
	}
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	defer res.Close(ctx)
	for res.Next(ctx) {
		if err := res.Decode(&r); err != nil {
			log.Printf("Error: %v", err)
			return r, err
		}
	}
	return r, nil
}
//Update Record in DataSource
//
//Update sets metadata.set of the record identified by the hex-encoded
//ObjectID id to the given split value, then re-reads and returns the record.
//An error is returned when the id is malformed or matches no stored file.
//
//Fix: the error from primitive.ObjectIDFromHex was previously ignored.
func (s *Service) Update(id string, set string) (r Record, err error) {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	objectID, err := primitive.ObjectIDFromHex(id)
	if err != nil {
		return r, err
	}
	res, err := s.MongoService.Mongo.Database.Collection("records.files").UpdateOne(ctx, bsonx.Doc{bsonx.Elem{Key: "_id", Value: bsonx.ObjectID(objectID)}}, bsonx.Doc{bsonx.Elem{Key: "$set", Value: bsonx.Document(bsonx.Doc{bsonx.Elem{Key: "metadata.set", Value: bsonx.String(set)}})}})
	if err != nil {
		return r, err
	}
	if res.MatchedCount == 0 {
		return r, errors.New("file with this ID not found")
	}
	r, err = s.Get(id)
	if err != nil {
		return r, err
	}
	return r, nil
}
// Export all records
//
// Export builds a dataset archive: train/dev/test directories each holding a
// CSV manifest ("wav_filename,wav_filesize,transcript"), the wav files placed
// per Metadata.Set ("train", "dev", anything else -> test), and a
// vocabulary.txt with every transcript. The tree is zipped and a reader over
// the zip file is returned.
//
// Fixes over the previous version: file-create errors are checked before
// deferring Close, wav files are closed inside the loop (a defer there kept
// every file open until return), the filepath.Walk error is no longer
// ignored, the zip writer is explicitly closed (flushing the central
// directory) before the archive is re-opened, and the staging directory is
// removed on exit.
func (s *Service) Export() (r io.Reader, err error) {
	records, err := s.FindAll()
	if err != nil {
		return
	}
	// Staging area; only needed while the archive is being built.
	tmpDirPath, err := ioutil.TempDir("", "export-")
	if err != nil {
		return
	}
	defer os.RemoveAll(tmpDirPath)
	testDirPath := filepath.Join(tmpDirPath, "test")
	if err = os.Mkdir(testDirPath, 0700); err != nil {
		return
	}
	test, err := os.Create(filepath.Join(testDirPath, "test.csv"))
	if err != nil {
		return
	}
	defer test.Close()
	test.WriteString("wav_filename,wav_filesize,transcript\n")
	devDirPath := filepath.Join(tmpDirPath, "dev")
	if err = os.Mkdir(devDirPath, 0700); err != nil {
		return
	}
	dev, err := os.Create(filepath.Join(devDirPath, "dev.csv"))
	if err != nil {
		return
	}
	defer dev.Close()
	dev.WriteString("wav_filename,wav_filesize,transcript\n")
	trainDirPath := filepath.Join(tmpDirPath, "train")
	if err = os.Mkdir(trainDirPath, 0700); err != nil {
		return
	}
	train, err := os.Create(filepath.Join(trainDirPath, "train.csv"))
	if err != nil {
		return
	}
	defer train.Close()
	train.WriteString("wav_filename,wav_filesize,transcript\n")
	// Create vocabulary
	voc, err := os.Create(filepath.Join(tmpDirPath, "vocabulary.txt"))
	if err != nil {
		return
	}
	defer voc.Close()
	for _, record := range records {
		// Add in vocabulary
		voc.WriteString(record.Metadata.Text + "\n")
		// Stream the wav out of GridFS into its split directory.
		reader, err := s.Upload(record.ID.Hex())
		if err != nil {
			return nil, err
		}
		var path string
		switch record.Metadata.Set {
		case "train":
			path = trainDirPath
		case "dev":
			path = devDirPath
		default:
			path = testDirPath
		}
		file, err := os.Create(filepath.Join(path, record.Filename))
		if err != nil {
			return nil, err
		}
		written, err := io.Copy(file, reader)
		// Close now: a defer inside the loop would hold every wav open
		// until Export returns.
		file.Close()
		if err != nil {
			return nil, err
		}
		switch record.Metadata.Set {
		case "train":
			train.WriteString(fmt.Sprintf("/data/train/%s,%d,%s\n", record.Filename, written, record.Metadata.Text))
		case "dev":
			dev.WriteString(fmt.Sprintf("/data/dev/%s,%d,%s\n", record.Filename, written, record.Metadata.Text))
		default:
			test.WriteString(fmt.Sprintf("/data/test/%s,%d,%s\n", record.Filename, written, record.Metadata.Text))
		}
	}
	//Create ZIP
	zipFile, err := ioutil.TempFile("", "export-*.zip")
	if err != nil {
		return
	}
	archive := zip.NewWriter(zipFile)
	baseDir := filepath.Base(tmpDirPath)
	walkErr := filepath.Walk(tmpDirPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		header, err := zip.FileInfoHeader(info)
		if err != nil {
			return err
		}
		if baseDir != "" {
			header.Name = filepath.Join(baseDir, strings.TrimPrefix(path, tmpDirPath))
		}
		if info.IsDir() {
			header.Name += "/"
		} else {
			header.Method = zip.Deflate
		}
		writer, err := archive.CreateHeader(header)
		if err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		src, err := os.Open(path)
		if err != nil {
			return err
		}
		defer src.Close()
		_, err = io.Copy(writer, src)
		return err
	})
	// Flush the zip central directory before the archive is handed out;
	// previously both closes were deferred and raced with the re-open below.
	closeErr := archive.Close()
	if walkErr == nil {
		walkErr = closeErr
	}
	if cerr := zipFile.Close(); walkErr == nil {
		walkErr = cerr
	}
	if walkErr != nil {
		os.Remove(zipFile.Name())
		return nil, walkErr
	}
	return os.Open(zipFile.Name())
}
|
336. Palindrome Pairs
要用到哈希表来建立每个单词和其位置的映射,然后需要一个set来保存出现过的单词的长度,算法的思想是,遍历单词集,对于遍历到的单词,我们对其翻转一下,然后在哈希表查找翻转后的字符串是否存在,注意不能和原字符串的坐标位置相同,因为有可能一个单词翻转后和原单词相等,现在我们只是处理了bat和tab的情况,还存在abcd和cba,dcb和abcd这些情况需要考虑,这就是我们为啥需要用set,由于set是自动排序的,我们可以找到当前单词长度在set中的iterator,然后从开头开始遍历set,遍历比当前单词小的长度,比如abcdd翻转后为ddcba,我们发现set中有长度为3的单词,然后我们dd是否为回文串,若是,再看cba是否存在于哈希表,若存在,则说明abcdd和cba是回文对,存入结果中,对于dcb和aabcd这类的情况也是同样处理,我们要在set里找的字符串要在遍历到的字符串的左边和右边分别尝试,看是否是回文对,这样遍历完单词集,就能得到所有的回文对。
实际不需要用 set:截取出 str2 后,不仅要判断其翻转是否在 map 中,还要检查 str2 的长度是否不为 0(避免整词拆分被重复统计)——两处分支的判断条件因此不同
class Solution {
public List<List<Integer>> palindromePairs(String[] words) {
List<List<Integer>> res = new ArrayList<>();
if(words == null || words.length < 2){
return res;
}
Map<String, Integer> map = new HashMap<>();//string - index
for(int i = 0; i<words.length; i++){
map.put(words[i], i);
}
for(int i = 0; i<words.length; i++){
for(int j = 0; j<=words[i].length(); j++){
String str1 = words[i].substring(0, j);
String str2 = words[i].substring(j);
if(isPalindrom(str1)){
String str2reverse = new StringBuilder(str2).reverse().toString();
if(map.containsKey(str2reverse) && map.get(str2reverse) != i){
res.add(Arrays.asList(map.get(str2reverse), i));
}
}
if(str2.length() != 0 && isPalindrom(str2)){
String str1reverse = new StringBuilder(str1).reverse().toString();
if(map.containsKey(str1reverse) && map.get(str1reverse) != i){
res.add(Arrays.asList(i, map.get(str1reverse)));
}
}
}
}
return res;
}
private boolean isPalindrom(String s){
int left = 0;
int right = s.length() - 1;
while(left <= right){
if(s.charAt(left++) != s.charAt(right--))
return false;
}
return true;
}
}
337. House Robber III
(1) array dynamic programming
这种方法的递归函数返回一个大小为2的一维数组res,其中res[0]表示不包含当前节点值的最大值,res[1]表示包含当前值的最大值,那么我们在遍历某个节点时,首先对其左右子节点调用递归函数,分别得到包含与不包含左子节点值的最大值,和包含于不包含右子节点值的最大值,那么当前节点的res[0]就是左子节点两种情况的较大值加上右子节点两种情况的较大值,res[1]就是不包含左子节点值的最大值加上不包含右子节点值的最大值,和当前节点值之和,返回即可。
/**
 * LeetCode 337. House Robber III — tree DP in one post-order pass.
 *
 * dfs returns a pair for each subtree: [0] = best loot when the subtree root
 * is skipped, [1] = best loot when it is robbed (its children must then be
 * skipped). The answer is the better of the two options at the root.
 */
class Solution {
    public int rob(TreeNode root) {
        int[] best = dfs(root);
        return Math.max(best[0], best[1]);
    }

    private int[] dfs(TreeNode node) {
        if (node == null) {
            return new int[]{0, 0};
        }
        int[] left = dfs(node.left);
        int[] right = dfs(node.right);
        // Skip this node: each child may independently be robbed or skipped.
        int skipped = Math.max(left[0], left[1]) + Math.max(right[0], right[1]);
        // Rob this node: both children must be skipped.
        int robbed = left[0] + right[0] + node.val;
        return new int[]{skipped, robbed};
    }
}
(2) recursion very slow
/**
 * LeetCode 337 — plain recursion (very slow: subtrees are recomputed, giving
 * exponential time). Either rob this node plus its grandchildren, or skip it
 * and rob its children; take whichever yields more.
 */
public int rob(TreeNode root) {
    if (root == null) {
        return 0;
    }
    // Loot when root is robbed: children are off-limits, grandchildren allowed.
    int grandchildLoot = 0;
    if (root.left != null) {
        grandchildLoot += rob(root.left.left) + rob(root.left.right);
    }
    if (root.right != null) {
        grandchildLoot += rob(root.right.left) + rob(root.right.right);
    }
    // Compare against skipping root and robbing the child subtrees freely.
    return Math.max(grandchildLoot + root.val, rob(root.left) + rob(root.right));
}
338. Counting Bits
(1) find pattern
我们写出0到 15 的数的二进制和1的个数如下:
0 0000 0
-------------
1 0001 1
-------------
2 0010 1
3 0011 2
-------------
4 0100 1
5 0101 2
6 0110 2
7 0111 3
-------------
8 1000 1
9 1001 2
10 1010 2
11 1011 3
12 1100 2
13 1101 3
14 1110 3
15 1111 4
我最先看出的规律是这样的,除去前两个数字0个1,从2开始,2和3,是 [21, 22) 区间的,值为1和2。而4到7属于 [22, 23) 区间的,值为 1,2,2,3,前半部分1和2和上一区间相同,2和3是上面的基础上每个数字加1。再看8到 15,属于 [23, 24) 区间的,同样满足上述规律
class Solution {
public int[] countBits(int num) {
if(num == 0)
return new int[]{0};
int[] res = new int[num + 1];
res[0] = 0; res[1] = 1;
int k = 2;
int i = 2;
int index = 2;
while(i <= num){
for(i = (int)Math.pow(2, k-1); i < (int)Math.pow(2, k); i++){
if(i > num)
break;
int t = (int)(Math.pow(2, k) - Math.pow(2, k-1)) / 2;
if(i < Math.pow(2, k-1) + t){
res[index] = res[i-t];
index++;
}else{
res[index] = res[i-t] + 1;
index++;
}
}
k++;
}
return res;
}
}
(2) bit manipulation
巧妙的利用了 i&(i - 1), 这个本来是用来判断一个数是否是2的指数的快捷方法,比如8,二进制位 1000, 那么 8&(8-1) 为0,只要为0就是2的指数, 那么我们现在来看一下0到 15 的数字和其对应的 i&(i - 1) 值:
i binary '1' i&(i-1)
0 0000 0
-----------------------
1 0001 1 0000
-----------------------
2 0010 1 0000
3 0011 2 0010
-----------------------
4 0100 1 0000
5 0101 2 0100
6 0110 2 0100
7 0111 3 0110
-----------------------
8 1000 1 0000
9 1001 2 1000
10 1010 2 1000
11 1011 3 1010
12 1100 2 1000
13 1101 3 1100
14 1110 3 1100
15 1111 4 1110
我们可以发现每个i值都是 i&(i-1) 对应的值加1。
* Time complexity: O(n)
* Space complexity: O(1)
class Solution {
public int[] countBits(int num) {
if(num == 0)
return new int[]{0};
int[] res = new int[num + 1];
for(int i = 1; i<=num; i++){
res[i] = res[i & i-1] + 1;
}
return res;
}
}
339. Nested List Weight Sum
* Time complexity: O(n)
* Space complexity: O(n)
(1) recursion
/**
* // This is the interface that allows for creating nested lists.
* // You should not implement it, or speculate about its implementation
* public interface NestedInteger {
* // Constructor initializes an empty nested list.
* public NestedInteger();
*
* // Constructor initializes a single integer.
* public NestedInteger(int value);
*
* // @return true if this NestedInteger holds a single integer, rather than a nested list.
* public boolean isInteger();
*
* // @return the single integer that this NestedInteger holds, if it holds a single integer
* // Return null if this NestedInteger holds a nested list
* public Integer getInteger();
*
* // Set this NestedInteger to hold a single integer.
* public void setInteger(int value);
*
* // Set this NestedInteger to hold a nested list and adds a nested integer to it.
* public void add(NestedInteger ni);
*
* // @return the nested list that this NestedInteger holds, if it holds a nested list
* // Return null if this NestedInteger holds a single integer
* public List<NestedInteger> getList();
* }
*/
class Solution {
    /**
     * Nested List Weight Sum: each integer contributes value * depth, with the
     * outermost list at depth 1.
     */
    public int depthSum(List<NestedInteger> nestedList) {
        return nestedList == null ? 0 : sumAtDepth(nestedList, 1);
    }

    // Accumulates integer * depth across the list, recursing into sublists.
    private int sumAtDepth(List<NestedInteger> list, int depth) {
        int total = 0;
        for (NestedInteger item : list) {
            total += item.isInteger()
                    ? item.getInteger() * depth
                    : sumAtDepth(item.getList(), depth + 1);
        }
        return total;
    }
}
(2) iteration
class Solution {
    /**
     * Nested List Weight Sum, iterative BFS: process one nesting level per
     * round, multiplying integers by the current depth (outermost = 1).
     * (Removed a leftover debug System.out.println of the queue size.)
     */
    public int depthSum(List<NestedInteger> nestedList) {
        if (nestedList == null) {
            return 0;
        }
        int depth = 1;
        int res = 0;
        Queue<NestedInteger> queue = new LinkedList<>(nestedList);
        while (!queue.isEmpty()) {
            int size = queue.size();
            for (int i = 0; i < size; i++) {
                NestedInteger nest = queue.poll();
                if (nest.isInteger()) {
                    res += nest.getInteger() * depth;
                } else {
                    // Defer nested lists to the next BFS round; they are
                    // weighted only after depth is incremented.
                    queue.addAll(nest.getList());
                }
            }
            depth++;
        }
        return res;
    }
}
340. Longest Substring with At Most K Distinct Characters
* Time complexity: O(n)
* Space complexity: O(n)
(1) hashmap
利用hashmap 的key不能重复的性质, map中记录的是字母和其最大的index。map的size始终保持在k的大小, 每次加一个新的char,此时map的size是k+1, 删掉index最小的字母,并更新left位置。
class Solution {
    /**
     * Length of the longest substring of s containing at most k distinct
     * characters. Sliding window with per-character counts: O(n) time, O(k)
     * space. Replaces the previous version that re-scanned the map minimum
     * (Collections.min) on every shrink, which was O(n * k).
     *
     * @param s input string (non-null)
     * @param k maximum number of distinct characters allowed
     * @return length of the longest qualifying substring (0 when k == 0)
     */
    public int lengthOfLongestSubstringKDistinct(String s, int k) {
        if (k == 0) {
            return 0;
        }
        int n = s.length();
        if (n < k) {
            // Fewer characters than k: the whole string trivially qualifies.
            return n;
        }
        Map<Character, Integer> counts = new HashMap<>();
        int left = 0;
        int max = 0;
        for (int right = 0; right < n; right++) {
            counts.merge(s.charAt(right), 1, Integer::sum);
            // Shrink from the left until at most k distinct characters remain.
            while (counts.size() > k) {
                char c = s.charAt(left++);
                if (counts.merge(c, -1, Integer::sum) == 0) {
                    counts.remove(c);
                }
            }
            max = Math.max(max, right - left + 1);
        }
        return max;
    }
}
|
/*
* *******************************************************************************
* Copyright (c) 2023 BMW AG
* Copyright (c) 2023 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
* ********************************************************************************
*/
import React, { useState, useEffect, useContext } from 'react';
import Modal from 'react-bootstrap/Modal';
import Button from 'react-bootstrap/Button';
import Alert from 'react-bootstrap/Alert';
import Form from 'react-bootstrap/Form';
import ListGroup from 'react-bootstrap/ListGroup';
import { DemandProp } from '../../interfaces/demand_interfaces';
import { CapacityGroupProp } from '../../interfaces/capacitygroup_interfaces';
import { CapacityGroupContext } from '../../contexts/CapacityGroupsContextProvider';
import {LoadingMessage} from './../common/LoadingMessages';
import { InputGroup } from 'react-bootstrap';
import { FaSearch } from 'react-icons/fa';
interface CapacityGroupAddToExistingProps {
  show: boolean;
  onHide: () => void;
  // Demands ticked in the parent table; null when nothing is selected.
  checkedDemands: DemandProp[] | null;
}

/**
 * Modal dialog that links the checked material demands to an existing
 * capacity group belonging to the same customer.
 */
const CapacityGroupAddToExisting: React.FC<CapacityGroupAddToExistingProps> = ({
  show,
  onHide,
  checkedDemands
}) => {
  // Id of the capacity group currently chosen in the list (single selection).
  const [selectedCapacityGroupId, setSelectedCapacityGroupId] = useState<string | null>(null);
  // Customer of the first checked demand; used to narrow the group list.
  const [customerFilter, setCustomerFilter] = useState<string | null>(null);
  const [filteredCapacityGroups, setFilteredCapacityGroups] = useState<CapacityGroupProp[] | null>(null);
  const [searchQuery, setSearchQuery] = useState<string>('');
  const [isLoading, setIsLoading] = useState<boolean>(false);

  const capacityGroupContext = useContext(CapacityGroupContext);
  const { capacitygroups } = capacityGroupContext || {};

  // Toggles the selected capacity group. NOTE(review): despite the parameter
  // name, the id passed in is a capacity group internalId, not a demand id.
  const toggleDemandSelection = (demandId: string) => {
    if (selectedCapacityGroupId === demandId) {
      setSelectedCapacityGroupId(null);
    } else {
      setSelectedCapacityGroupId(demandId);
    }
  };

  // Clears selection and search text so the modal opens fresh next time.
  const resetModalValues = () => {
    setSelectedCapacityGroupId(null);
    setSearchQuery('');
  };

  // Restrict the capacity group list to the customer of the first checked demand.
  // NOTE(review): isLoading is set true but only cleared once `capacitygroups`
  // is present; the effect re-runs when the context loads, which resolves the
  // spinner — confirm this is the intended loading behavior.
  useEffect(() => {
    if (checkedDemands) {
      const customer = checkedDemands[0]?.customer.companyName || null;
      setCustomerFilter(customer);
      if (customer) {
        setIsLoading(true);
        if (capacitygroups) {
          const filteredGroups = capacitygroups.filter((group) => group.customerName === customer);
          setFilteredCapacityGroups(filteredGroups);
          setIsLoading(false);
        }
      }
    }
  }, [checkedDemands, capacityGroupContext, capacitygroups]);

  // Free-text search across group name, customer name, BPNL and group id.
  useEffect(() => {
    if (customerFilter && capacitygroups) {
      const filteredGroups = capacitygroups.filter((group) =>
        (group.name && group.name.toLowerCase().includes(searchQuery.toLowerCase())) ||
        (group.customerName && group.customerName.toLowerCase().includes(searchQuery.toLowerCase())) ||
        (group.customerBPNL && group.customerBPNL.toLowerCase().includes(searchQuery.toLowerCase())) ||
        (group.capacityGroupId && group.capacityGroupId.toString().toLowerCase().includes(searchQuery.toLowerCase()))
      );
      setFilteredCapacityGroups(filteredGroups);
    }
  }, [searchQuery, customerFilter, capacitygroups]);

  // Sends the link request with all checked demand ids, then closes and resets.
  const handleLinkToCapacityGroup = () => {
    if (selectedCapacityGroupId && checkedDemands && checkedDemands.length > 0) {
      const demandIds = checkedDemands.map((demand) => demand.id);
      const capacityGroupLink = {
        capacityGroupID: selectedCapacityGroupId,
        linkedMaterialDemandID: demandIds,
      };
      capacityGroupContext?.linkToCapacityGroup(capacityGroupLink);
      onHide();
      resetModalValues();
    }
  };

  // Modal body: error alert when nothing is checked, otherwise the searchable
  // group list plus the currently selected group.
  const renderDemands = () => {
    if (!checkedDemands || checkedDemands.length === 0) {
      return (
        <Alert variant="danger" onClose={onHide}>
          <p>No Demands selected.</p>
        </Alert>
      );
    }
    return (
      <>
        <div>
          <InputGroup className="mb-3">
            <InputGroup.Text id="basic-addon1"><FaSearch /></InputGroup.Text>
            <Form.Control
              type="text"
              placeholder="Search for capacity groups..."
              value={searchQuery}
              onChange={(e) => setSearchQuery(e.target.value)}
              aria-describedby="basic-addon1"
            />
          </InputGroup>
          <br />
          <span>Customer - Capacity Group Name</span>
          {isLoading ? (
            <LoadingMessage />
          ) : (
            <ListGroup>
              {filteredCapacityGroups &&
                filteredCapacityGroups.map((group) => (
                  <ListGroup.Item
                    key={group.capacityGroupId}
                    className="d-flex justify-content-between align-items-center"
                  >
                    <span> {group.customerName} - {group.name}</span>
                    <Button
                      variant={selectedCapacityGroupId === group.internalId ? 'primary' : 'outline-primary'}
                      onClick={() => toggleDemandSelection(group.internalId)}
                    >
                      Select
                    </Button>
                  </ListGroup.Item>
                ))}
            </ListGroup>
          )}
        </div>
        <br />
        <div>
          <h4>Selected Capacity Group :</h4>
          <ListGroup>
            {selectedCapacityGroupId && (
              <ListGroup.Item>
                {selectedCapacityGroupId}
                <Button
                  variant="danger"
                  onClick={() => setSelectedCapacityGroupId(null)}
                >
                  Remove
                </Button>
              </ListGroup.Item>
            )}
          </ListGroup>
        </div>
      </>
    );
  };

  return (
    <Modal show={show} onHide={onHide}>
      <Modal.Header closeButton>
        <Modal.Title>Link to Existing Capacity Group</Modal.Title>
      </Modal.Header>
      <Modal.Body>
        {renderDemands()}
      </Modal.Body>
      <Modal.Footer>
        <Button variant="secondary" onClick={onHide}>
          Close
        </Button>
        {/* The link button is only offered when there is something to link. */}
        {checkedDemands !== null && checkedDemands.length > 0 && (
          <Button variant="primary" onClick={handleLinkToCapacityGroup}>
            Link to Capacity Group
          </Button>
        )}
      </Modal.Footer>
    </Modal>
  );
};

export default CapacityGroupAddToExisting;
|
# frozen_string_literal: true
class Plan < ApplicationRecord
  belongs_to :site

  # Display name taken from the static plan definition.
  def name
    plan_defaults[:name]
  end

  # True when this site is on the free tier.
  def free?
    plan_id == Plans.free_plan[:id]
  end

  # True when this month's recordings have reached the plan allowance.
  def exceeded?
    current_month_recordings_count >= max_monthly_recordings
  end

  def enterprise?
    plan_defaults[:enterprise]
  end

  def deprecated?
    plan_defaults[:deprecated]
  end

  # A plan is invalid only when a billing record exists and is flagged invalid.
  def invalid?
    # They have no billing so it can't be invalid
    return false if free?

    unless site.billing
      Rails.logger.info "Site #{site.id} is missing billing but is not on the free tier"
      return false
    end

    site.billing.status == Billing::INVALID
  end

  # The readers below prefer a per-site override stored on this record and fall
  # back to the static plan defaults when the override is absent.

  def features_enabled
    self[:features_enabled].presence || plan_defaults[:features_enabled]
  end

  def team_member_limit
    self[:team_member_limit] || plan_defaults[:team_member_limit]
  end

  def max_monthly_recordings
    self[:max_monthly_recordings] || plan_defaults[:max_monthly_recordings]
  end

  def data_storage_months
    self[:data_storage_months] || plan_defaults[:data_storage_months]
  end

  def response_time_hours
    self[:response_time_hours] || plan_defaults[:response_time_hours]
  end

  def support
    # .presence treats both nil and [] as "no override", so a nil column value
    # falls back to the defaults instead of raising NoMethodError on #empty?
    # (now consistent with #features_enabled).
    self[:support].presence || plan_defaults[:support]
  end

  def site_limit
    plan_defaults[:site_limit]
  end

  # Fraction of the monthly recording allowance used. May exceed 1.0; a zero
  # allowance yields Float::INFINITY rather than raising.
  def fractional_usage
    current_month_recordings_count.to_f / max_monthly_recordings
  end

  def pricing
    plan_defaults[:pricing]
  end

  def change_plan!(plan_id)
    # Set the plan_id and reset all the overrides
    update!(
      plan_id:,
      features_enabled: [],
      team_member_limit: nil,
      max_monthly_recordings: nil,
      data_storage_months: nil,
      response_time_hours: nil,
      support: []
    )
  end

  # Starts a 14 day trial: schedules the end-of-trial job, mails the owner and
  # temporarily raises the cap / enables every feature.
  def start_free_trial!
    FreeTrialJob.set(wait: 14.days).perform_later(site_id)
    FreeTrialMailerService.enqueue(site)

    update!(
      max_monthly_recordings: 5000,
      features_enabled: Types::Plans::Feature.values.keys
    )
  end

  # Reverts the trial overrides back to the plan defaults.
  def end_free_trial!
    update!(
      max_monthly_recordings: nil,
      features_enabled: []
    )
  end

  # Recordings created in the current calendar month, memoized per instance.
  def current_month_recordings_count
    # This causes n+1 in the admin app
    @current_month_recordings_count ||= site.recordings.where(
      'created_at > ? AND created_at < ?',
      Time.current.beginning_of_month,
      Time.current.end_of_month
    ).count
  end

  private

  # Static defaults for this plan_id, memoized per instance.
  def plan_defaults
    @plan_defaults ||= Plans.find_by_plan_id(plan_id)
  end
end
|
using ComplexCRUDApplication.Models;
using ComplexCRUDApplication.Repos;
using ComplexCRUDApplication.Services;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Options;
using Microsoft.IdentityModel.Tokens;
using System.IdentityModel.Tokens.Jwt;
using System.Security.Claims;
using System.Text;
namespace ComplexCRUDApplication.Controllers
{
    /// <summary>
    /// Issues JWT access tokens and rotates refresh tokens for API clients.
    /// </summary>
    [Route("api/[controller]")]
    [ApiController]
    public class AuthorizationController : ControllerBase
    {
        private readonly DataContext _dataContext;                  // EF Core context (users, refresh tokens).
        private readonly ILogger<AuthorizationController> _logger;
        private readonly JwtSettings _jwtSettings;                  // Bound from configuration via IOptions.
        private readonly IConfiguration _configuration;
        private readonly IRefreshHandler _refresh;                  // Creates and persists refresh tokens.

        public AuthorizationController(DataContext dataContext, ILogger<AuthorizationController> logger, IOptions<JwtSettings> _jwtOptions, IConfiguration configuration, IRefreshHandler refreshHandler)
        {
            _dataContext = dataContext;
            _logger = logger;
            _jwtSettings = _jwtOptions.Value;
            _configuration = configuration;
            _refresh = refreshHandler;
        }

        /// <summary>
        /// Exchanges an access token plus a known refresh token for a fresh pair.
        /// Returns 401 when the refresh token, algorithm or user cannot be matched.
        /// NOTE(review): ValidateToken throws on a bad signature/expired token, which
        /// surfaces as an unhandled exception rather than a 401 — confirm intended.
        /// </summary>
        [HttpPost("generate-refresh-token")]
        public async Task<IActionResult> GenerateRefreshToken([FromBody] TokenResponse tokenResponse)
        {
            var refreshToken = await _dataContext.TblRefreshtokens.FirstOrDefaultAsync(r => r.Refreshtoken == tokenResponse.RefreshToken);
            if (refreshToken != null)
            {
                // Generate the token by JwtSecurity key
                var tokenHandler = new JwtSecurityTokenHandler();
                var tokenKey = Encoding.UTF8.GetBytes(_jwtSettings.SecurityKey);
                SecurityToken securityToken;
                var principal = tokenHandler.ValidateToken(tokenResponse.Token, new TokenValidationParameters()
                {
                    ValidateIssuerSigningKey = true,
                    ValidateIssuer = false,
                    IssuerSigningKey = new SymmetricSecurityKey(tokenKey),
                    ValidateAudience = false
                }, out securityToken);
                var token = securityToken as JwtSecurityToken;
                // Only accept tokens signed with the expected HMAC-SHA256 algorithm.
                if (token != null && token.Header.Alg.Equals(SecurityAlgorithms.HmacSha256))
                {
                    string username = principal.Identity?.Name;
                    // The refresh token must belong to the user named in the old token.
                    var exists = await _dataContext.TblRefreshtokens.FirstOrDefaultAsync(r => r.Userid == username && r.Refreshtoken == tokenResponse.RefreshToken);
                    if (exists != null)
                    {
                        // Re-issue a short-lived access token carrying the original claims.
                        // NOTE(review): 30 second lifetime looks like a test value — confirm.
                        var jwtTokenNew = new JwtSecurityToken(
                            claims: principal.Claims.ToArray(),
                            expires: DateTime.Now.AddSeconds(30),
                            signingCredentials: new SigningCredentials(new SymmetricSecurityKey(Encoding.UTF8.GetBytes(_jwtSettings.SecurityKey)), SecurityAlgorithms.HmacSha256)
                        );
                        var finalToken = tokenHandler.WriteToken(jwtTokenNew);
                        return Ok(new TokenResponse() { Token = finalToken, RefreshToken = await _refresh.GenerateToken(username) });
                    }
                    else
                    {
                        return Unauthorized();
                    }
                }
                else
                {
                    return Unauthorized();
                }
            }
            else
            {
                return Unauthorized();
            }
        }

        /// <summary>
        /// Validates user credentials and returns an access/refresh token pair,
        /// or 401 when no matching user exists.
        /// </summary>
        [HttpPost("generate-token")]
        public async Task<IActionResult> GenerateToken([FromBody] UserCredential userCredential)
        {
            // SECURITY: credentials are compared against a plain-text password
            // column; passwords should be stored and verified as salted hashes.
            var user = await _dataContext.TblUsers.FirstOrDefaultAsync(r => r.Code == userCredential.Username && r.Password == userCredential.Password);
            if (user != null)
            {
                // Generate the token by JwtSecurity key
                var tokenHandler = new JwtSecurityTokenHandler();
                var securityKey = _jwtSettings.SecurityKey;
                var tokenKey = Encoding.UTF8.GetBytes(_jwtSettings.SecurityKey);
                // var securityKeyClone = _configuration["JwtSettings:SecurityKey"];
                // var securityKeyClone1 = _configuration.GetSection("JwtSettings:SecurityKey").Value;
                var tokenDescriptor = new SecurityTokenDescriptor
                {
                    // Name and role claims drive downstream [Authorize] checks.
                    Subject = new ClaimsIdentity(new Claim[] {
                        new Claim(ClaimTypes.Name, user.Code),
                        new Claim(ClaimTypes.Role, user.Role)
                    }),
                    Expires = DateTime.UtcNow.AddSeconds(30),
                    SigningCredentials = new SigningCredentials(new SymmetricSecurityKey(tokenKey), SecurityAlgorithms.HmacSha256)
                };
                var token = tokenHandler.CreateToken(tokenDescriptor);
                var finalToken = tokenHandler.WriteToken(token);
                return Ok(new TokenResponse() { Token = finalToken, RefreshToken = await _refresh.GenerateToken(userCredential.Username) });
            }
            else
            {
                return Unauthorized();
            }
        }
    }
}
|
<?php
/**
* WooCommerce Order Item Functions
*
* Functions for order specific things.
*
* @package WooCommerce\Functions
* @version 3.4.0
*/
defined( 'ABSPATH' ) || exit;
/**
 * Create a new order item row (e.g. a line item) for the given order.
 *
 * @param int   $order_id   Order ID.
 * @param array $item_array Item data; missing keys fall back to defaults.
 *
 * @throws Exception When `WC_Data_Store::load` validation fails.
 * @return int|bool New item ID, or false for an invalid order ID.
 */
function wc_add_order_item( $order_id, $item_array ) {
	$order_id = absint( $order_id );

	if ( ! $order_id ) {
		return false;
	}

	$item_array = wp_parse_args(
		$item_array,
		array(
			'order_item_name' => '',
			'order_item_type' => 'line_item',
		)
	);

	$store   = WC_Data_Store::load( 'order-item' );
	$item_id = $store->add_order_item( $order_id, $item_array );
	$item    = WC_Order_Factory::get_order_item( $item_id );

	do_action( 'woocommerce_new_order_item', $item_id, $item, $order_id );

	return $item_id;
}
/**
 * Update an existing order item.
 *
 * @since 2.2
 * @param int   $item_id Item ID.
 * @param array $args    Either `order_item_type` or `order_item_name`.
 *
 * @throws Exception When `WC_Data_Store::load` validation fails.
 * @return bool True if successfully updated, false otherwise.
 */
function wc_update_order_item( $item_id, $args ) {
	$store = WC_Data_Store::load( 'order-item' );

	if ( false === $store->update_order_item( $item_id, $args ) ) {
		return false;
	}

	do_action( 'woocommerce_update_order_item', $item_id, $args );

	return true;
}
/**
 * Delete an order item by its ID, firing before/after hooks.
 *
 * @param int $item_id Item ID.
 *
 * @throws Exception When `WC_Data_Store::load` validation fails.
 * @return bool False for an invalid ID, true otherwise.
 */
function wc_delete_order_item( $item_id ) {
	$item_id = absint( $item_id );

	if ( ! $item_id ) {
		return false;
	}

	$store = WC_Data_Store::load( 'order-item' );

	do_action( 'woocommerce_before_delete_order_item', $item_id );
	$store->delete_order_item( $item_id );
	do_action( 'woocommerce_delete_order_item', $item_id );

	return true;
}
/**
 * WooCommerce Order Item Meta API - update a meta value for an order item.
 *
 * @param int    $item_id    Item ID.
 * @param string $meta_key   Meta key.
 * @param mixed  $meta_value Meta value.
 * @param string $prev_value Previous value (default: '').
 *
 * @throws Exception When `WC_Data_Store::load` validation fails.
 * @return bool Whether the metadata was updated.
 */
function wc_update_order_item_meta( $item_id, $meta_key, $meta_value, $prev_value = '' ) {
	$store = WC_Data_Store::load( 'order-item' );

	if ( ! $store->update_metadata( $item_id, $meta_key, $meta_value, $prev_value ) ) {
		return false;
	}

	// Keep cached item meta in sync.
	WC_Cache_Helper::invalidate_cache_group( 'object_' . $item_id );

	return true;
}
/**
 * WooCommerce Order Item Meta API - add a meta value for an order item.
 *
 * @param int    $item_id    Item ID.
 * @param string $meta_key   Meta key.
 * @param mixed  $meta_value Meta value.
 * @param bool   $unique     If meta data should be unique (default: false).
 *
 * @throws Exception When `WC_Data_Store::load` validation fails.
 * @return int New row ID or 0.
 */
function wc_add_order_item_meta( $item_id, $meta_key, $meta_value, $unique = false ) {
	$store   = WC_Data_Store::load( 'order-item' );
	$meta_id = $store->add_metadata( $item_id, $meta_key, $meta_value, $unique );

	if ( ! $meta_id ) {
		return 0;
	}

	// Keep cached item meta in sync.
	WC_Cache_Helper::invalidate_cache_group( 'object_' . $item_id );

	return $meta_id;
}
/**
 * WooCommerce Order Item Meta API - delete a meta value for an order item.
 *
 * @param int    $item_id    Item ID.
 * @param string $meta_key   Meta key.
 * @param mixed  $meta_value Meta value (default: '').
 * @param bool   $delete_all Delete all meta data, defaults to `false`.
 *
 * @throws Exception When `WC_Data_Store::load` validation fails.
 * @return bool Whether any metadata was deleted.
 */
function wc_delete_order_item_meta( $item_id, $meta_key, $meta_value = '', $delete_all = false ) {
	$store = WC_Data_Store::load( 'order-item' );

	if ( ! $store->delete_metadata( $item_id, $meta_key, $meta_value, $delete_all ) ) {
		return false;
	}

	// Keep cached item meta in sync.
	WC_Cache_Helper::invalidate_cache_group( 'object_' . $item_id );

	return true;
}
/**
 * WooCommerce Order Item Meta API - read a meta value for an order item.
 *
 * @param int    $item_id Item ID.
 * @param string $key     Meta key.
 * @param bool   $single  Whether to return a single value. (default: true).
 *
 * @throws Exception When `WC_Data_Store::load` validation fails.
 * @return mixed
 */
function wc_get_order_item_meta( $item_id, $key, $single = true ) {
	return WC_Data_Store::load( 'order-item' )->get_metadata( $item_id, $key, $single );
}
/**
 * Look up the parent order ID for an order item ID.
 *
 * @param int $item_id Item ID.
 *
 * @throws Exception When `WC_Data_Store::load` validation fails.
 * @return int
 */
function wc_get_order_id_by_order_item_id( $item_id ) {
	return WC_Data_Store::load( 'order-item' )->get_order_id_by_order_item_id( $item_id );
}
|
import pandas as pd
import matplotlib as plt
import os
import NewareNDA as nda
from galvani import BioLogic
from datetime import timedelta
import datetime
def process_neware_data(ndax_file_path, theoretical_capacity):
    """Read a Neware .ndax file and summarise capacities per cycle.

    Capacities are expressed as a percentage of ``theoretical_capacity`` (mAh);
    coulombic efficiency is max discharge / max charge * 100 per cycle.

    Returns a DataFrame with columns Cycle_Number, Charge_Capacity,
    Discharge_Capacity, Coulombic_Efficiency and Time (elapsed seconds at the
    end of each cycle).
    """
    data = nda.read(ndax_file_path)
    cycle_numbers = data['Cycle'].unique()

    # Elapsed seconds since the start of the test, per data row.
    time_utc = pd.to_datetime(data['Timestamp'])
    elapsed_seconds = (time_utc - time_utc.min()).dt.total_seconds()

    grouped = data.groupby('Cycle')
    max_charge = grouped['Charge_Capacity(mAh)'].max()
    max_discharge = grouped['Discharge_Capacity(mAh)'].max()

    charge_capacity = (max_charge / theoretical_capacity) * 100
    discharge_capacity = (max_discharge / theoretical_capacity) * 100
    coulombic_efficiency = (max_discharge / max_charge) * 100

    # End-of-cycle time: max elapsed seconds within each cycle, mirroring
    # process_eclab_mpr. (The previous code reindexed the raw per-row time
    # series by cycle *labels*, which picked arbitrary rows.)
    time_per_cycle = elapsed_seconds.groupby(data['Cycle']).max()

    processed_cycle_df = pd.DataFrame({
        'Cycle_Number': cycle_numbers,
        'Charge_Capacity': charge_capacity.reindex(cycle_numbers, fill_value=0),
        'Discharge_Capacity': discharge_capacity.reindex(cycle_numbers, fill_value=0),
        # fill_value=100 preserves the original default for cycles with no
        # capacity data -- TODO confirm this default is intended.
        'Coulombic_Efficiency': coulombic_efficiency.reindex(cycle_numbers, fill_value=100),
        'Time': time_per_cycle.reindex(cycle_numbers, fill_value=0),
    })
    return processed_cycle_df
def process_eclab_mpr(mpr_file_path, theoretical_capacity):
    """Parse an EC-Lab .mpr file and summarise capacities per full cycle.

    Capacities are expressed as a percentage of ``theoretical_capacity`` (mAh);
    coulombic efficiency is discharge / charge * 100 per cycle.
    """
    raw = pd.DataFrame(BioLogic.MPRfile(mpr_file_path).data)

    raw['Abs_Q_charge_discharge'] = raw['Q charge/discharge/mA.h'].abs()
    # Two half cycles make one full cycle; numbering starts at 1.
    raw['Full_Cycle_Number'] = ((raw['half cycle'] // 2) + 1).astype(int)

    # Even half cycles are treated as charge steps -- TODO confirm for this setup.
    charge_rows = raw['half cycle'] % 2 == 0
    per_cycle_charge = raw[charge_rows].groupby('Full_Cycle_Number')['Abs_Q_charge_discharge'].max()
    per_cycle_discharge = raw[~charge_rows].groupby('Full_Cycle_Number')['Abs_Q_charge_discharge'].max()

    charge_pct = (per_cycle_charge / theoretical_capacity) * 100
    discharge_pct = (per_cycle_discharge / theoretical_capacity) * 100
    efficiency = (discharge_pct / charge_pct) * 100

    # Keep every cycle that has either a charge or a discharge step.
    cycles = charge_pct.index.union(discharge_pct.index)
    end_times = raw.groupby('Full_Cycle_Number')['time/s'].max()

    return pd.DataFrame({
        'Cycle_Number': cycles,
        'Charge_Capacity': charge_pct.reindex(cycles, fill_value=0),
        'Discharge_Capacity': discharge_pct.reindex(cycles, fill_value=0),
        'Coulombic_Efficiency': efficiency.reindex(cycles),
        'Time': end_times.reindex(cycles, fill_value=0),
    })
|
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ft_strdup.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: rhusak <marvin@42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/11/07 12:05:27 by rhusak #+# #+# */
/* Updated: 2017/11/07 12:13:12 by rhusak ### ########.fr */
/* */
/* ************************************************************************** */
#include "libft.h"
/*
** Duplicate the NUL-terminated string s1 into freshly malloc'd memory.
** Returns the copy, or NULL if the allocation fails.
*/
char *ft_strdup(const char *s1)
{
	char *copy;
	int  len;
	int  i;

	len = 0;
	while (s1[len] != '\0')
		len++;
	copy = (char*)malloc(sizeof(*copy) * (len + 1));
	if (copy == NULL)
		return (NULL);
	i = -1;
	while (++i <= len)
		copy[i] = s1[i];
	return (copy);
}
|
import classnames from 'classnames';
import * as React from 'react';
import { injectIntl } from 'react-intl';
import { connect, MapDispatchToProps } from 'react-redux';
import { CustomInput } from '../../components';
import { checkValidBitcoinAddress, checkValidErc20Address } from '../../../helpers';
import { IntlProps } from '../../../index';
import {
beneficiariesCreate,
BeneficiaryBank,
RootState,
selectBeneficiariesCreateError,
selectBeneficiariesCreateSuccess,
selectMobileDeviceState,
} from '../../../modules';
import { CommonError } from '../../../modules/types';
import { NewModal } from '../NewModal';
// Props injected from the Redux store.
interface ReduxProps {
    beneficiariesAddError?: CommonError;
    beneficiariesAddSuccess: boolean;
    isMobileDevice: boolean;
}

// Bound action creators.
interface DispatchProps {
    createAddress: typeof beneficiariesCreate;
}

// Props supplied by the parent component.
interface OwnProps {
    currency: string;
    type: 'fiat' | 'coin';
    handleToggleAddAddressModal: () => void;
    handleToggleConfirmationModal: () => void;
    blockchainType: string;
}

// Form state for crypto beneficiaries.
interface CoinState {
    coinAddress: string;
    coinBeneficiaryName: string;
    coinDescription: string;
    coinAddressFocused: boolean;
    coinBeneficiaryNameFocused: boolean;
    coinDescriptionFocused: boolean;
    isInvalidAddress: boolean;
}

// Form state for fiat (bank account) beneficiaries.
interface FiatState {
    fiatName: string;
    fiatFullName: string;
    fiatAccountNumber: string;
    fiatBankName: string;
    fiatBankSwiftCode: string;
    fiatIntermediaryBankName: string;
    fiatIntermediaryBankSwiftCode: string;
    fiatNameFocused: boolean;
    fiatFullNameFocused: boolean;
    fiatAccountNumberFocused: boolean;
    fiatBankNameFocused: boolean;
    fiatBankSwiftCodeFocused: boolean;
    fiatIntermediaryBankNameFocused: boolean;
    fiatIntermediaryBankSwiftCodeFocused: boolean;
}

type Props = ReduxProps & DispatchProps & OwnProps & IntlProps;
type State = CoinState & FiatState;

// Initial values for every input and focus flag; also used to reset the modal.
const defaultState = {
    coinAddress: '',
    coinBeneficiaryName: '',
    coinDescription: '',
    coinAddressFocused: false,
    coinBeneficiaryNameFocused: false,
    coinDescriptionFocused: false,
    isInvalidAddress: false,
    fiatName: '',
    fiatFullName: '',
    fiatAccountNumber: '',
    fiatBankName: '',
    fiatBankSwiftCode: '',
    fiatIntermediaryBankName: '',
    fiatIntermediaryBankSwiftCode: '',
    fiatNameFocused: false,
    fiatFullNameFocused: false,
    fiatAccountNumberFocused: false,
    fiatBankNameFocused: false,
    fiatBankSwiftCodeFocused: false,
    fiatIntermediaryBankNameFocused: false,
    fiatIntermediaryBankSwiftCodeFocused: false,
};
class BeneficiariesAddModalComponent extends React.Component<Props, State> {
constructor(props: Props) {
super(props);
this.state = {
...defaultState,
};
}
public componentWillReceiveProps(nextProps: Props) {
const { beneficiariesAddError, beneficiariesAddSuccess } = this.props;
if (
(nextProps.beneficiariesAddError && !beneficiariesAddError) ||
(nextProps.beneficiariesAddSuccess && !beneficiariesAddSuccess)
) {
this.props.handleToggleAddAddressModal();
this.handleClearModalsInputs();
}
if (nextProps.beneficiariesAddSuccess && !beneficiariesAddSuccess) {
this.props.handleToggleConfirmationModal();
}
}
public render() {
const { type, isMobileDevice } = this.props;
const addModalClass = classnames('beneficiaries-add-address-modal', {
'beneficiaries-add-address-modal--coin': type === 'coin',
'beneficiaries-add-address-modal--fiat': type === 'fiat',
'td-modal': !isMobileDevice,
});
const { coinAddress, coinBeneficiaryName, isInvalidAddress } = this.state;
const isCoinButtonDisabled = !coinAddress || !coinBeneficiaryName || isInvalidAddress;
const { fiatName, fiatFullName, fiatAccountNumber, fiatBankName } = this.state;
const isFiatButtonDisabled = !fiatName || !fiatFullName || !fiatAccountNumber || !fiatBankName;
return (
<NewModal
show
onClose={this.props.handleToggleAddAddressModal}
title={this.props.intl.formatMessage({ id: 'page.body.wallets.beneficiaries.addAddressModal.header' })}
>
<div className={addModalClass} hidden={type !== 'coin'} style={{ fontSize: '12px' }}>
{isInvalidAddress ? (
<p style={{ fontSize: '12px', color: 'red' }}>
** Please enter <strong>{String(this.props.blockchainType).toUpperCase()}</strong> address
</p>
) : null}
<div className="mt-3">{this.renderEnterCoinAddressInput('coinAddress')}</div>
<div className="mt-3">{this.renderAddAddressModalBodyItem('coinBeneficiaryName')}</div>
<div className="mt-3">{this.renderAddAddressModalBodyItem('coinDescription', true)}</div>
<div className="mt-3">
<button
disabled={isCoinButtonDisabled}
className="w-100 green-btn"
onClick={this.handleSubmitAddAddressCoinModal}
>
{this.translate('page.body.wallets.beneficiaries.addAddressModal.body.button')}
</button>
</div>
</div>
<div hidden={type === 'coin'}>
{this.renderAddAddressModalBodyItem('fiatName')}
{this.renderAddAddressModalBodyItem('fiatFullName')}
{this.renderAddAddressModalBodyItem('fiatAccountNumber')}
{this.renderAddAddressModalBodyItem('fiatBankName')}
{this.renderAddAddressModalBodyItem('fiatBankSwiftCode', true)}
{this.renderAddAddressModalBodyItem('fiatIntermediaryBankName', true)}
{this.renderAddAddressModalBodyItem('fiatIntermediaryBankSwiftCode', true)}
<button
disabled={isFiatButtonDisabled}
hidden={type === 'coin'}
onClick={this.handleSubmitAddAddressFiatModal}
className="w-100 green-btn"
>
{this.translate('page.body.wallets.beneficiaries.addAddressModal.body.button')}
</button>
</div>
</NewModal>
);
}
private renderAddAddressModalBodyItem = (field: string, optional?: boolean) => {
const focusedClass = classnames('td-email-form__group', {
'td-email-form__group--focused': this.state[`${field}Focused`],
'td-email-form__group--optional': optional,
});
return (
<div key={field} className={focusedClass}>
<CustomInput
type="text"
label={this.translate(`page.body.wallets.beneficiaries.addAddressModal.body.${field}`)}
placeholder={this.translate(`page.body.wallets.beneficiaries.addAddressModal.body.${field}`)}
defaultLabel={field}
handleChangeInput={value => this.handleChangeFieldValue(field, value)}
inputValue={this.state[field]}
handleFocusInput={() => this.handleChangeFieldFocus(`${field}Focused`)}
classNameLabel="td-email-form__label"
classNameInput="td-email-form__input"
autoFocus={true}
isInvalid={false}
/>
</div>
);
};
private renderEnterCoinAddressInput = (field: string, optional?: boolean) => {
const focusedClass = classnames('td-email-form__group', {
'td-email-form__group--focused': this.state[`${field}Focused`],
'td-email-form__group--optional': optional,
});
const { isInvalidAddress } = this.state;
return (
<div key={field} className={focusedClass}>
<CustomInput
type="text"
label={this.translate(`page.body.wallets.beneficiaries.addAddressModal.body.${field}`)}
placeholder={this.translate(`page.body.wallets.beneficiaries.addAddressModal.body.${field}`)}
defaultLabel={field}
handleChangeInput={value => this.handleChangeAddAddressFieldValue(field, value)}
inputValue={this.state[field]}
handleFocusInput={() => this.handleChangeFieldFocus(`${field}Focused`)}
classNameLabel="td-email-form__label"
classNameInput="td-email-form__input"
autoFocus={true}
isInvalid={isInvalidAddress}
/>
</div>
);
};
private handleChangeAddAddressFieldValue = (key: string, value: string) => {
    const { blockchainType } = this.props;
    // Validate the address for networks we know how to check; the duplicated
    // per-network branches of the original are collapsed via case fall-through.
    switch (blockchainType) {
        // EVM-compatible chains share the ERC-20 address format.
        case 'eth-main':
        case 'bsc-main':
            this.setState({
                isInvalidAddress: !checkValidErc20Address(value),
            });
            break;
        // Dogecoin addresses are checked with the Bitcoin validator here,
        // matching the original behavior.
        case 'btc-main':
        case 'doge-main':
            this.setState({
                isInvalidAddress: !checkValidBitcoinAddress(value),
            });
            break;
        default:
            // Unknown networks are not validated client-side.
            break;
    }
    // @ts-ignore
    this.setState({
        [key]: value,
    });
};
// Stores a modal input value under a dynamic state key (no validation).
private handleChangeFieldValue = (key: string, value: string) => {
// @ts-ignore -- dynamic key is not statically typed on the state interface
this.setState({
[key]: value,
});
};
// Flip the boolean focus flag stored under the given dynamic state key.
private handleChangeFieldFocus = (key: string) => {
    // @ts-ignore -- dynamic key is not statically typed on the state interface
    this.setState(prevState => ({ [key]: !prevState[key] }));
};
// Reset every modal field back to its initial (default) value.
private handleClearModalsInputs = () => {
    const freshState = { ...defaultState };
    this.setState(freshState);
};
// Build and dispatch the create-beneficiary payload for a crypto address.
private handleSubmitAddAddressCoinModal = () => {
    const { currency } = this.props;
    const { coinAddress, coinBeneficiaryName, coinDescription } = this.state;
    // tslint:disable-next-line:no-any
    const payload: any = {
        currency: currency || '',
        name: coinBeneficiaryName,
        data: JSON.stringify({
            address: coinAddress,
        }),
        // The description is only attached when the user filled it in.
        ...(coinDescription ? { description: coinDescription } : {}),
    };
    this.props.createAddress(payload);
};
// Build and dispatch the create-beneficiary payload for a fiat bank account.
private handleSubmitAddAddressFiatModal = () => {
    const { currency } = this.props;
    const {
        fiatName,
        fiatFullName,
        fiatAccountNumber,
        fiatBankName,
        fiatBankSwiftCode,
        fiatIntermediaryBankName,
        fiatIntermediaryBankSwiftCode,
    } = this.state;
    // Mandatory bank details first; optional fields are appended only when
    // filled in, preserving the original key order in the serialized JSON.
    const data: BeneficiaryBank = {
        full_name: fiatFullName,
        account_number: fiatAccountNumber,
        bank_name: fiatBankName,
        ...(fiatBankSwiftCode ? { bank_swift_code: fiatBankSwiftCode } : {}),
        ...(fiatIntermediaryBankName ? { intermediary_bank_name: fiatIntermediaryBankName } : {}),
        ...(fiatIntermediaryBankSwiftCode
            ? { intermediary_bank_swift_code: fiatIntermediaryBankSwiftCode }
            : {}),
    };
    const payload = {
        currency: currency || '',
        name: fiatName,
        data: JSON.stringify(data),
    };
    this.props.createAddress(payload);
};
private translate = (id: string) => this.props.intl.formatMessage({ id });
}
// Maps the Redux store slices this modal needs onto component props.
const mapStateToProps = (state: RootState): ReduxProps => ({
beneficiariesAddError: selectBeneficiariesCreateError(state),
beneficiariesAddSuccess: selectBeneficiariesCreateSuccess(state),
isMobileDevice: selectMobileDeviceState(state),
});
// Exposes the beneficiary-creation action dispatcher to the component.
const mapDispatchToProps: MapDispatchToProps<DispatchProps, {}> = dispatch => ({
createAddress: payload => dispatch(beneficiariesCreate(payload)),
});
// Connected, intl-aware modal exported for use by wallet screens.
// tslint:disable-next-line:no-any
export const BeneficiariesAddModal = injectIntl(
connect(mapStateToProps, mapDispatchToProps)(BeneficiariesAddModalComponent) as any,
) as any;
|
<template>
  <a-modal :title="`Edit User`" v-model="visible" @cancel="closeModal" @ok="updateUser">
    <a-form :model="editedUser">
      <a-form-item label="Name">
        <a-input v-model="editedUser.name" />
      </a-form-item>
      <a-form-item label="Username">
        <a-input v-model="editedUser.username" />
      </a-form-item>
      <a-form-item label="Email">
        <a-input v-model="editedUser.email" />
      </a-form-item>
      <a-form-item label="Phone">
        <a-input v-model="editedUser.phone" />
      </a-form-item>
      <a-form-item label="Address">
        <a-input v-model="editedUser.address" />
      </a-form-item>
    </a-form>
  </a-modal>
</template>
<script>
import { Modal, Form, Input } from "ant-design-vue";

/**
 * Edit-user modal. Works on a local copy of the `user` prop so edits can be
 * cancelled without mutating the parent's object; emits `update` with the
 * edited copy on OK, and `close` on cancel.
 */
export default {
  components: {
    "a-modal": Modal,
    "a-form": Form,
    "a-input": Input,
    "a-form-item": Form.Item,
  },
  props: {
    user: {
      type: Object,
      default: () => ({}),
    },
    visible: Boolean,
  },
  data() {
    return {
      editedUser: { ...this.user },
    };
  },
  watch: {
    // FIX: data() runs only once, so without this watcher reopening the modal
    // for a different user kept showing the previously edited values.
    user(newUser) {
      this.editedUser = { ...newUser };
    },
  },
  methods: {
    closeModal() {
      this.$emit("close");
    },
    updateUser() {
      this.$emit("update", this.editedUser);
    },
  },
};
</script>
<style scoped></style>
|
package co.edu.unbosque.Controller;
import co.edu.unbosque.Model.Cliente;
import co.edu.unbosque.Service.EmailService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import jakarta.transaction.Transactional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST controller exposing the e-mail sending endpoint under {@code /correo}.
 *
 * <p>NOTE(review): the {@code "*"} entry in {@code @CrossOrigin} allows every
 * origin, which makes the two explicit localhost entries redundant — confirm
 * this is intentional before a production release.
 */
@Transactional
@CrossOrigin(origins = { "http://localhost:8090", "http://localhost:8080", "*" })
@RestController
@RequestMapping("/correo")
public class CorreoController {
// Service responsible for actually delivering e-mail messages.
private final EmailService emailService;
/**
 * Creates the controller.
 *
 * @param emailService the e-mail delivery service
 */
@Autowired
public CorreoController(EmailService emailService) {
this.emailService = emailService;
}
/**
 * Sends an e-mail message.
 *
 * <p>NOTE(review): the success message is returned unconditionally; any
 * failure inside {@code EmailService#enviarCorreo} that does not throw is
 * not reflected in the HTTP response.
 *
 * @param json the e-mail content as a JSON string
 * @return a message indicating the e-mail was sent successfully
 */
@PostMapping("/enviar")
@Operation(summary = "Enviar correo", description = "Envía un correo electrónico.")
@ApiResponses(value = {
@ApiResponse(responseCode = "200", description = "Correo enviado exitosamente"),
@ApiResponse(responseCode = "412", description = "Precondición fallida")
})
public String enviarCorreo(@RequestBody String json) {
emailService.enviarCorreo(json);
return "Correo enviado exitosamente";
}
}
|
CREATE-WORKGROUP() CREATE-WORKGROUP()
NAME
create-workgroup -
DESCRIPTION
Creates a workgroup in Amazon Redshift Serverless.
See also: AWS API Documentation
SYNOPSIS
create-workgroup
[--base-capacity <value>]
[--config-parameters <value>]
[--enhanced-vpc-routing | --no-enhanced-vpc-routing]
[--max-capacity <value>]
--namespace-name <value>
[--port <value>]
[--publicly-accessible | --no-publicly-accessible]
[--security-group-ids <value>]
[--subnet-ids <value>]
[--tags <value>]
--workgroup-name <value>
[--cli-input-json <value>]
[--generate-cli-skeleton <value>]
[--debug]
[--endpoint-url <value>]
[--no-verify-ssl]
[--no-paginate]
[--output <value>]
[--query <value>]
[--profile <value>]
[--region <value>]
[--version <value>]
[--color <value>]
[--no-sign-request]
[--ca-bundle <value>]
[--cli-read-timeout <value>]
[--cli-connect-timeout <value>]
OPTIONS
--base-capacity (integer)
The base data warehouse capacity of the workgroup in Redshift Pro-
cessing Units (RPUs).
--config-parameters (list)
An array of parameters to set for advanced control over a database.
The options are auto_mv , datestyle , enable_case_sensitive_identi-
fier , enable_user_activity_logging , query_group , search_path ,
require_ssl , use_fips_ssl , and query monitoring metrics that let
you define performance boundaries. For more information about query
monitoring rules and available metrics, see Query monitoring metrics
for Amazon Redshift Serverless .
(structure)
An array of key-value pairs to set for advanced control over
Amazon Redshift Serverless.
parameterKey -> (string)
The key of the parameter. The options are auto_mv , datestyle
, enable_case_sensitive_identifier , enable_user_activ-
ity_logging , query_group , search_path , require_ssl ,
use_fips_ssl , and query monitoring metrics that let you de-
fine performance boundaries. For more information about query
monitoring rules and available metrics, see Query monitoring
metrics for Amazon Redshift Serverless .
parameterValue -> (string)
The value of the parameter to set.
Shorthand Syntax:
parameterKey=string,parameterValue=string ...
JSON Syntax:
[
{
"parameterKey": "string",
"parameterValue": "string"
}
...
]
--enhanced-vpc-routing | --no-enhanced-vpc-routing (boolean)
The value that specifies whether to turn on enhanced virtual private
cloud (VPC) routing, which forces Amazon Redshift Serverless to
route traffic through your VPC instead of over the internet.
--max-capacity (integer)
The maximum data-warehouse capacity Amazon Redshift Serverless uses
to serve queries. The max capacity is specified in RPUs.
--namespace-name (string)
The name of the namespace to associate with the workgroup.
--port (integer)
The custom port to use when connecting to a workgroup. Valid port
ranges are 5431-5455 and 8191-8215. The default is 5439.
--publicly-accessible | --no-publicly-accessible (boolean)
A value that specifies whether the workgroup can be accessed from a
public network.
--security-group-ids (list)
An array of security group IDs to associate with the workgroup.
(string)
Syntax:
"string" "string" ...
--subnet-ids (list)
An array of VPC subnet IDs to associate with the workgroup.
(string)
Syntax:
"string" "string" ...
--tags (list)
An array of tag instances.
(structure)
A map of key-value pairs.
key -> (string)
The key to use in the tag.
value -> (string)
The value of the tag.
Shorthand Syntax:
key=string,value=string ...
JSON Syntax:
[
{
"key": "string",
"value": "string"
}
...
]
--workgroup-name (string)
The name of the created workgroup.
--cli-input-json (string) Performs service operation based on the JSON
string provided. The JSON string follows the format provided by --gen-
erate-cli-skeleton. If other arguments are provided on the command
line, the CLI values will override the JSON-provided values. It is not
possible to pass arbitrary binary values using a JSON-provided value as
the string will be taken literally.
--generate-cli-skeleton (string) Prints a JSON skeleton to standard
output without sending an API request. If provided with no value or the
value input, prints a sample input JSON that can be used as an argument
for --cli-input-json. If provided with the value output, it validates
the command inputs and returns a sample output JSON for that command.
GLOBAL OPTIONS
--debug (boolean)
Turn on debug logging.
--endpoint-url (string)
Override command's default URL with the given URL.
--no-verify-ssl (boolean)
By default, the AWS CLI uses SSL when communicating with AWS services.
For each SSL connection, the AWS CLI will verify SSL certificates. This
option overrides the default behavior of verifying SSL certificates.
--no-paginate (boolean)
Disable automatic pagination.
--output (string)
The formatting style for command output.
o json
o text
o table
--query (string)
A JMESPath query to use in filtering the response data.
--profile (string)
Use a specific profile from your credential file.
--region (string)
The region to use. Overrides config/env settings.
--version (string)
Display the version of this tool.
--color (string)
Turn on/off color output.
o on
o off
o auto
--no-sign-request (boolean)
Do not sign requests. Credentials will not be loaded if this argument
is provided.
--ca-bundle (string)
The CA certificate bundle to use when verifying SSL certificates. Over-
rides config/env settings.
--cli-read-timeout (int)
The maximum socket read time in seconds. If the value is set to 0, the
socket read will be blocking and not timeout. The default value is 60
seconds.
--cli-connect-timeout (int)
The maximum socket connect time in seconds. If the value is set to 0,
the socket connect will be blocking and not timeout. The default value
is 60 seconds.
OUTPUT
workgroup -> (structure)
The created workgroup object.
baseCapacity -> (integer)
The base data warehouse capacity of the workgroup in Redshift
Processing Units (RPUs).
configParameters -> (list)
An array of parameters to set for advanced control over a data-
base. The options are auto_mv , datestyle , enable_case_sensi-
tive_identifier , enable_user_activity_logging , query_group ,
search_path , require_ssl , use_fips_ssl , and query monitoring
metrics that let you define performance boundaries. For more in-
formation about query monitoring rules and available metrics,
see Query monitoring metrics for Amazon Redshift Serverless .
(structure)
An array of key-value pairs to set for advanced control over
Amazon Redshift Serverless.
parameterKey -> (string)
The key of the parameter. The options are auto_mv , dat-
estyle , enable_case_sensitive_identifier , en-
able_user_activity_logging , query_group , search_path ,
require_ssl , use_fips_ssl , and query monitoring metrics
that let you define performance boundaries. For more in-
formation about query monitoring rules and available met-
rics, see Query monitoring metrics for Amazon Redshift
Serverless .
parameterValue -> (string)
The value of the parameter to set.
creationDate -> (timestamp)
The creation date of the workgroup.
crossAccountVpcs -> (list)
A list of VPCs. Each entry is the unique identifier of a virtual
private cloud with access to Amazon Redshift Serverless. If all
of the VPCs for the grantee are allowed, it shows an asterisk.
(string)
customDomainCertificateArn -> (string)
The custom domain names certificate Amazon resource name (ARN).
customDomainCertificateExpiryTime -> (timestamp)
The expiration time for the certificate.
customDomainName -> (string)
The custom domain name associated with the workgroup.
endpoint -> (structure)
The endpoint that is created from the workgroup.
address -> (string)
The DNS address of the VPC endpoint.
port -> (integer)
The port that Amazon Redshift Serverless listens on.
vpcEndpoints -> (list)
An array of VpcEndpoint objects.
(structure)
The connection endpoint for connecting to Amazon Redshift
Serverless through the proxy.
networkInterfaces -> (list)
One or more network interfaces of the endpoint. Also
known as an interface endpoint.
(structure)
Contains information about a network interface in
an Amazon Redshift Serverless managed VPC end-
point.
availabilityZone -> (string)
The availability Zone.
networkInterfaceId -> (string)
The unique identifier of the network interface.
privateIpAddress -> (string)
The IPv4 address of the network interface
within the subnet.
subnetId -> (string)
The unique identifier of the subnet.
vpcEndpointId -> (string)
The connection endpoint ID for connecting to Amazon
Redshift Serverless.
vpcId -> (string)
The VPC identifier that the endpoint is associated
with.
enhancedVpcRouting -> (boolean)
The value that specifies whether to enable enhanced virtual pri-
vate cloud (VPC) routing, which forces Amazon Redshift Server-
less to route traffic through your VPC.
maxCapacity -> (integer)
The maximum data-warehouse capacity Amazon Redshift Serverless
uses to serve queries. The max capacity is specified in RPUs.
namespaceName -> (string)
The namespace the workgroup is associated with.
patchVersion -> (string)
The patch version of your Amazon Redshift Serverless workgroup.
For more information about patch versions, see Cluster versions
for Amazon Redshift .
port -> (integer)
The custom port to use when connecting to a workgroup. Valid
port ranges are 5431-5455 and 8191-8215. The default is 5439.
publiclyAccessible -> (boolean)
A value that specifies whether the workgroup can be accessible
from a public network.
securityGroupIds -> (list)
An array of security group IDs to associate with the workgroup.
(string)
status -> (string)
The status of the workgroup.
subnetIds -> (list)
An array of subnet IDs the workgroup is associated with.
(string)
workgroupArn -> (string)
The Amazon Resource Name (ARN) that links to the workgroup.
workgroupId -> (string)
The unique identifier of the workgroup.
workgroupName -> (string)
The name of the workgroup.
workgroupVersion -> (string)
The Amazon Redshift Serverless version of your workgroup. For
more information about Amazon Redshift Serverless versions, see
Cluster versions for Amazon Redshift
(https://docs.aws.amazon.com/redshift/latest/mgmt/cluster-versions.html).
CREATE-WORKGROUP()
|
import { Link, useLoaderData, useNavigation } from "react-router-dom";
import Header from '../components/Header'
import Footer from '../components/Footer'
import { singleSelectorPath, reloadSelectorPath, gridSelectorPath } from '../util'
// TODO: import as much as possible from S.js
export default function ArtistSingle(props){
const nav = useNavigation()
const loader = useLoaderData();
const page = loader?.page || 0;
const item = loader.nftGallery[page];
let singlePath = singleSelectorPath(loader.nftGallery, loader.nftGalleryCursor, "single", page)
let reloadPath = reloadSelectorPath(loader.nftGallery, loader.nftGalleryCursor, "single", page)
let gridPath = gridSelectorPath(loader.nftGallery, loader.nftGalleryCursor, "single", page)
if (nav.state === "loading") {
return(
<div className="nft-loading" />
)
} else if (!item) {
// this should not normally happen unless someone types in a bad URL by hand?
return(
<div className="no-nft" />
)
} else {
return (
<div className="rnd1">
<Header viewMode="1x" page={page} nftGalleryCursor={loader?.nftGalleryCursor} nftGallery={loader?.nftGallery} walletSelector={props.walletSelector} walletClick={props.walletClick}
singlePath={singlePath}
reloadPath={reloadPath}
gridPath={gridPath}
/>
<div className="artist-info container text-center mt-5">
<div className="row">
<div className="col">
ARTIST: {loader.artistId}
</div>
</div>
</div>
<div id="maincontent" className="maincontent text-center mt-5">
<div className="nft-single nft-1-view">
<Link to={'/id/' + item.metadata_id}>
{/* duplicated from routes/Id */}
<div className="nft-1up">
<div className="row align-items-center">
<div className="nft-col col-sm-12">
<img src={item.media_url} className="nft-img img-fluid"/>
</div>
</div>
</div>
</Link>
</div>
</div>
<Footer viewMode="1x" page={page} nftGalleryCursor={loader?.nftGalleryCursor} nftGallery={loader?.nftGallery}
singlePath={singlePath}
reloadPath={reloadPath}
gridPath={gridPath}
/>
</div>
)
}
}
|
/*
* ************************************************************************
*
* Copyright: Robert Bosch Power Tools GmbH, 2018 - 2023
*
* ************************************************************************
*/
package com.bosch.pt.iot.smartsite.project.taskattachment.boundary
import com.bosch.pt.csm.cloud.common.exceptions.AggregateNotFoundException
import com.bosch.pt.iot.smartsite.application.SmartSiteMockKTest
import com.bosch.pt.iot.smartsite.common.i18n.Key.TASK_VALIDATION_ERROR_NOT_FOUND
import com.bosch.pt.iot.smartsite.project.attachment.boundary.AttachmentService
import com.bosch.pt.iot.smartsite.project.attachment.model.AttachmentImageResolution
import com.bosch.pt.iot.smartsite.project.attachment.repository.AttachmentRepository
import com.bosch.pt.iot.smartsite.project.task.domain.TaskId
import com.bosch.pt.iot.smartsite.project.task.shared.repository.TaskRepository
import com.bosch.pt.iot.smartsite.project.taskattachment.repository.TaskAttachmentRepository
import com.bosch.pt.iot.smartsite.test.RandomData.multiPartFile
import com.bosch.pt.iot.smartsite.util.withMessageKey
import io.mockk.Called
import io.mockk.every
import io.mockk.impl.annotations.InjectMockKs
import io.mockk.impl.annotations.MockK
import io.mockk.verify
import java.net.URL
import java.util.TimeZone
import java.util.UUID.randomUUID
import org.assertj.core.api.Assertions.assertThatExceptionOfType
import org.junit.jupiter.api.Test
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.EnumSource
import org.springframework.util.IdGenerator
import org.springframework.web.multipart.MultipartFile
// Unit tests for TaskAttachmentService using MockK mocks for all collaborators.
@SmartSiteMockKTest
class TaskAttachmentServiceTest {
// Collaborators of the service, replaced by mocks and injected via @InjectMockKs.
@MockK lateinit var idGenerator: IdGenerator
@MockK lateinit var taskRepository: TaskRepository
@Suppress("Unused", "UnusedPrivateMember")
@MockK
lateinit var attachmentRepository: AttachmentRepository
@MockK lateinit var taskAttachmentRepository: TaskAttachmentRepository
@MockK lateinit var attachmentService: AttachmentService
// "cut" = class under test.
@InjectMockKs lateinit var cut: TaskAttachmentService
// Verifies that blob-URL generation is delegated exactly once to
// AttachmentService, for every image resolution variant.
@ParameterizedTest
@EnumSource(value = AttachmentImageResolution::class)
fun `generate blob url calls the attachment boundary service for`(
imageResolution: AttachmentImageResolution
) {
val attachmentIdentifier = randomUUID()
every { attachmentService.generateBlobAccessUrl(attachmentIdentifier, imageResolution) } returns
URL("https://blobstore.azure.com")
cut.generateBlobAccessUrl(attachmentIdentifier, imageResolution)
verify(exactly = 1) {
attachmentService.generateBlobAccessUrl(attachmentIdentifier, imageResolution)
}
}
// Verifies that saving an attachment for an unknown task id raises
// AggregateNotFoundException carrying the expected message key, and that no
// further work (id generation, persistence) is performed.
@Test
fun `save a task attachment for a non found task fails`() {
val taskIdentifier = TaskId()
val multipartFile: MultipartFile = multiPartFile()
// Simulate the task not existing in the repository.
every { taskRepository.findOneByIdentifier(taskIdentifier) } returns null
assertThatExceptionOfType(AggregateNotFoundException::class.java)
.isThrownBy {
cut.saveTaskAttachment(
multipartFile.bytes, taskIdentifier, "Test_file", null, TimeZone.getDefault())
}
.withMessageKey(TASK_VALIDATION_ERROR_NOT_FOUND)
verify(exactly = 1) { taskRepository.findOneByIdentifier(taskIdentifier) }
verify { listOf(idGenerator, taskAttachmentRepository) wasNot Called }
}
}
|
Hip Dysplasia Detection (HDC)
The objective of this ML project is to predict whether or not an X-ray (RX) image of a hip represents a positive HDC case.
Project Organization
------------
├── LICENSE
├── Makefile <- Makefile with commands like `make data` or `make train`
├── README.md <- The top-level README for developers using this project.
├── data
│ ├── external <- Data from third party sources.
│ ├── interim <- Intermediate data that has been transformed.
│ ├── processed <- The final, canonical data sets for modeling.
│ └── raw <- The original, immutable data dump.
│
├── docs <- A default Sphinx project; see sphinx-doc.org for details
│
├── models <- Trained and serialized models, model predictions, or model summaries
│
├── notebooks <- Jupyter notebooks. Naming convention is a number (for ordering),
│ the creator's initials, and a short `-` delimited description, e.g.
│ `1.0-jqp-initial-data-exploration`.
│
├── references <- Data dictionaries, manuals, and all other explanatory materials.
│
├── reports <- Generated analysis as HTML, PDF, LaTeX, etc.
│ └── figures <- Generated graphics and figures to be used in reporting
│
├── requirements.txt <- The requirements file for reproducing the analysis environment, e.g.
│ generated with `pip freeze > requirements.txt`
│
├── setup.py <- makes project pip installable (pip install -e .) so src can be imported
├── src <- Source code for use in this project.
│ ├── __init__.py <- Makes src a Python module
│ │
│ ├── data <- Scripts to download or generate data
│ │ └── make_dataset.py
│ │
│ ├── features <- Scripts to turn raw data into features for modeling
│ │ └── build_features.py
│ │
│ ├── models <- Scripts to train models and then use trained models to make
│ │ │ predictions
│ │ ├── predict_model.py
│ │ └── train_model.py
│ │
│ └── visualization <- Scripts to create exploratory and results oriented visualizations
│ └── visualize.py
│
└── tox.ini <- tox file with settings for running tox; see tox.readthedocs.io
How to setup project
-------------
NOTE: Python version used in development stage: 3.10.0.
Commands can slightly vary depending on your OS
On main directory, type:
- ``py -m venv .env``
- ``.\.env\Scripts\activate``
- ``py -m pip install --upgrade pip``
- ``pip install -r .\requirements.txt``
Once the dependencies are installed:
- To run the notebook, select the environment as kernel and run the cells sequentially
- The makefile contains all commands to run the project processes.
- If you can't run the makefile with your IDE you can copy the command and run it in the console. As an example: ``python src/models/train_model.py data/processed/ models/ src/models/logs/ 224 ConvModel 1 --random_state 49``
|
// Angular
import {Injectable} from '@angular/core';
// RxJS
import {map, mergeMap, tap} from 'rxjs/operators';
// NGRX
import {Actions, Effect, ofType} from '@ngrx/effects';
import {Store} from '@ngrx/store';
// Services
import {WalletService} from "../_services/wallet.service";
// State
import {AppState} from '../../../core/reducers';
import {
WalletActionTypes,
WalletsActionToggleLoading,
WalletsPageToggleLoading,
WalletsSaved,
WalletsUpdatedAmount
} from "../_actions/wallet.actions";
// NgRx effects for wallet persistence. Shows/hides loading indicators around
// the WalletService calls.
@Injectable()
export class WalletEffects {
// Pre-built loading-toggle actions.
// NOTE(review): the "show page" dispatcher creates a WalletsActionToggleLoading
// while the "hide page" dispatcher creates a WalletsPageToggleLoading — the pair
// is mismatched; confirm against the reducer which flag each is meant to drive.
// (The "Distpatcher" spelling is kept as-is since it is part of the field names.)
showPageLoadingDistpatcher = new WalletsActionToggleLoading({isLoading: true});
hidePageLoadingDistpatcher = new WalletsPageToggleLoading({isLoading: false});
showActionLoadingDistpatcher = new WalletsActionToggleLoading({isLoading: true});
hideActionLoadingDistpatcher = new WalletsActionToggleLoading({isLoading: false});
// @Effect()
// loadSettingsPage$ = this.actions$
// .pipe(
// ofType<AllSettingsRequested>(SettingActionTypes.AllSettingsRequested),
// mergeMap(() => {
// this.store.dispatch(this.showPageLoadingDistpatcher);
// return this.service.all();
// }),
// map(response => {
// return new AllSettingsRequested({
// settings: response.items
// });
// }),
// );
// Persists wallets when a WalletsSaved action is seen, then emits the
// hide-action-loader action.
// FIXME(review): this effect listens for WalletsSaved AND re-dispatches
// WalletsSaved with the service response inside tap(), which re-triggers this
// same effect — this looks like an infinite dispatch loop. The inner dispatch
// should probably use a distinct "save succeeded" action.
@Effect()
SaveWallets$ = this.actions$
.pipe(
ofType<WalletsSaved>(WalletActionTypes.WalletsSaved),
mergeMap(({payload}) => {
return this.service.save(payload.wallets).pipe(
tap(res => {
this.store.dispatch(new WalletsSaved({wallets: res}));
})
);
}),
map(() => {
return this.hideActionLoadingDistpatcher;
}),
);
// Pushes updated wallet amounts to the service.
// NOTE(review): the loader shown here (showPageLoadingDistpatcher) and the one
// returned on completion (hideActionLoadingDistpatcher) target different
// toggle actions — verify the spinner is actually cleared.
@Effect()
UpdateWallets$ = this.actions$
.pipe(
ofType<WalletsUpdatedAmount>(WalletActionTypes.WalletsUpdatedAmount),
mergeMap(({payload}) => {
this.store.dispatch(this.showPageLoadingDistpatcher);
return this.service.updateAmount(payload.wallets);
}),
map(() => {
return this.hideActionLoadingDistpatcher;
}),
);
constructor(private actions$: Actions, private service: WalletService, private store: Store<AppState>) {
}
}
|
---
post_id: 5679
title: 'Lire les corrections de lunettes'
date: '2022-02-27T22:10:40+01:00'
last_modified_at: '2022-02-27T23:03:52+01:00'
author: 'Rémi Peyronnet'
layout: post
guid: '/?p=5679'
slug: lire-les-corrections-de-lunettes
permalink: /2022/02/lire-les-corrections-de-lunettes/
image: /files/ophtalmologue-lunettes-correctrice-stockpack-adobe-stock.jpg
categories:
- Divers
tags:
- AppScript
- Astigmatie
- Calculette
- GitHub
- Google
- Myopie
- Ophtalmologie
- Spreadsheets
- Yeux
lang: fr
csp-frame-src: 'rpeyron.github.io www.lprp.fr'
---
Il existe plusieurs notations pour les verres de lunettes, et bien sûr les ophtalmologistes et les opticiens n’utilisent pas les mêmes. Pour s’y retrouver, il faut apprendre à lire et convertir les différentes écritures.
Je ne suis ni opticien, ni ophtalmologue, ni professionnel de la santé, les éléments ci-dessous sont issus de rapides recherches sur Internet et ne sauraient constituer une quelconque expertise médicale.
{: .notice-note title="Disclaimer" style="--notice-color: #91110c;"}
# Normaliser une correction
Voici un petit outil pour obtenir automatiquement une écriture normalisée qui correspond à celle utilisée par les opticiens.
<iframe allow="fullscreen" height="600" loading="lazy" src="https://rpeyron.github.io/verres/" width="600"></iframe>
{: .div-center}
Le code source de cette calculette est disponible sur [github](https://www.github.com/rpeyron/verres).
# Explications
La notation des verres est composée de plusieurs parties suivant votre correction :
- Pour tous :
- La sphère : il s’agit de la courbure principale du verre ; elle est exprimée en dioptries ; si elle est négative vous êtes myope (verres concaves) , si elle est positive vous êtes hypermétrope (verres convexes)
- Pour les astigmates seulement :
- Le cylindre : il s’agit de la correction du cylindre de correction de l’astigmatie, exprimée en dioptries
- L’angle : l’angle de l’axe du cylindre dans le plan du verre, exprimé en degrés
- Pour les presbytes (verres progressifs) :
- L’addition : il s’agit de la correction ajoutée à la sphère pour la zone de vision de près, exprimée en dioptries.
Pour une correction simple, seule la sphère est exprimée. Là encore, suivant les ophtalmologues, tous ne notent pas les dioptries de la même façon. Certains se passeront de la virgule pour noter la valeur multipliée par 100. Ainsi, 1.5 dioptrie sera noté 150.
C’est pour l’astigmatie que la variété est plus grande, ainsi pour une même correction, on peut trouver :
- -3.00 (+1.00 à 110°) dite notation à cylindre positif, fréquent chez les opticiens
- -2.00 (-1.00 à 20°) dite notation à cylindre négatif, fréquent chez les ophtalmologues
- (20° -1.00) -2.00 toujours une notation à cylindre négatif, mais avec le cylindre qui précède la sphère
- 20 -100 -200 également une notation à cylindre négatif avec le cylindre qui précède, mais sans les parenthèses et avec les dioptries multipliées par 100
Pour passer d’une notation à cylindre positif vers une notation à cylindre négatif, il faut faire la transposition suivante : ajouter le cylindre à la sphère, inverser le cylindre, ajouter 90° à l’angle (et on retranchera 180° si cela dépasse 180°) ; l’article en référence (1) explique plus en détail ce mécanisme de transposition. L’article (2) permet de comprendre pourquoi ces transpositions donnent la même correction optique.
Enfin avec l’astigmatie, il n’est pas évident de savoir si la myopie évolue globalement ou non suivant la répartition. Pour cela il est utile de regarder la valeur moyenne de la sphère en ajoutant la sphère et la moitié du cylindre.
Références :
- (1) <http://www.thomassinclairlabs.com/vue/transposition.html> ; pour savoir comment transposer un cylindre négatif en cylindre positif
- (2) <https://www.gatinel.com/recherche-formation/astigmatisme/astigmatisme-definitions-et-formulations/> et [https://www.gatinel.com/recherche-formation/astigmatisme/astigmatisme-representation-trigonometrique/](https://www.gatinel.com/recherche-formation/astigmatisme/astigmatisme-representation-trigonometrique/) ; pour comprendre ce que ça veut dire et en quoi les écritures donnent un résultat identique (et plein d’autres articles intéressants)
- (3) <https://www.essiloracademy.eu/en/publications/ophthalmic-optics-files> ; les manuels de formation Essilor des opticiens en réfraction, accessibles librement, pour devenir un pro des lunettes
# Fonctions dans Google Spreadsheets
Cette calculatrice est certes pratique, mais pas si vous souhaitez l’utiliser un grand nombre de fois. J’utilise d’habitude LibreOffice comme tableur, mais il ne semble pas qu’il permette d’intégrer facilement une nouvelle fonction en javascript. Or comme j’ai écrit le code ci-dessus en javascript, je n’ai pas envie de le réécrire en Basic de LibreOffice. Fort heureusement, Google Spreadsheet permet très simplement d’ajouter des fonctions en javascript.
Dans un nouveau document Google Spreadsheet, cliquez sur Extensions / AppScripts :
{: .img-center}
Puis dans l’éditeur de script qui s’est ouvert, copier le fichier disponible à cette adresse : https://github.com/rpeyron/verres/blob/main/src/lens.js
{: .img-center}
Et c’est tout ! Les fonctions sont maintenant disponibles dans votre tableur (voir exemple dans la 1ère image)
|
/*
Copyright (C) 2022-2023 ApeCloud Co., Ltd
This file is part of KubeBlocks project
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package model
import (
"slices"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
"sigs.k8s.io/controller-runtime/pkg/client"
"github.com/apecloud/kubeblocks/pkg/controller/builder"
"github.com/apecloud/kubeblocks/pkg/controller/graph"
)
// Ginkgo suite for the model package's graph client. The pattern throughout:
// every mutation applied to `dag` via graphCli is mirrored by hand on
// `dagExpected`, then the two DAGs are compared with dag.Equals(..., DefaultLess).
var _ = Describe("graph client test.", func() {
const (
namespace = "foo"
name = "bar"
)
Context("GraphWriter", func() {
It("should work well", func() {
graphCli := NewGraphClient(nil)
dag := graph.NewDAG()
dagExpected := graph.NewDAG()
root := builder.NewStatefulSetBuilder(namespace, name).GetObject()
By("init root vertex")
// Root() is expected to insert a single vertex carrying the status action.
graphCli.Root(dag, root.DeepCopy(), root, ActionStatusPtr())
dagExpected.AddVertex(&ObjectVertex{Obj: root, OriObj: root, Action: ActionStatusPtr()})
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeTrue())
By("create object")
obj0 := builder.NewPodBuilder(namespace, name+"0").GetObject()
obj1 := builder.NewPodBuilder(namespace, name+"1").GetObject()
obj2 := builder.NewPodBuilder(namespace, name+"2").GetObject()
graphCli.Create(dag, obj0)
graphCli.Create(dag, obj1)
graphCli.Create(dag, obj2)
// DependOn should add an extra edge obj1 -> obj2 on top of the root edges.
graphCli.DependOn(dag, obj1, obj2)
v0 := &ObjectVertex{Obj: obj0, Action: ActionCreatePtr()}
v1 := &ObjectVertex{Obj: obj1, Action: ActionCreatePtr()}
v2 := &ObjectVertex{Obj: obj2, Action: ActionCreatePtr()}
dagExpected.AddConnectRoot(v0)
dagExpected.AddConnectRoot(v1)
dagExpected.AddConnectRoot(v2)
dagExpected.Connect(v1, v2)
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeTrue())
By("update&delete&status object")
// Re-registering an already-present object with another verb only
// overwrites the vertex's action; the DAG shape stays the same.
graphCli.Status(dag, obj0, obj0.DeepCopy())
graphCli.Update(dag, obj1, obj1.DeepCopy())
graphCli.Delete(dag, obj2)
v0.Action = ActionStatusPtr()
v1.Action = ActionUpdatePtr()
v2.Action = ActionDeletePtr()
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeTrue())
By("replace an existing object")
// With ReplaceIfExistingOption the vertex for the equal object is swapped
// to hold newObj1, while the DAG itself compares equal (verified below by
// pointer-identity lookups in FindAll's result).
newObj1 := builder.NewPodBuilder(namespace, name+"1").GetObject()
graphCli.Update(dag, nil, newObj1, ReplaceIfExistingOption)
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeTrue())
podList := graphCli.FindAll(dag, &corev1.Pod{})
Expect(podList).Should(HaveLen(3))
Expect(slices.IndexFunc(podList, func(obj client.Object) bool {
return obj == newObj1
})).Should(BeNumerically(">=", 0))
By("noop")
graphCli.Noop(dag, obj0)
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeFalse())
v0.Action = ActionNoopPtr()
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeTrue())
By("patch")
graphCli.Patch(dag, obj0.DeepCopy(), obj0)
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeFalse())
v0.Action = ActionPatchPtr()
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeTrue())
By("find objects exist")
// After the replacement above, the DAG must hold obj0, newObj1 and obj2,
// and must no longer hold the replaced obj1 (compared by pointer identity).
podList = graphCli.FindAll(dag, &corev1.Pod{})
Expect(podList).Should(HaveLen(3))
for _, object := range []client.Object{obj0, newObj1, obj2} {
Expect(slices.IndexFunc(podList, func(obj client.Object) bool {
return obj == object
})).Should(BeNumerically(">=", 0))
}
Expect(slices.IndexFunc(podList, func(obj client.Object) bool {
return obj == obj1
})).Should(BeNumerically("<", 0))
By("find objects not existing")
Expect(graphCli.FindAll(dag, &appsv1.Deployment{})).Should(HaveLen(0))
By("find objects different with the given type")
// HaveDifferentTypeWithOption inverts the type filter: everything that is
// NOT a StatefulSet — i.e. the same three pods found above.
newPodList := graphCli.FindAll(dag, &appsv1.StatefulSet{}, HaveDifferentTypeWithOption)
Expect(newPodList).Should(HaveLen(3))
// should have same result as podList
for _, object := range podList {
Expect(slices.IndexFunc(newPodList, func(obj client.Object) bool {
return obj == object
})).Should(BeNumerically(">=", 0))
}
By("find nil should return empty list")
Expect(graphCli.FindAll(dag, nil)).Should(HaveLen(0))
By("find all objects")
// nil type + HaveDifferentTypeWithOption matches every vertex: 3 pods + root.
objectList := graphCli.FindAll(dag, nil, HaveDifferentTypeWithOption)
Expect(objectList).Should(HaveLen(4))
allObjects := podList
allObjects = append(allObjects, root)
for _, object := range allObjects {
Expect(slices.IndexFunc(objectList, func(obj client.Object) bool {
return obj == object
})).Should(BeNumerically(">=", 0))
}
})
It("post init root vertex", func() {
graphCli := NewGraphClient(nil)
dag := graph.NewDAG()
dagExpected := graph.NewDAG()
By("create none root vertex first")
obj := builder.NewPodBuilder(namespace, name+"0").GetObject()
graphCli.Root(dag, obj, obj, ActionCreatePtr())
v := &ObjectVertex{OriObj: obj, Obj: obj, Action: ActionCreatePtr()}
dagExpected.AddVertex(v)
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeTrue())
By("post create root vertex")
// Registering the real root afterwards should wire it in above the
// previously-added vertex.
root := builder.NewStatefulSetBuilder(namespace, name).GetObject()
graphCli.Root(dag, root.DeepCopy(), root, ActionStatusPtr())
rootVertex := &ObjectVertex{Obj: root, OriObj: root, Action: ActionStatusPtr()}
dagExpected.AddVertex(rootVertex)
dagExpected.Connect(rootVertex, v)
Expect(dag.Equals(dagExpected, DefaultLess)).Should(BeTrue())
})
It("IsAction should work", func() {
graphCli := NewGraphClient(nil)
dag := graph.NewDAG()
By("create root vertex")
obj := builder.NewPodBuilder(namespace, name+"0").GetObject()
graphCli.Root(dag, obj, obj, ActionStatusPtr())
Expect(graphCli.IsAction(dag, obj, ActionStatusPtr())).Should(BeTrue())
Expect(graphCli.IsAction(dag, obj, ActionCreatePtr())).Should(BeFalse())
By("vertex not existing")
// Objects never registered in the DAG match no action at all.
Expect(graphCli.IsAction(dag, &corev1.Pod{}, ActionStatusPtr())).Should(BeFalse())
Expect(graphCli.IsAction(dag, &corev1.Pod{}, ActionCreatePtr())).Should(BeFalse())
By("nil action")
graphCli.Root(dag, obj, obj, nil)
Expect(graphCli.IsAction(dag, obj, nil)).Should(BeTrue())
Expect(graphCli.IsAction(dag, obj, ActionCreatePtr())).Should(BeFalse())
})
})
})
|
<script>
import {defineComponent, reactive, ref} from 'vue'
import {CodeJudgeStatus, JudgeStatus} from "../../constants";
import CodeEditor from "@/components/codeEditor.vue";
export default defineComponent({
  name: "problemSubmission",
  components: {CodeEditor},
  computed: {
    // Expose the JudgeStatus constant map to the template.
    JudgeStatus() {
      return JudgeStatus
    }
  },
  data() {
    return {
      submissionList: [],          // current page of submissions
      tableLoading: true,          // table spinner until the first fetch settles
      page_size: 20,
      page_num: 1,
      total: 1,
      pages: 1,
      submissionInfoDialog: false  // controls the detail dialog
    }
  },
  setup() {
    // BUG FIX: reactive() only works on objects — reactive('') on a primitive
    // string returns the raw value and silently loses reactivity, and
    // re-assigning a reactive({}) binding through `this` replaces the proxy
    // instead of updating it (which is why the original code needed
    // $forceUpdate()). ref() handles both cases; refs returned from setup()
    // are auto-unwrapped on `this`, so the options-API code below keeps
    // working unchanged.
    const submissionInfo = ref({})
    const code_source = ref('')
    const language = ref('')
    return {
      submissionInfo, code_source, language
    }
  },
  created() {
    this.getSubmissionList()
  },
  methods: {
    // Expose the CodeJudgeStatus constant map to the template.
    CodeJudgeStatus() {
      return CodeJudgeStatus
    },
    // Load one page of submissions for the current problem (route param `pid`).
    getSubmissionList() {
      this.$axios.get('/submission/problem', {
        params: {
          pid: this.$route.params.pid,
          size: this.page_size, page: this.page_num
        }
      }).then(res => {
        this.submissionList = res.data.items
        this.page_size = res.data.size
        this.page_num = res.data.page
        this.total = res.data.total
        this.pages = res.data.pages
      }).finally(() => {
        this.tableLoading = false
      })
    },
    // Row-click handler: fetch the full detail of the clicked submission and
    // open the detail dialog.
    getSubmissionDetail(r, c, e) {
      this.$axios.get('/submission/status', {params: {submission_id: r.id}}).then(res => {
        this.submissionInfo = JSON.parse(JSON.stringify(res.data))
        this.code_source = this.submissionInfo.code_source
        this.language = this.submissionInfo.language
        this.submissionInfoDialog = true
        // $forceUpdate() is no longer needed: the setup() bindings are proper
        // refs now and update reactively.
      })
    }
  }
})
</script>
<template>
<!-- Submissions table for the current problem; clicking a row opens the detail dialog. -->
<el-table style="cursor: pointer;border: 1px solid var(--el-border-color);" stripe :data="submissionList"
v-loading="tableLoading" @row-click="getSubmissionDetail">
<el-table-column label="ID" prop="id"></el-table-column>
<el-table-column label="language" prop="language"></el-table-column>
<el-table-column label="create_time" prop="create_time">
<template #default="slot">
{{ $formatDate(slot.row.create_time) }}
</template>
</el-table-column>
<!-- Judge result: +2 offsets the numeric result code into the CodeJudgeStatus lookup table. -->
<el-table-column label="result" prop="result">
<template #default="slot">
<el-text :type="CodeJudgeStatus()[parseInt(slot.row.result) + 2].type"
style="font-weight: bold;"
>
{{ CodeJudgeStatus()[parseInt(slot.row.result) + 2].name }}
</el-text>
</template>
</el-table-column>
</el-table>
<!-- Server-side pagination; page changes re-fetch the list. -->
<el-pagination
style="margin-top: 20px;"
v-model:current-page="page_num"
:hide-on-single-page="false"
:page-size="page_size"
layout="prev, pager, next"
:page-count="pages"
@current-change="getSubmissionList"
></el-pagination>
<!-- Detail dialog; v-if ensures children re-render with fresh data each time it opens. -->
<el-dialog title="Submission" v-model="submissionInfoDialog">
<el-descriptions column="1" border
v-if="submissionInfoDialog"
>
<el-descriptions-item label="id">
{{ submissionInfo.id }}
</el-descriptions-item>
<el-descriptions-item label="language">{{ submissionInfo.language }}</el-descriptions-item>
<el-descriptions-item label="create_time">{{ $formatDate(submissionInfo.create_time) }}
</el-descriptions-item>
<el-descriptions-item label="result">
<el-text :type="CodeJudgeStatus()[parseInt(submissionInfo.result) + 2].type">
{{ CodeJudgeStatus()[parseInt(submissionInfo.result) + 2].name }}
</el-text>
</el-descriptions-item>
</el-descriptions>
<!-- Read-only code viewer for the submitted source. -->
<div class="code">
<code-editor
ref="codeEditor"
:model-value="code_source"
:init_language="language"
is_disabled
v-if="submissionInfoDialog"
></code-editor>
</div>
</el-dialog>
</template>
<style scoped lang="scss">
.code {
//padding: 10px;
//border: 1px solid var(--el-border-color);
border-top: none;
}
</style>
|
// src/services/TrackService.js
//
// Thin REST client for the track resource. Every helper resolves with parsed
// JSON and rethrows after logging so callers decide how to surface failures.
const API_URL = "http://localhost:8001/tracks";

/**
 * Fetch the full list of tracks.
 * @returns {Promise<Array>} parsed JSON array of tracks
 * @throws the original error after logging it
 */
const getAllTracks = async () => {
  try {
    const res = await fetch(API_URL);
    if (res.ok) {
      return res.json();
    }
    throw new Error(`Error fetching tracks: ${res.statusText}`);
  } catch (err) {
    console.error("Error in getAllTracks:", err);
    throw err;
  }
};
/**
 * Fetch a single track by its id.
 * @param {number|string} id track identifier
 * @returns {Promise<Object>} the parsed track
 * @throws the original error after logging it
 */
const getTrackById = async (id) => {
  try {
    const res = await fetch(`${API_URL}/${id}`);
    if (res.ok) {
      return res.json();
    }
    throw new Error(`Error fetching track by ID: ${res.statusText}`);
  } catch (err) {
    console.error("Error in getTrackById:", err);
    throw err;
  }
};
/**
 * Create a new track via POST.
 * @param {Object} track payload to persist
 * @returns {Promise<Object>} the created track as returned by the server
 * @throws the original error after logging it
 */
const createTrack = async (track) => {
  try {
    const res = await fetch(API_URL, {
      method: "POST",
      headers: {"Content-Type": "application/json"},
      body: JSON.stringify(track),
    });
    if (res.ok) {
      return res.json();
    }
    throw new Error(`Error creating track: ${res.statusText}`);
  } catch (err) {
    console.error("Error in createTrack:", err);
    throw err;
  }
};
/**
 * Replace an existing track via PUT.
 * @param {number|string} id track identifier
 * @param {Object} track full replacement payload
 * @returns {Promise<Object>} the updated track as returned by the server
 * @throws the original error after logging it
 */
const updateTrack = async (id, track) => {
  try {
    const response = await fetch(`${API_URL}/${id}`, {
      method: "PUT",
      headers: {
        "Content-Type": "application/json",
      },
      body: JSON.stringify(track),
    });
    // FIX: removed leftover debug console.log of the raw Response object —
    // it spammed the console on every update and served no purpose.
    if (!response.ok) {
      throw new Error(`Error updating track: ${response.statusText}`);
    }
    return response.json();
  } catch (error) {
    console.error("Error in updateTrack:", error);
    throw error;
  }
};
/**
 * Delete a track by id. Resolves with undefined on success.
 * @param {number|string} id track identifier
 * @throws the original error after logging it
 */
const deleteTrack = async (id) => {
  try {
    const res = await fetch(`${API_URL}/${id}`, {method: "DELETE"});
    if (res.ok) {
      return;
    }
    throw new Error(`Error deleting track: ${res.statusText}`);
  } catch (err) {
    console.error("Error in deleteTrack:", err);
    throw err;
  }
};
export { getAllTracks, getTrackById, createTrack, updateTrack, deleteTrack };
|
# IMDB Top 50 Actors Web-App with Streamlit
This is a Streamlit web application that displays an IMDb dataset in a Master-Detail-View format,
with actor names in the master column and all of the movies of the actors in the details view.
## Requirements
- Python 3.x
- Streamlit (1.10 or higher)
- pandas
- plotly
## Setup
1. Clone or download the repository
2. In your Anaconda or code editor environment, install the required packages by running:
pip install streamlit
pip install pandas
pip install plotly
3. To test if setup works:
streamlit hello
4. Run the Streamlit app locally in your browser (served at http://localhost:8501/):
streamlit run Homepage.py
## Usage
1. Select an actor from the sidebar
2. The app will display a dataframe of all the movies of the selected actor and other details about the actor
3. Second Page can be selected in the left upper corner to see charts
The relative paths are in Windows format with "\\". When you run this on
Linux or macOS, replace them with "/".
|
/*
* Copyright (C) 2015 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tencent.matrix.resource.analyzer.model;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
* Created by tangyinsheng on 2017/6/2.
*
* This class is ported from LeakCanary.
*/
/**
 * Outcome of analyzing a heap dump for a leaked Activity.
 *
 * <p>Ported from LeakCanary. Exactly one of three shapes is produced by the
 * static factories: no leak found, a detected leak, or an analysis failure.</p>
 */
public final class ActivityLeakResult extends AnalyzeResult {

    /** Factory for a run that completed without finding any leak. */
    public static ActivityLeakResult noLeak(long analysisDurationMs) {
        return new ActivityLeakResult(false, false, null, null, null, analysisDurationMs);
    }

    /** Factory for a run that found a leaked instance of {@code className}. */
    public static ActivityLeakResult leakDetected(boolean excludedLeak, String className,
                                                  ReferenceChain referenceChain, long analysisDurationMs) {
        return new ActivityLeakResult(true, excludedLeak, className, referenceChain, null, analysisDurationMs);
    }

    /** Factory for a run whose analysis itself aborted with {@code failure}. */
    public static ActivityLeakResult failure(Throwable failure, long analysisDurationMs) {
        return new ActivityLeakResult(false, false, null, null, failure, analysisDurationMs);
    }

    /** Whether a leak was found in the heap dump. */
    public final boolean mLeakFound;

    /**
     * True when {@link #mLeakFound} is set and the only path to the leaking
     * reference goes through excluded references; such reports can usually
     * be ignored safely.
     */
    public final boolean mExcludedLeak;

    /**
     * {@link Class#getName()}-style name of the leaked object when
     * {@link #mLeakFound} is true; null otherwise.
     */
    public final String mClassName;

    /**
     * Shortest path from GC roots to the leaking object when
     * {@link #mLeakFound} is true, null otherwise; usable as a unique
     * signature for the leak.
     */
    public final ReferenceChain referenceChain;

    /** Non-null only when the analysis failed. */
    public final Throwable mFailure;

    /** Total time spent analyzing the heap, in milliseconds. */
    public final long mAnalysisDurationMs;

    private ActivityLeakResult(boolean leakFound, boolean excludedLeak, String className,
                               ReferenceChain chain, Throwable failureCause, long durationMs) {
        this.mLeakFound = leakFound;
        this.mExcludedLeak = excludedLeak;
        this.mClassName = className;
        this.referenceChain = chain;
        this.mFailure = failureCause;
        this.mAnalysisDurationMs = durationMs;
    }

    /** Serializes this result into the supplied JSON object. */
    @Override
    public void encodeToJSON(JSONObject jsonObject) throws JSONException {
        final JSONArray chainJson = new JSONArray();
        if (referenceChain != null) {
            for (ReferenceTraceElement step : referenceChain.elements) {
                chainJson.put(step.toString());
            }
        }
        jsonObject.put("leakFound", mLeakFound)
                .put("excludedLeak", mExcludedLeak)
                .put("className", mClassName)
                .put("failure", String.valueOf(mFailure))
                .put("analysisDurationMs", mAnalysisDurationMs)
                .put("referenceChain", chainJson);
    }

    @Override
    public String toString() {
        final StringBuilder out = new StringBuilder("Leak Reference:");
        if (referenceChain != null) {
            for (ReferenceTraceElement step : referenceChain.elements) {
                out.append(step.toString()).append(";");
            }
        }
        return out.toString();
    }
}
|
---
title: 傳回店面體驗
description: 瞭解您的客戶如何從店面的帳戶管理產品退貨。
exl-id: c276ca2c-3d8b-4019-a9aa-e7631080f331
feature: Returns, Storefront
source-git-commit: 8b5af316ab1d2e632ed5fc2066974326830ab3f7
workflow-type: tm+mt
source-wordcount: '208'
ht-degree: 1%
---
# 傳回店面體驗
{{ee-feature}}
客戶可使用下列其中一種方式,向店面請求RMA:
- [訂單與退貨Widget](../content-design/widget-orders-returns.md) 在側欄中
- _訂購與退貨_ 頁尾中的連結
最佳做法是,確保在客戶服務政策中包含RMA需求和程式的說明。
>[!NOTE]
>
>如果您想要收集與退貨相關的其他資訊,可以新增自己的自訂 [傳回屬性](attributes-returns.md).
所有客戶RMA資訊都會顯示在 **[!UICONTROL My Returns]** 客戶帳戶控制面板中的頁面。
{width="700" zoomable="yes"}
## 請求RMA
客戶在店面完成下列步驟以提交RMA:
1. 在頁尾中,按一下 **[!UICONTROL Orders and Returns]**.
1. 輸入訂單資訊:
- 訂單ID
- 帳單姓氏
- 電子郵件
1. 按一下 **[!UICONTROL Continue]**.
{width="700" zoomable="yes"}
1. 在訂購日期下方,按一下 **[!UICONTROL Return]**.
{width="700" zoomable="yes"}
1. 選擇要傳回的專案,並輸入 **[!UICONTROL Quantity to Return]**.
1. 將 **[!UICONTROL Resolution]** 設定為下列其中一項:
- Exchange
- [退款](../customers/refunds-customer-account.md)
- [商店點數](../customers/store-credit-using.md)
1. 將 **[!UICONTROL Item Condition]** 設定為下列其中一項:
- `Unopened`
- `Opened`
- `Damaged`
1. 將 **[!UICONTROL Reason to Return]** 設定為下列其中一項:
- `Wrong Color`
- `Wrong Size`
- `Out of Service`
- `Other`
{width="700" zoomable="yes"}
1. 如有需要,設定 **[!UICONTROL Contact Email Address]** 和 **[!UICONTROL Comments]**.
>[!NOTE]
>
>如果訂單包含數個料號,而客戶想要退回其他料號,可以按一下 **[!UICONTROL Add Item To Return]**,選取專案,然後設定所有提及的選項。
1. 按一下 **[!UICONTROL Submit]**.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.