Dataset schema, one record per source file (`stringlengths a-b` = min-max string length, `stringclasses n` / `bool n classes` = number of distinct values, ⌀ = column contains nulls):

| field | dtype | range / values |
|---|---|---|
| repo_name | string | lengths 7-111 |
| __id__ | int64 | 16.6k-19,705B |
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | lengths 5-151 |
| content_id | string | length 40 |
| detected_licenses | list | |
| license_type | string | 2 classes |
| repo_url | string | lengths 26-130 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | lengths 4-42 |
| visit_date | timestamp[ns] | |
| revision_date | timestamp[ns] | |
| committer_date | timestamp[ns] | |
| github_id | int64 | 14.6k-687M ⌀ |
| star_events_count | int64 | 0-209k |
| fork_events_count | int64 | 0-110k |
| gha_license_id | string | 12 classes |
| gha_fork | bool | 2 classes |
| gha_event_created_at | timestamp[ns] | |
| gha_created_at | timestamp[ns] | |
| gha_updated_at | timestamp[ns] | |
| gha_pushed_at | timestamp[ns] | |
| gha_size | int64 | 0-10.2M ⌀ |
| gha_stargazers_count | int32 | 0-178k ⌀ |
| gha_forks_count | int32 | 0-88.9k ⌀ |
| gha_open_issues_count | int32 | 0-2.72k ⌀ |
| gha_language | string | lengths 1-16 ⌀ |
| gha_archived | bool | 1 class |
| gha_disabled | bool | 1 class |
| content | string | lengths 10-2.95M |
| src_encoding | string | 5 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 10-2.95M |
| extension | string | 19 classes |
| num_repo_files | int64 | 1-202k |
| filename | string | lengths 4-112 |
| num_lang_files | int64 | 1-202k |
| alphanum_fraction | float64 | 0.26-0.89 |
| alpha_fraction | float64 | 0.2-0.89 |
| hex_fraction | float64 | 0-0.09 |
| num_lines | int32 | 1-93.6k |
| avg_line_length | float64 | 4.57-103 |
| max_line_length | int64 | 7-931 |

The records below follow this column order, pipe-separated, with each file's `content` inlined between the repository metadata and the per-file statistics.
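A minimal sketch for loading and inspecting a dump with this schema, assuming it is published through the Hugging Face `datasets` library; the dataset id used here is a placeholder, not a real identifier:

```python
# Minimal inspection sketch, assuming the dump is published via the
# Hugging Face `datasets` library; "example-org/python-code-dump" is a
# placeholder id, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("example-org/python-code-dump", split="train", streaming=True)
for row in ds.take(3):
    # each record pairs the inlined file `content` with repo metadata
    # and per-file statistics such as `length_bytes` and `num_lines`
    print(row["repo_name"], row["path"], row["length_bytes"])
```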
Kangaru94/search_for_element_in_list_test | 1,340,029,818,582 | 25d2ddde54c14ca85fccc6ea222f16b6a194ae38 | bbb28047042fd415361e2202495d2bd85b6fdb36 | /search_for_element_in_list.py | da65bd45b2757d311f1626bcd7d7e56fb46a7e0c | []
| no_license | https://github.com/Kangaru94/search_for_element_in_list_test | f296911daab5ad95dbb091ce3496031ff7f12441 | 1c333ad2acb39739820e4b0e4e0935aec1e51df0 | refs/heads/master | 2020-06-19T14:32:24.619860 | 2019-07-13T16:45:11 | 2019-07-13T16:45:11 | 196,744,580 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #testing "in" START
def returns_True_or_False(list):
if 1 in list:
print("True")
else:
print("False")
list1 = [1, "a", "b"]
list2 = ["a", "b", "c"]
list3 = list1 + list2
returns_True_or_False(list1)
returns_True_or_False(list2)
returns_True_or_False(list3)
#testing "in" END | UTF-8 | Python | false | false | 282 | py | 1 | search_for_element_in_list.py | 1 | 0.64539 | 0.609929 | 0 | 16 | 16.6875 | 32 |
ArchyCillp/VinceJudgeChatRoom | 4,621,384,812,484 | 2cd7841717b26b3d2323953b8f4f96a88eb0d32f | be5cd9756a6426a4ca7e240d9c4b2a16b7c0ff28 | /src/client.py | 27ca2a5a6da880fe1cde03e400fa8fc53aa27e3a | [
"MIT"
]
| permissive | https://github.com/ArchyCillp/VinceJudgeChatRoom | 718d2753cd722777f8b7ca26f2960706de3a0447 | 1b4ad1e3ca8ffe97faf5e15cf7e2e1c59e827a99 | refs/heads/master | 2021-06-23T13:46:45.725408 | 2019-08-08T03:36:30 | 2019-08-08T03:36:30 | 148,513,659 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import socket
import threading
import select
def connect_to_server(addr):
server_socket = socket.socket()
server_socket.connect(addr)
return server_socket
def listen_to_server(server_socket):
listening = [server_socket]
while True:
active_socket_list, w, e = select.select(listening, [], [])
if server_socket in active_socket_list:
try:
print(server_socket.recv(1024).decode('utf-8'))
except socket.error as e:
print(e, ' Failed to receive message from server.')
def send_to_server(server_socket):
    while True:
        try:
            print('>>>', end='')
            msg = input()
            if msg == '$exit':
                server_socket.sendall(msg.encode())
                server_socket.close()
                exit()
            # send inside the same try so a failed input() cannot leave
            # msg undefined for the send below
            server_socket.sendall(msg.encode())
        except Exception as e:
            print(e)
if __name__ == '__main__':
server_ip = input('Server IP:')
server_port = int(input('Server Port:'))
print('\n')
server_socket = connect_to_server((server_ip, server_port))
listen_thr = threading.Thread(target=listen_to_server, args=(server_socket,))
send_thr = threading.Thread(target=send_to_server, args=(server_socket,))
listen_thr.start()
send_thr.start()
| UTF-8 | Python | false | false | 1,387 | py | 4 | client.py | 2 | 0.573901 | 0.570296 | 0 | 48 | 27.895833 | 81 |
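The client above assumes a server that speaks plain UTF-8 messages and honours the `$exit` sentinel; a minimal counterpart sketch for local testing follows, where the host, port, and echo behaviour are assumptions rather than anything in the repo:

```python
# Minimal echo-style server sketch for testing the client above.
# Not part of the original repo; host/port and behaviour are assumptions.
import socket

server = socket.socket()
server.bind(("0.0.0.0", 9000))
server.listen(1)
conn, addr = server.accept()
print("client connected:", addr)
while True:
    msg = conn.recv(1024).decode("utf-8")
    if not msg or msg == "$exit":
        break
    conn.sendall(("echo: " + msg).encode())
conn.close()
server.close()
```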
brython-dev/brython | 7,885,559,956,763 | c63ccc364e5cef5f26cc35ea205b2dd6dc79566d | 8d402df39c18eba7e1c86c762f205c944357c5df | /www/speed/benchmarks/set_dict_item.py | 39b19d57ebf5a4fac2a97c7077b4d2574d355555 | [
"BSD-3-Clause"
]
| permissive | https://github.com/brython-dev/brython | 87cc023e25550dec9ce459ba68774189f33712b6 | b33958bff0e8c7a280babc30232dc389a2500a7a | refs/heads/master | 2023-09-04T04:49:29.156209 | 2023-09-01T06:36:08 | 2023-09-01T06:36:08 | 24,046,239 | 6,569 | 625 | BSD-3-Clause | false | 2023-07-05T06:13:32 | 2014-09-15T06:58:21 | 2023-07-05T01:48:46 | 2023-07-05T06:13:31 | 247,074 | 6,052 | 499 | 35 | Python | false | false | a = {0: 0}
for i in range(1000000):
a[0] = i
JS_CODE = '''
var a = {0: 0}
for (var i = 0; i < 1000000; i++) {
a[0] = i
}
'''
| UTF-8 | Python | false | false | 135 | py | 772 | set_dict_item.py | 454 | 0.414815 | 0.259259 | 0 | 11 | 11.272727 | 35 |
dtbhatt/Python | 16,810,502,021,873 | 4e3a42f578744f5b1deb02f7f7c95cc41a575f03 | 7d6406b6ba112b69fd1d0b5387793703e4220137 | /animals.py | 9fad44fdf391c24720e87f11f6158f412613ee93 | []
| no_license | https://github.com/dtbhatt/Python | 6113fd007d02bb03d10fe996d570d12e28063ea3 | c7cee8a5dd8bda9987e8918f2e4581c8585e2b50 | refs/heads/master | 2021-01-25T07:55:22.909000 | 2017-06-08T19:54:05 | 2017-06-08T19:54:05 | 93,695,407 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Animal(object):
def __init__(self, name, health):
self.name = name
self.health = health
def walk(self):
self.health -= 1
return self
def run(self):
self.health -= 5
return self
def display(self):
print self.health
return self
dog1 = Animal("Dog", 150)
dog1.walk().walk().walk().run().run().display()
class Dog(Animal):
def __init__(self, name):
super(Dog, self).__init__(name, 150)
def pet(self):
self.health += 10
return self
class Dragon(Animal):
def __init__(self,name):
super(Dragon, self).__init__(name, 170)
def fly(self):
self.health -= 10
return self
dragon = Dragon("Cali")
dragon.fly().display()
# dog = Dog("Cali")
# dog.pet().display() | UTF-8 | Python | false | false | 824 | py | 15 | animals.py | 15 | 0.536408 | 0.515777 | 0 | 42 | 18.642857 | 47 |
Quik-e/PSK-Simulation | 12,000,138,655,385 | c73531a230f051d665e3de34bd057a5c36d100cc | 6ed345123cc8dd4766b2386e7ceef99feef70cd6 | /8PSK_ModDemod_FC/8PSK_ModDemod_FC/E8PSK_ModDemod_FC.py | c24f460c55c87172ae2e7a28827600232eb35d5f | [
"MIT"
]
| permissive | https://github.com/Quik-e/PSK-Simulation | 23052921811d194d6cbe6adde7e9885a52225833 | 4dd96e9aba41d7ace96b2b2f5a828741600eb27e | refs/heads/master | 2023-03-02T22:03:01.178738 | 2022-03-12T01:54:41 | 2022-03-12T01:54:41 | 259,562,893 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
##################################################
# GNU Radio Python Flow Graph
# Title: 8PSK Modulation and Demodulation with Frequency Correction Simulation
# Author: Enrique Quik-e Cametti
# Generated: Fri Mar 13 15:27:15 2020
##################################################
from distutils.version import StrictVersion
if __name__ == '__main__':
import ctypes
import sys
if sys.platform.startswith('linux'):
try:
x11 = ctypes.cdll.LoadLibrary('libX11.so')
x11.XInitThreads()
except:
print "Warning: failed to XInitThreads()"
from PyQt5 import Qt
from PyQt5 import Qt, QtCore
from gnuradio import analog
from gnuradio import blocks
from gnuradio import channels
from gnuradio import digital
from gnuradio import eng_notation
from gnuradio import filter
from gnuradio import gr
from gnuradio import qtgui
from gnuradio.eng_option import eng_option
from gnuradio.filter import firdes
from gnuradio.qtgui import Range, RangeWidget
from optparse import OptionParser
import get_first_byte
import numpy as np
import repeat_first_byte
import sip
import sync_decoder
import sync_encoder
import sys
from gnuradio import qtgui
class E8PSK_ModDemod_FC(gr.top_block, Qt.QWidget):
def __init__(self):
gr.top_block.__init__(self, "8PSK Modulation and Demodulation with Frequency Correction Simulation")
Qt.QWidget.__init__(self)
self.setWindowTitle("8PSK Modulation and Demodulation with Frequency Correction Simulation")
qtgui.util.check_set_qss()
try:
self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
except:
pass
self.top_scroll_layout = Qt.QVBoxLayout()
self.setLayout(self.top_scroll_layout)
self.top_scroll = Qt.QScrollArea()
self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
self.top_scroll_layout.addWidget(self.top_scroll)
self.top_scroll.setWidgetResizable(True)
self.top_widget = Qt.QWidget()
self.top_scroll.setWidget(self.top_widget)
self.top_layout = Qt.QVBoxLayout(self.top_widget)
self.top_grid_layout = Qt.QGridLayout()
self.top_layout.addLayout(self.top_grid_layout)
self.settings = Qt.QSettings("GNU Radio", "E8PSK_ModDemod_FC")
if StrictVersion(Qt.qVersion()) < StrictVersion("5.0.0"):
self.restoreGeometry(self.settings.value("geometry").toByteArray())
else:
self.restoreGeometry(self.settings.value("geometry", type=QtCore.QByteArray))
##################################################
# Variables
##################################################
self.RangeRow = RangeRow = 0
self.ConsRow = ConsRow = RangeRow+1
self.nfilts = nfilts = 100
self.SampSymb = SampSymb = 8
self.FreqRow = FreqRow = ConsRow+1
self.samp_rate = samp_rate = 1e6
self.rrc_taps_0 = rrc_taps_0 = firdes.root_raised_cosine(nfilts, nfilts, 1.0/float(SampSymb), 0.35, 45*nfilts)
self.Values = Values = 2
self.TimeRow = TimeRow = FreqRow+1
self.SPS = SPS = 2
self.QPSK_CO = QPSK_CO = digital.constellation_qpsk().base()
self.Noise = Noise = 0
self.FreqOff = FreqOff = 0.005
self.FDP = FDP = 0.005
##################################################
# Blocks
##################################################
self._Noise_range = Range(0, 1, 0.01, 0, 200)
self._Noise_win = RangeWidget(self._Noise_range, self.set_Noise, 'Channel Noise', "counter_slider", float)
self.top_grid_layout.addWidget(self._Noise_win, 0, 1, 1, 2)
[self.top_grid_layout.setRowStretch(r,1) for r in range(0,1)]
[self.top_grid_layout.setColumnStretch(c,1) for c in range(1,3)]
self._FreqOff_range = Range(-1, 1, 0.001, 0.005, 200)
self._FreqOff_win = RangeWidget(self._FreqOff_range, self.set_FreqOff, 'Frequency Offset', "counter_slider", float)
self.top_grid_layout.addWidget(self._FreqOff_win, 0, 3, 1, 1)
[self.top_grid_layout.setRowStretch(r,1) for r in range(0,1)]
[self.top_grid_layout.setColumnStretch(c,1) for c in range(3,4)]
self.sync_encoder = sync_encoder.blk()
self.sync_decoder = sync_decoder.blk()
self.repeat_first_byte = repeat_first_byte.blk(repeat=2)
self.qtgui_time_sink_x_0_0 = qtgui.time_sink_f(
512, #size
samp_rate, #samp_rate
"Original vs Received Data", #name
2 #number of inputs
)
self.qtgui_time_sink_x_0_0.set_update_time(0.064)
self.qtgui_time_sink_x_0_0.set_y_axis(-1, 1)
self.qtgui_time_sink_x_0_0.set_y_label('Amplitude', "")
self.qtgui_time_sink_x_0_0.enable_tags(-1, True)
self.qtgui_time_sink_x_0_0.set_trigger_mode(qtgui.TRIG_MODE_FREE, qtgui.TRIG_SLOPE_POS, 0.0, 0, 0, "")
self.qtgui_time_sink_x_0_0.enable_autoscale(True)
self.qtgui_time_sink_x_0_0.enable_grid(False)
self.qtgui_time_sink_x_0_0.enable_axis_labels(True)
self.qtgui_time_sink_x_0_0.enable_control_panel(False)
self.qtgui_time_sink_x_0_0.enable_stem_plot(False)
if not True:
self.qtgui_time_sink_x_0_0.disable_legend()
labels = ['Original', 'Received', '', '', '',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "blue"]
styles = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
markers = [-1, -1, -1, -1, -1,
-1, -1, -1, -1, -1]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(2):
if len(labels[i]) == 0:
self.qtgui_time_sink_x_0_0.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_time_sink_x_0_0.set_line_label(i, labels[i])
self.qtgui_time_sink_x_0_0.set_line_width(i, widths[i])
self.qtgui_time_sink_x_0_0.set_line_color(i, colors[i])
self.qtgui_time_sink_x_0_0.set_line_style(i, styles[i])
self.qtgui_time_sink_x_0_0.set_line_marker(i, markers[i])
self.qtgui_time_sink_x_0_0.set_line_alpha(i, alphas[i])
self._qtgui_time_sink_x_0_0_win = sip.wrapinstance(self.qtgui_time_sink_x_0_0.pyqwidget(), Qt.QWidget)
self.top_layout.addWidget(self._qtgui_time_sink_x_0_0_win)
self.qtgui_number_sink_0 = qtgui.number_sink(
gr.sizeof_float,
0,
qtgui.NUM_GRAPH_NONE,
1
)
self.qtgui_number_sink_0.set_update_time(0.10)
self.qtgui_number_sink_0.set_title("")
labels = ['First Byte', '', '', '', '',
'', '', '', '', '']
units = ['', '', '', '', '',
'', '', '', '', '']
colors = [("black", "black"), ("black", "black"), ("black", "black"), ("black", "black"), ("black", "black"),
("black", "black"), ("black", "black"), ("black", "black"), ("black", "black"), ("black", "black")]
factor = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
for i in xrange(1):
self.qtgui_number_sink_0.set_min(i, 0)
self.qtgui_number_sink_0.set_max(i, 255)
self.qtgui_number_sink_0.set_color(i, colors[i][0], colors[i][1])
if len(labels[i]) == 0:
self.qtgui_number_sink_0.set_label(i, "Data {0}".format(i))
else:
self.qtgui_number_sink_0.set_label(i, labels[i])
self.qtgui_number_sink_0.set_unit(i, units[i])
self.qtgui_number_sink_0.set_factor(i, factor[i])
self.qtgui_number_sink_0.enable_autoscale(False)
self._qtgui_number_sink_0_win = sip.wrapinstance(self.qtgui_number_sink_0.pyqwidget(), Qt.QWidget)
self.top_layout.addWidget(self._qtgui_number_sink_0_win)
self.qtgui_freq_sink_x_2 = qtgui.freq_sink_c(
1024, #size
firdes.WIN_BLACKMAN_hARRIS, #wintype
0, #fc
samp_rate, #bw
"", #name
1 #number of inputs
)
self.qtgui_freq_sink_x_2.set_update_time(0.064)
self.qtgui_freq_sink_x_2.set_y_axis(-140, 10)
self.qtgui_freq_sink_x_2.set_y_label('Relative Gain', 'dB')
self.qtgui_freq_sink_x_2.set_trigger_mode(qtgui.TRIG_MODE_FREE, 0.0, 0, "")
self.qtgui_freq_sink_x_2.enable_autoscale(False)
self.qtgui_freq_sink_x_2.enable_grid(False)
self.qtgui_freq_sink_x_2.set_fft_average(1.0)
self.qtgui_freq_sink_x_2.enable_axis_labels(True)
self.qtgui_freq_sink_x_2.enable_control_panel(False)
if not True:
self.qtgui_freq_sink_x_2.disable_legend()
if "complex" == "float" or "complex" == "msg_float":
self.qtgui_freq_sink_x_2.set_plot_pos_half(not True)
labels = ['', '', '', '', '',
'', '', '', '', '']
widths = [1, 1, 1, 1, 1,
1, 1, 1, 1, 1]
colors = ["blue", "red", "green", "black", "cyan",
"magenta", "yellow", "dark red", "dark green", "dark blue"]
alphas = [1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0]
for i in xrange(1):
if len(labels[i]) == 0:
self.qtgui_freq_sink_x_2.set_line_label(i, "Data {0}".format(i))
else:
self.qtgui_freq_sink_x_2.set_line_label(i, labels[i])
self.qtgui_freq_sink_x_2.set_line_width(i, widths[i])
self.qtgui_freq_sink_x_2.set_line_color(i, colors[i])
self.qtgui_freq_sink_x_2.set_line_alpha(i, alphas[i])
self._qtgui_freq_sink_x_2_win = sip.wrapinstance(self.qtgui_freq_sink_x_2.pyqwidget(), Qt.QWidget)
self.top_layout.addWidget(self._qtgui_freq_sink_x_2_win)
self.get_first_byte = get_first_byte.blk()
self.digital_psk_mod_0 = digital.psk.psk_mod(
constellation_points=8,
mod_code="gray",
differential=True,
samples_per_symbol=SampSymb,
excess_bw=0.35,
verbose=False,
log=False,
)
self.digital_psk_demod_0 = digital.psk.psk_demod(
constellation_points=8,
differential=True,
samples_per_symbol=SampSymb,
excess_bw=0.35,
phase_bw=6.28/100.0,
timing_bw=6.28/100.0,
mod_code="gray",
verbose=False,
log=False,
)
self.channels_channel_model_0 = channels.channel_model(
noise_voltage=Noise,
frequency_offset=FreqOff,
epsilon=1.0,
taps=(1.0 + 1.0j, ),
noise_seed=0,
block_tags=False
)
self.blocks_uchar_to_float_0_0 = blocks.uchar_to_float()
self.blocks_uchar_to_float_0 = blocks.uchar_to_float()
self.blocks_throttle_0 = blocks.throttle(gr.sizeof_char*1, samp_rate,True)
self.blocks_skiphead_0 = blocks.skiphead(gr.sizeof_char*1, 30)
self.blocks_repack_bits_bb_0 = blocks.repack_bits_bb(1, 8, "", False, gr.GR_MSB_FIRST)
self.blocks_multiply_xx_1 = blocks.multiply_vcc(1)
self.blocks_multiply_xx_0_0 = blocks.multiply_vcc(1)
self.blocks_multiply_xx_0 = blocks.multiply_vcc(1)
self.blocks_multiply_conjugate_cc_0 = blocks.multiply_conjugate_cc(1)
self.blocks_file_sink_1 = blocks.file_sink(gr.sizeof_char*1, '/home/teddy/Documents/DVB_last_stand/Received_Files/Test_Text_8PSK_FC_received.txt', False)
self.blocks_file_sink_1.set_unbuffered(False)
self.blocks_delay_0 = blocks.delay(gr.sizeof_char*1, 5)
self.blocks_add_xx_0 = blocks.add_vcc(1)
self.band_pass_filter_0_0 = filter.interp_fir_filter_ccf(1, firdes.band_pass(
1, samp_rate, (2*samp_rate/float(SampSymb)-100e3), (2*samp_rate/float(SampSymb)+100e3), 10e3, firdes.WIN_HAMMING, 6.76))
self.band_pass_filter_0 = filter.interp_fir_filter_ccf(1, firdes.band_pass(
1, samp_rate, (samp_rate/float(SampSymb)-samp_rate/float(SampSymb)*FDP*2), (samp_rate/float(SampSymb)+samp_rate/float(SampSymb)*FDP*2), 10e3, firdes.WIN_HAMMING, 6.76))
self.analog_sig_source_x_0_0_0 = analog.sig_source_c(samp_rate, analog.GR_SIN_WAVE, samp_rate/SampSymb, 1, 0)
self.analog_sig_source_x_0_0 = analog.sig_source_c(samp_rate, analog.GR_SIN_WAVE, samp_rate/SampSymb, 1, 0)
self.analog_pll_refout_cc_0 = analog.pll_refout_cc(0.2, 2*np.pi*(1/float(SampSymb)-1/float(SampSymb)*FDP), 2*np.pi*(1/float(SampSymb)+1/float(SampSymb)*FDP))
self._Values_range = Range(0, 255, 1, 2, 200)
self._Values_win = RangeWidget(self._Values_range, self.set_Values, 'Vector Values', "counter_slider", int)
self.top_grid_layout.addWidget(self._Values_win, 0, 0, 1, 1)
[self.top_grid_layout.setRowStretch(r,1) for r in range(0,1)]
[self.top_grid_layout.setColumnStretch(c,1) for c in range(0,1)]
self.Text_Source = blocks.file_source(gr.sizeof_char*1, '/home/teddy/Documents/DVB_last_stand/Source_Files/Test_text.txt', True)
##################################################
# Connections
##################################################
self.connect((self.Text_Source, 0), (self.blocks_throttle_0, 0))
self.connect((self.analog_pll_refout_cc_0, 0), (self.blocks_multiply_xx_1, 0))
self.connect((self.analog_sig_source_x_0_0, 0), (self.blocks_add_xx_0, 1))
self.connect((self.analog_sig_source_x_0_0, 0), (self.blocks_multiply_xx_0_0, 0))
self.connect((self.analog_sig_source_x_0_0, 0), (self.blocks_multiply_xx_0_0, 1))
self.connect((self.analog_sig_source_x_0_0_0, 0), (self.blocks_multiply_xx_1, 1))
self.connect((self.band_pass_filter_0, 0), (self.analog_pll_refout_cc_0, 0))
self.connect((self.band_pass_filter_0_0, 0), (self.blocks_multiply_conjugate_cc_0, 0))
self.connect((self.blocks_add_xx_0, 0), (self.channels_channel_model_0, 0))
self.connect((self.blocks_delay_0, 0), (self.blocks_repack_bits_bb_0, 0))
self.connect((self.blocks_multiply_conjugate_cc_0, 0), (self.digital_psk_demod_0, 0))
self.connect((self.blocks_multiply_conjugate_cc_0, 0), (self.qtgui_freq_sink_x_2, 0))
self.connect((self.blocks_multiply_xx_0, 0), (self.blocks_add_xx_0, 0))
self.connect((self.blocks_multiply_xx_0_0, 0), (self.blocks_multiply_xx_0, 1))
self.connect((self.blocks_multiply_xx_1, 0), (self.blocks_multiply_conjugate_cc_0, 1))
self.connect((self.blocks_repack_bits_bb_0, 0), (self.blocks_skiphead_0, 0))
self.connect((self.blocks_skiphead_0, 0), (self.sync_decoder, 0))
self.connect((self.blocks_throttle_0, 0), (self.blocks_uchar_to_float_0_0, 0))
self.connect((self.blocks_throttle_0, 0), (self.repeat_first_byte, 0))
self.connect((self.blocks_uchar_to_float_0, 0), (self.qtgui_time_sink_x_0_0, 1))
self.connect((self.blocks_uchar_to_float_0_0, 0), (self.qtgui_time_sink_x_0_0, 0))
self.connect((self.channels_channel_model_0, 0), (self.band_pass_filter_0, 0))
self.connect((self.channels_channel_model_0, 0), (self.band_pass_filter_0_0, 0))
self.connect((self.digital_psk_demod_0, 0), (self.blocks_delay_0, 0))
self.connect((self.digital_psk_mod_0, 0), (self.blocks_multiply_xx_0, 0))
self.connect((self.get_first_byte, 0), (self.qtgui_number_sink_0, 0))
self.connect((self.repeat_first_byte, 0), (self.sync_encoder, 0))
self.connect((self.sync_decoder, 0), (self.blocks_file_sink_1, 0))
self.connect((self.sync_decoder, 0), (self.blocks_uchar_to_float_0, 0))
self.connect((self.sync_decoder, 0), (self.get_first_byte, 0))
self.connect((self.sync_encoder, 0), (self.digital_psk_mod_0, 0))
def closeEvent(self, event):
self.settings = Qt.QSettings("GNU Radio", "E8PSK_ModDemod_FC")
self.settings.setValue("geometry", self.saveGeometry())
event.accept()
def get_RangeRow(self):
return self.RangeRow
def set_RangeRow(self, RangeRow):
self.RangeRow = RangeRow
self.set_ConsRow(self.RangeRow+1)
def get_ConsRow(self):
return self.ConsRow
def set_ConsRow(self, ConsRow):
self.ConsRow = ConsRow
self.set_FreqRow(self.ConsRow+1)
def get_nfilts(self):
return self.nfilts
def set_nfilts(self, nfilts):
self.nfilts = nfilts
self.set_rrc_taps_0(firdes.root_raised_cosine(self.nfilts, self.nfilts, 1.0/float(self.SampSymb), 0.35, 45*self.nfilts))
def get_SampSymb(self):
return self.SampSymb
def set_SampSymb(self, SampSymb):
self.SampSymb = SampSymb
self.set_rrc_taps_0(firdes.root_raised_cosine(self.nfilts, self.nfilts, 1.0/float(self.SampSymb), 0.35, 45*self.nfilts))
self.band_pass_filter_0_0.set_taps(firdes.band_pass(1, self.samp_rate, (2*self.samp_rate/float(self.SampSymb)-100e3), (2*self.samp_rate/float(self.SampSymb)+100e3), 10e3, firdes.WIN_HAMMING, 6.76))
self.band_pass_filter_0.set_taps(firdes.band_pass(1, self.samp_rate, (self.samp_rate/float(self.SampSymb)-self.samp_rate/float(self.SampSymb)*self.FDP*2), (self.samp_rate/float(self.SampSymb)+self.samp_rate/float(self.SampSymb)*self.FDP*2), 10e3, firdes.WIN_HAMMING, 6.76))
self.analog_sig_source_x_0_0_0.set_frequency(self.samp_rate/self.SampSymb)
self.analog_sig_source_x_0_0.set_frequency(self.samp_rate/self.SampSymb)
self.analog_pll_refout_cc_0.set_max_freq(2*np.pi*(1/float(self.SampSymb)-1/float(self.SampSymb)*self.FDP))
self.analog_pll_refout_cc_0.set_min_freq(2*np.pi*(1/float(self.SampSymb)+1/float(self.SampSymb)*self.FDP))
def get_FreqRow(self):
return self.FreqRow
def set_FreqRow(self, FreqRow):
self.FreqRow = FreqRow
self.set_TimeRow(self.FreqRow+1)
def get_samp_rate(self):
return self.samp_rate
def set_samp_rate(self, samp_rate):
self.samp_rate = samp_rate
self.qtgui_time_sink_x_0_0.set_samp_rate(self.samp_rate)
self.qtgui_freq_sink_x_2.set_frequency_range(0, self.samp_rate)
self.blocks_throttle_0.set_sample_rate(self.samp_rate)
self.band_pass_filter_0_0.set_taps(firdes.band_pass(1, self.samp_rate, (2*self.samp_rate/float(self.SampSymb)-100e3), (2*self.samp_rate/float(self.SampSymb)+100e3), 10e3, firdes.WIN_HAMMING, 6.76))
self.band_pass_filter_0.set_taps(firdes.band_pass(1, self.samp_rate, (self.samp_rate/float(self.SampSymb)-self.samp_rate/float(self.SampSymb)*self.FDP*2), (self.samp_rate/float(self.SampSymb)+self.samp_rate/float(self.SampSymb)*self.FDP*2), 10e3, firdes.WIN_HAMMING, 6.76))
self.analog_sig_source_x_0_0_0.set_sampling_freq(self.samp_rate)
self.analog_sig_source_x_0_0_0.set_frequency(self.samp_rate/self.SampSymb)
self.analog_sig_source_x_0_0.set_sampling_freq(self.samp_rate)
self.analog_sig_source_x_0_0.set_frequency(self.samp_rate/self.SampSymb)
def get_rrc_taps_0(self):
return self.rrc_taps_0
def set_rrc_taps_0(self, rrc_taps_0):
self.rrc_taps_0 = rrc_taps_0
def get_Values(self):
return self.Values
def set_Values(self, Values):
self.Values = Values
def get_TimeRow(self):
return self.TimeRow
def set_TimeRow(self, TimeRow):
self.TimeRow = TimeRow
def get_SPS(self):
return self.SPS
def set_SPS(self, SPS):
self.SPS = SPS
def get_QPSK_CO(self):
return self.QPSK_CO
def set_QPSK_CO(self, QPSK_CO):
self.QPSK_CO = QPSK_CO
def get_Noise(self):
return self.Noise
def set_Noise(self, Noise):
self.Noise = Noise
self.channels_channel_model_0.set_noise_voltage(self.Noise)
def get_FreqOff(self):
return self.FreqOff
def set_FreqOff(self, FreqOff):
self.FreqOff = FreqOff
self.channels_channel_model_0.set_frequency_offset(self.FreqOff)
def get_FDP(self):
return self.FDP
def set_FDP(self, FDP):
self.FDP = FDP
self.band_pass_filter_0.set_taps(firdes.band_pass(1, self.samp_rate, (self.samp_rate/float(self.SampSymb)-self.samp_rate/float(self.SampSymb)*self.FDP*2), (self.samp_rate/float(self.SampSymb)+self.samp_rate/float(self.SampSymb)*self.FDP*2), 10e3, firdes.WIN_HAMMING, 6.76))
self.analog_pll_refout_cc_0.set_max_freq(2*np.pi*(1/float(self.SampSymb)-1/float(self.SampSymb)*self.FDP))
self.analog_pll_refout_cc_0.set_min_freq(2*np.pi*(1/float(self.SampSymb)+1/float(self.SampSymb)*self.FDP))
def main(top_block_cls=E8PSK_ModDemod_FC, options=None):
if StrictVersion("4.5.0") <= StrictVersion(Qt.qVersion()) < StrictVersion("5.0.0"):
style = gr.prefs().get_string('qtgui', 'style', 'raster')
Qt.QApplication.setGraphicsSystem(style)
qapp = Qt.QApplication(sys.argv)
tb = top_block_cls()
tb.start()
tb.show()
def quitting():
tb.stop()
tb.wait()
qapp.aboutToQuit.connect(quitting)
qapp.exec_()
if __name__ == '__main__':
main()
| UTF-8 | Python | false | false | 21,338 | py | 12 | E8PSK_ModDemod_FC.py | 3 | 0.601743 | 0.56786 | 0 | 459 | 45.488017 | 281 |
cgrobbin/Recipe-Master | 8,839,042,728,624 | b936f2b38c7e06d6319ce57f4639785b8c8dc066 | 181159d7534f23db7fd3478c1346a8d5b76ad5e8 | /main_app/migrations/0003_auto_20210405_1922.py | deb66b72978e9dec587d1f264e35243567b7602d | []
| no_license | https://github.com/cgrobbin/Recipe-Master | 87523a860b790da9e6cfe4e7843d421e57c421cb | 619e9d5dc8f0cb0afca8163f79cbf9cb7d1b52db | refs/heads/main | 2023-03-29T16:53:34.152054 | 2021-04-12T16:03:32 | 2021-04-12T16:03:32 | 354,648,846 | 0 | 0 | null | false | 2021-04-11T16:09:34 | 2021-04-04T21:19:05 | 2021-04-11T15:59:17 | 2021-04-11T16:09:33 | 791 | 0 | 0 | 0 | HTML | false | false | # Generated by Django 3.1.7 on 2021-04-05 19:22
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main_app', '0002_auto_20210405_1855'),
]
operations = [
migrations.AlterField(
model_name='recipe',
name='ingredients',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=50), size=None),
),
]
| UTF-8 | Python | false | false | 495 | py | 24 | 0003_auto_20210405_1922.py | 6 | 0.642424 | 0.575758 | 0 | 19 | 25.052632 | 115 |
kimballa/stitch | 17,136,919,533,600 | 41a8de27d9b163b2a397d336f5a361e422b5e921 | fb68aae4c91fb889fe0e6654289a0e0ff3ded258 | /src/stitch/util/output.py | befa6acb6b62595374993d2ccbd22f6487ce4dd4 | [
"Apache-2.0"
]
| permissive | https://github.com/kimballa/stitch | e5b8028e2f48bb9ad906c665d491dd4b142abc97 | 5b7fec4c08514ed6ec1bd06920d29d9eeb33c98e | refs/heads/master | 2021-01-18T04:06:09.448877 | 2009-07-17T23:30:13 | 2009-07-17T23:30:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # (c) Copyright 2009 Cloudera, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Manages Cloudera-specific usage of the python 'logging' module
# Rather than use 'logging' directly, import this module instead.
# This module includes and reexports all elements of the logging module
#
# This module defines the common command-line arguments used to
# control output verbosity for stdout/stderr. Applications are encouraged
# to use these same argument names for consistency
#
# These are:
# (VERBOSE_FLAG) --verbose, -v Print CRITICAL through VERBOSE
# (DEBUG_FLAG) --debug Print CRITICAL through DEBUG
# (QUIET_FLAG) --quiet, -q Only emit CRITICAL output.
# (Overrides --verbose and --debug)
#
# Default action is to print CRITICAL through INFO
#
# Properties:
#
# The actual governing of what data is printed to the screen is handled
# by the properties object passed to the stream's constructor.
#
# The properties in question are:
# output.quiet Sets quiet mode
# output.verbose Enables verbose outputs
# output.debug Enables debug outputs
#
#
# The attachOutputArgParser() method will instantiate an ArgParser for
# these flags and bind it to a Properties object
#
# Your program should call initLogging() before doing virtually anything
# else. (see its comments for why). When you know what log level you want,
# you should then call setupConsole(). (In practice, these will be called
# by the ensureConsole() method if you use any of the println*() methods
# of this module.)
#
# "VERBOSE" is not defined by their logging system; in addition to
# the levels CRITICAL, ERROR, WARNING, INFO, and DEBUG (defined in
# the logging module, and re-exported here), we add a new level
# VERBOSE between INFO and DEBUG.
#
# A standard log file argument is now available. This flag (LOG_FILENAME_FLAG)
# takes as an argument the name of a log file. If present, this implies that
# the root logger should also send a copy of its output to the indicated log
# file. This log will be installed during the call to setupConsole().
# The getAutoFileHandler() method will return the Handler object installed by
# this process. The getAutoLogName() method will return the filename used.
#
# The flag itself is "--log-filename" and sets the property output.auto.logfile.
# The verbosity level of this automatic log is handled by "--log-level" which
# sets the property output.auto.level. These flags are handled by
# attachOutputArgParser().
import atexit
from logging import *
import sys
from stitch.util.argparser import ArgParser
QUIET_PROP = "output.quiet"
VERBOSE_PROP = "output.verbose"
DEBUG_PROP = "output.debug"
QUIET_FLAG = "--quiet"
VERBOSE_FLAG = "--verbose"
DEBUG_FLAG = "--debug"
# applications that use this output framework can have their log file usage
# automatically handled by this flag.
LOG_FILENAME_FLAG = "--log-filename"
LOG_FILENAME_PROP = "output.auto.logfile"
# The verbosity level string to apply to this log file.
# Default (if unset) is whatever the screen's level is.
LOG_VERBOSITY_FLAG = "--log-level"
LOG_VERBOSITY_PROP = "output.auto.level"
# register the verbose level
VERBOSE = 15
addLevelName(VERBOSE, "VERBOSE")
# when the program terminates, clean up logs as best as possible
atexit.register(shutdown)
def attachOutputArgParser(properties):
""" Given a Properties object, attach an arg parser that will use standard
command-line flags to modify the above properties. These standard
properties govern our use of the stdout stream which is set up by
the setupConsole() method """
argMap = {}
# Screen verbosity level arguments
argMap["-q"] = QUIET_PROP
argMap["-v"] = VERBOSE_PROP
argMap[DEBUG_FLAG] = DEBUG_PROP
argMap[QUIET_FLAG] = QUIET_PROP
argMap[VERBOSE_FLAG] = VERBOSE_PROP
booleans = [ DEBUG_FLAG, QUIET_FLAG, VERBOSE_FLAG, "-q", "-v" ]
argMap[LOG_FILENAME_FLAG] = LOG_FILENAME_PROP
argMap[LOG_VERBOSITY_FLAG] = LOG_VERBOSITY_PROP
argParser = ArgParser(argMap, booleans)
properties.addArgParser(argParser)
initCalled = False
def initLogging():
""" This should be called absolutely first in the program. This
sets the logging system to be silent; you then call setupConsole()
after you've determined what log level you want. The reason for
this is because basicConfig() will be called automatically if you
call any other logging methods; then you do not have access to
the default handle to reconfigure it later. """
global initCalled
# set up the basic configuration to /dev/null
basicConfig(level=CRITICAL, format="%(message)s", \
filename="/dev/null", filemode="w")
initCalled = True
def getDefaultLogLevel(properties):
""" Returns the log level specified by the properties given """
if properties.getBoolean(QUIET_PROP):
return CRITICAL
elif properties.getBoolean(DEBUG_PROP):
return DEBUG
elif properties.getBoolean(VERBOSE_PROP):
return VERBOSE
else:
return INFO
# private internal persistent state for setupConsole
# (and how it interacts with ensureConsole)
consoleHandler = None
curConsoleLevel = None
def setupConsole(properties):
""" Given a properties file, set up the logging module to take over
stdout, use a reasonable format, and pick a default log level.
This must be called every time we change the values of the properties
which govern logging, for those properties to take effect. (An
equally valid method is to just call getLogger().setLevel(newlevel).
If properties is not modified, calling this method with the same
properties object multiple times is idempotent.
This will also look for the presence of auto log file properties. If
these are set (and no auto log file was yet installed), this will
install the auto logfile. If an auto logger is already installed,
changing the properties here will have no effect. You should
use getAutoFileHandler() to manipulate the handler it installs directly.
"""
global consoleHandler
global curConsoleLevel
global initCalled
if not initCalled:
initLogging()
if properties == None:
defaultLvl = curConsoleLevel
else:
defaultLvl = getDefaultLogLevel(properties)
# Set the logger to pass everything through it; we do the filtering
# at the handlers.
getLogger().setLevel(DEBUG)
if defaultLvl != curConsoleLevel:
if consoleHandler != None:
consoleHandler.setLevel(defaultLvl)
curConsoleLevel = defaultLvl
if consoleHandler == None:
formatter = Formatter("%(message)s")
# Create a console logger
consoleHandler = StreamHandler(sys.stdout)
consoleHandler.setLevel(defaultLvl)
consoleHandler.setFormatter(formatter)
# and attach it to the root logger
getLogger().addHandler(consoleHandler)
if properties != None:
setupAutoFileLogging(properties)
def ensureConsole():
""" called by the println*() methods below to ensure that we have
a console ready and waiting for us """
if consoleHandler == None:
setupConsole(None)
def installFileLogger(filename, level=None):
""" install a handler on the root logger to output to a particular
file. Uses the provided level. If this is None, then use the
curConsoleLevel """
# TODO(aaron): Consider using TimedRotatingFileHandler instead
global curConsoleLevel
if level == None:
ensureConsole()
level = curConsoleLevel
handler = FileHandler(filename)
handler.setFormatter(Formatter(
"[%(asctime)s] %(levelname)s %(name)s : %(message)s"))
handler.setLevel(level)
getLogger().addHandler(handler)
return handler
# if we automatically install a Handler to log to a file, stash it here.
autoFileHandler = None
def getAutoFileHandler():
""" Return the automatically-installed root file log handler, if any """
global autoFileHandler
return autoFileHandler
def setupAutoFileLogging(properties):
""" Called by setupConsole() to automatically set up a FileHandler for
the root level logger, if the user provided us with the appropriate
command line flags / properties. If the automatic file handler is
already in place, repeated calls to this method do nothing. (You
should use getAutoFileHandler() to get the handler and change its
settings yourself. """
if getAutoFileHandler() != None:
# one's already installed. Do nothing more.
return
autoFilename = properties.getProperty(LOG_FILENAME_PROP)
if autoFilename == None:
# no auto logfile requested.
return
logLevelName = properties.getProperty(LOG_VERBOSITY_PROP)
if logLevelName == None:
# this wasn't set. Grab the default.
logLevelName = getDefaultLogLevel(properties)
# if logLevelName was set programmatically, it might be an actual
# integer log level rather than a string. If so, just use that.
logLevel = None
try:
if logLevelName == int(logLevelName):
logLevel = logLevelName # yup
except ValueError:
pass # no, it was a string.
if logLevel == None:
logLevel = getLevelName(logLevelName)
# getLevelName() will return a string "Level foo" if this is not a
# registered level. Test this by making sure we got a real integer back.
try:
logLevelInt = int(logLevel)
logLevelErr = False
except ValueError:
# The provided level string is invalid. Flag the error here (log it to the
# file itself, later), and use the user's screen logging level.
logLevelInt = getDefaultLogLevel(properties)
logLevelErr = True
# actually install the log
global autoFileHandler
autoFileHandler = installFileLogger(autoFilename, logLevelInt)
printlnDebug("Opened log file " + autoFilename \
+ " for logging at level " + str(logLevelName))
if logLevelErr:
printlnError("No such log level " + str(logLevelName) \
+ " for --log-level; using default level of: " \
+ str(getLevelName(logLevelInt)))
# The following methods should be used instead of the 'print' statement
# throughout our code base, if you want something to go to the output
# stream as well as any underlying logs.
def printlnError(thing):
ensureConsole()
error(str(thing))
def printlnInfo(thing):
ensureConsole()
info(str(thing))
def printlnVerbose(thing):
ensureConsole()
log(VERBOSE, str(thing))
def printlnDebug(thing):
ensureConsole()
debug(str(thing))
def printlnLevel(level, thing):
ensureConsole()
log(level, str(thing))
| UTF-8 | Python | false | false | 11,063 | py | 66 | output.py | 60 | 0.725662 | 0.724758 | 0 | 331 | 32.416918 | 80 |
dougheea/ECE434 | 10,806,137,741,486 | 4a828325ba7c16ae6394da5785159dc67ed6af26 | 609e140a7b7ea757ce573e945a9eb5d109d77936 | /hw07/etch_a_sketch_blynk.py | 66900fb886b1df3837bda68393997e63c766dec9 | []
| no_license | https://github.com/dougheea/ECE434 | a09cba79ece7c17010e3467d7eb4f8e3cec31b9f | 05e98fefd90c3d8117b38ddb7a0b909c5d0a9e60 | refs/heads/master | 2023-01-20T11:46:08.328136 | 2020-11-16T18:25:19 | 2020-11-16T18:25:19 | 292,934,476 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# From: https://towardsdatascience.com/python-webserver-with-flask-and-raspberry-pi-398423cc6f5d
'''
Raspberry Pi GPIO Status and Control
'''
import sys
import numpy as np
import Adafruit_BBIO.GPIO as GPIO
import time
import smbus
import os
import blynklib
import blynktimer
global newcur_x
global newcur_y
global cur_x
global cur_y
global lightboard
bus = smbus.SMBus(2) # Use i2c bus 2
matrix = 0x70 # Use address 0x70
# Get the autherization code (See setup.sh)
BLYNK_AUTH = os.getenv('BLYNK_AUTH')
# Initialize Blynk
blynk = blynklib.Blynk(BLYNK_AUTH)
# create timers dispatcher instance
timer = blynktimer.Timer()
print("Welcome to Etch-A-Sketch! To start playing simply enter the dimensions ",
"of the board you wish to play on. Then you will be prompted which direction ",
"you would like to move in by using the buttons. The | shows where you currently",
"are. You can clear the board by pressing the clear button. Enjoy! \n")
x =8
y =9
#an empty board is generated
lightboard = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
print("you can move left, right, up, down, or shake to clear")
newcur_y =1
newcur_x = 0
cur_x = 0
cur_y =1
lightboard[cur_x*2] = 0x80 #initialize starting position of cursor
bus.write_i2c_block_data(matrix, 0, lightboard) #sets the lights on the matrix
print(cur_x)
@blynk.handle_event('write V0')
def move_left(pin, value):
global newcur_x
global newcur_y
global cur_x
global cur_y
global lightboard
if cur_x == 0: #edge detection & correction
newcur_x = x-1
else:
newcur_x = cur_x - 1 #adjusting the coordinates of the cursor
cur_x=newcur_x #updating the cursor position
cur_y=newcur_y
    lightboard[2*cur_x]=lightboard[2*cur_x] | (1<<(8-cur_y)) #uses bit shifting to find the right row to light up
    bus.write_i2c_block_data(matrix, 0, lightboard) #lights up the designated LED green
print("moving left!")
@blynk.handle_event('write V1')
def move_right(pin, value):
global newcur_x
global newcur_y
global cur_x
global cur_y
global lightboard
if cur_x == x-1:
newcur_x = 0
else:
newcur_x = cur_x + 1
cur_x=newcur_x
cur_y=newcur_y
lightboard[2*cur_x]=lightboard[2*cur_x] | (1<<(8-cur_y))
bus.write_i2c_block_data(matrix, 0, lightboard)
print("moving right!")
@blynk.handle_event('write V2')
def move_up(pin, value):
global newcur_x
global newcur_y
global cur_x
global cur_y
global lightboard
if cur_y == 1:
newcur_y = y-1
else:
newcur_y = cur_y - 1
cur_x=newcur_x
cur_y=newcur_y
lightboard[2*cur_x]=lightboard[2*cur_x] | (1<<(8-cur_y))
bus.write_i2c_block_data(matrix, 0, lightboard)
print("moving up!")
@blynk.handle_event('write V3')
def move_down(pin, value):
global newcur_x
global newcur_y
global cur_x
global cur_y
global lightboard
if cur_y == y-1:
newcur_y = 1
else:
newcur_y = cur_y + 1
cur_x=newcur_x
cur_y=newcur_y
lightboard[2*newcur_x]=lightboard[2*newcur_x] | (1<<(8-newcur_y))
bus.write_i2c_block_data(matrix, 0, lightboard)
print("moving down!")
@blynk.handle_event('write V4')
def clear(pin, value):
global newcur_x
global newcur_y
global cur_x
global cur_y
global lightboard
lightboard= [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
lightboard[2*cur_x]=lightboard[2*cur_x] | (1<<(8-cur_y)) #keeps cursor where it was
bus.write_i2c_block_data(matrix, 0, lightboard)
print("clearing the board!")
#return render_template('index3.html') #write the info to the page
while True:
blynk.run()
time.sleep(0.2) | UTF-8 | Python | false | false | 4,011 | py | 36 | etch_a_sketch_blynk.py | 19 | 0.635253 | 0.589629 | 0 | 152 | 25.394737 | 112 |
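Every move handler repeats the same bit math; a hypothetical helper (not in the original file) distils it, assuming an HT16K33-style bicolor matrix where the even-indexed bytes of the 16-byte buffer hold the green rows:

```python
# Hypothetical helper capturing the bit math used in each move handler;
# the HT16K33 green-row layout (even-indexed buffer bytes) is an assumption.
def set_pixel(board, x, y):
    board[2 * x] |= 1 << (8 - y)  # select row byte, then set the column bit
    return board
```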
huzuohuyou/AutoArticle | 8,263,517,091,827 | 70e139a652c0ce7fedd6ad72ae15df811c94d01f | b084ef3bc3c63a11699d7c3bac3481bc1e16ceaa | /washManuscript.py | 2f496c7282c08d925fda6f8215df4f7b17fb7931 | []
| no_license | https://github.com/huzuohuyou/AutoArticle | f1cdb8b8a67117dafaafa9fd1857255d9a00321c | 459b27af19a5e6226597d83be7ab268a4acbfe25 | refs/heads/master | 2022-12-15T05:47:10.326543 | 2020-09-18T09:01:48 | 2020-09-18T09:01:48 | 296,262,116 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import urllib
import http.cookiejar
import ssl
import requests
import json
def washAiticle(article):
headers = {
'POST http':'http://18.217.155.9/api/open/xi HTTP/1.1',
'Host':'18.217.155.9',
'Connection':'keep-alive',
'Content-Length':'733',
    'Origin':'http://mutou888.com',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.75 Safari/537.36',
'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',
'Accept':'application/json, text/javascript, */*; q=0.01',
'Referer':'http://mutou888.com/projects/guling/index.html',
'Accept-Encoding':'gzip, deflate',
'Accept-Language':'zh-CN,zh;q=0.9,en;q=0.8'}#\
#'pgc_id=6873288350631985667&source=0&content=%3Cp%3E2e%3C%2Fp%3E&title=11111&search_creation_info=%7B%22abstract%22%3A%22%22%7D&title_id=1600311678909_1678044895007756&extra=%7B%22content_word_cnt%22%3A2%2C%22gd_ext%22%3A%7B%22entrance%22%3A%22hotspots%22%2C%22from_page%22%3A%22publisher_mp%22%2C%22enter_from%22%3A%22PC%22%2C%22device_platform%22%3A%22mp%22%2C%22is_message%22%3A0%7D%7D&mp_editor_stat=%7B%7D&educluecard=&draft_form_data=%7B%22coverType%22%3A2%7D&pgc_feed_covers=%5B%5D&claim_origin=0&origin_debut_check_pgc_normal=0&is_fans_article=0&govern_forward=0&praise=0&disable_praise=0&extern_link=&article_ad_type=2&tree_plan_article=0&activity_tag=0&trends_writing_tag=0&community_sync=0&save=0&timer_status=0&timer_time='}
data = {'content': article,}
r = requests.post('http://18.217.155.9/api/open/xi', data=data, headers=headers)
return r.text
json_str = washAiticle('6873288350631985630')
data = json.loads(json_str)
print(data['msg']) | UTF-8 | Python | false | false | 1,732 | py | 4 | washManuscript.py | 4 | 0.702079 | 0.558314 | 0 | 29 | 58.758621 | 737 |
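Since the service replies with JSON carrying a `msg` field, a slightly more defensive variant of the same call is sketched below; the timeout and error handling are assumptions, not documented behaviour of the endpoint:

```python
# Defensive wrapper sketch around the same POST call; the timeout and
# error handling are assumptions, not documented API behaviour.
import requests

def wash_article_safe(article):
    resp = requests.post("http://18.217.155.9/api/open/xi",
                         data={"content": article}, timeout=10)
    resp.raise_for_status()          # surface HTTP errors early
    try:
        return resp.json().get("msg")
    except ValueError:               # body was not valid JSON
        return None
```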
lindo-zy/leetcode | 7,885,559,959,398 | 5335188e74c75407977102aec6bb4d706ba4c10a | a606893da1e354c7c617d0c9247b23118be2813a | /easy/e15.py | 222bdef06016c055b462096a9410bc86373da2ed | []
| no_license | https://github.com/lindo-zy/leetcode | 4ce6cb9ded7eeea0a6953b6d8152b5a9657965da | f4277c11e620ddd748c2a2f3d9f5f05ee58e5716 | refs/heads/master | 2023-07-22T06:19:00.589026 | 2023-07-16T12:35:14 | 2023-07-16T12:35:14 | 229,958,065 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python3
# -*- coding:utf-8 -*-
class Solution:
    def plusOne(self, digits):
        # join the digits into an integer, add one, then split back into digits
        return list(map(int, str(int(''.join([str(i) for i in digits])) + 1)))
if __name__ == '__main__':
s = Solution()
digits = [1, 2, 3] # [1,2,4]
digits = [4, 3, 2, 1] # [4,3,2,2]
# digits = [0] # [1]
# digits = [9, 9, 9] # [1,0,0,0]
result = s.plusOne(digits)
print(result)
| UTF-8 | Python | false | false | 402 | py | 324 | e15.py | 321 | 0.487562 | 0.422886 | 0 | 15 | 25.8 | 78 |
CamiPon/MemPy | 4,947,802,343,278 | 811bc927e880900aeec4ae73dc2d212e92e0073d | 028bf06db19b584bc6322d8427bb27bf77f54469 | /ActividadGrupal1/src/windows/v_configuraciones.py | 9136cb6b5d36a901ea3282b587d250f0fa2f7623 | []
| no_license | https://github.com/CamiPon/MemPy | 241189c0a16cdd27b6f7296d4bd295003438d17e | 46eaa106709e8fdd46d7f1c4b82a1fddaaa9d0dc | refs/heads/master | 2023-06-05T23:32:23.270248 | 2021-06-22T02:45:48 | 2021-06-22T02:45:48 | 361,834,846 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import PySimpleGUI as sg
from ..windows import Utilidades as u
def build(configuracion):
pad_t = ((0,0),(0,15))
pad_i = ((10,0),(0,15))
l_cont_form = [
u.texts("Textos",17, pad = pad_t),
u.texts("Tiempo maximo",17, pad = pad_t) + [sg.Spin([i for i in range(120,300)], initial_value= configuracion.tiempo, font = ("Verdana"), size = (3,1), key = "-TIEMPO-",pad = pad_i )],
u.texts("Cantidad de casillas",17, pad = pad_t) + [sg.Combo(["8x8", "10x10", "12x12"],default_value = configuracion.cant_casillas,font = ("Verdana"),key = "-CASILLAS-", pad = pad_i )],
u.texts("Cantidad de coincidencias",17, pad = pad_t) + [sg.Spin([i for i in range(1,4)], initial_value= configuracion.coicidencias, font = ("Verdana"), size = (3,1),key = "-COINCIDENCIAS-", pad = pad_i )],
u.texts("Tipo de casillas",17, pad = pad_t) + [sg.Combo(["Palabras", "Imagenes", "Ambas"],default_value = configuracion.tipo_elementos,font = ("Verdana"),key = "-ELEMENTOS-", pad = pad_i )],
u.texts("Estilos",17, pad = pad_t) + [sg.Combo(["t1", "t2", "t3", "t4", "t5"],default_value = configuracion.estilo,font = ("Verdana"), size = (15,1),key = "-ESTILO-", pad = pad_i )],
u.texts("Ayudas",17, pad = pad_t) + [sg.Combo(["Si", "No"] , default_value= configuracion.ayudas, font = ("Verdana"), size = (3,1),key = "-AYUDAS-", pad = pad_i )],
]
l_cont = [
u.texts("Configuraciones",25,pad = ((0,0),(20,16))),
[sg.Column(l_cont_form, background_color="#536162", element_justification="l",pad = pad_t)],
u.buttons("GUARDAR",14,"-GUARDAR-", pad =((10,10),(0,10)), size = (30,1)),
u.buttons("VOLVER",13,"-VOLVER-",pad =((10,20),(0,10)),size = (15,1)) + u.buttons("RESTABLECER",13,"-RESTABLECER-",pad =((0,10),(0,10)),size = (15,1)),
]
layout = [
[sg.Text("MemPy", font=("Helvetica", 45), text_color="#f3f4ed",background_color="#424642",pad = ((0,0),(0,20)) )],
[sg.Column(l_cont, background_color="#536162", element_justification="c", pad=(0,0))]
]
return sg.Window("MemPy", layout,background_color="#424642", element_justification="c", margins = (20,20))
"""window = build()
while True:
event, values = window.read()
if event == "OK" or event == sg.WIN_CLOSED:
break
window.close()""" | UTF-8 | Python | false | false | 2,256 | py | 17 | v_configuraciones.py | 11 | 0.595745 | 0.534131 | 0 | 39 | 56.871795 | 209 |
caotianwei/learning-ML-DL | 12,799,002,577,755 | 5f57d76817cb7f0784637daec92ac9336c643241 | c8d29ad4dc1835ce4d4af1d76262cd01aa89178e | /mnist_softmax.py | a1f6b9d18adc98830e7d434cf83333b98dd08ae0 | []
| no_license | https://github.com/caotianwei/learning-ML-DL | e60b86474d82574e5f6796638463fb5e3426409d | f62b43a92bdbfead5127278a026f8f90998e7c0e | refs/heads/master | 2021-01-22T21:38:24.120814 | 2017-03-19T12:44:43 | 2017-03-19T12:44:43 | 85,457,493 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('MNIST_data/', one_hot=True)
X = tf.placeholder(tf.float32, [None, 28 * 28])
W = tf.Variable(tf.zeros([28 * 28, 10]))
b = tf.Variable(tf.zeros([10]))
Y = tf.nn.softmax(tf.matmul(X, W) + b)
Y_ = tf.placeholder("float", [None, 10])
cross_entropy = - tf.reduce_sum(Y_ * tf.log(Y))
learning_rate = 0.01
epoches = 2000
batch_size = 100
train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(cross_entropy)
init = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init)
for epoch in range(epoches):
xs, ys = mnist.train.next_batch(batch_size)
_, c = sess.run([train_step, cross_entropy], feed_dict={X : xs, Y_: ys})
total_batch = int(mnist.train.num_examples / batch_size)
if epoch % 10 == 0:
print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(c/total_batch))
correct_predict = tf.equal(tf.argmax(Y,1), tf.argmax(Y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_predict, "float"))
result = sess.run(accuracy, feed_dict={X : mnist.test.images, Y_ : mnist.test.labels})
print("result=",result)#about 0.91 | UTF-8 | Python | false | false | 1,183 | py | 3 | mnist_softmax.py | 2 | 0.67202 | 0.639899 | 0 | 34 | 33.823529 | 86 |
marceljungle/label-releases-explorer | 18,554,258,753,805 | 6ef552ee959e0a91f1d662bcdcf6829c3497d80d | a7df89aaf01a0e449aff006b073c502ed95cb1ce | /principal/models.py | caa71ef755de94123fea689a5509e77c78462cc6 | []
| no_license | https://github.com/marceljungle/label-releases-explorer | 3aa26f88be89790f91889f2cd43b3ca1f8bdce4e | c70b68e83d0150b2fec9e2061bafde8f4395f893 | refs/heads/master | 2023-06-03T23:38:30.106815 | 2021-06-21T09:37:02 | 2021-06-21T09:37:02 | 376,323,685 | 0 | 0 | null | false | 2021-06-21T09:37:03 | 2021-06-12T15:27:27 | 2021-06-19T15:22:09 | 2021-06-21T09:37:02 | 12,691 | 0 | 0 | 0 | JavaScript | false | false | # encoding:utf-8
from django.db import models
class ReleasesBeatport(models.Model):
artist = models.TextField(verbose_name='Artist')
catalog_number = models.TextField(verbose_name='Catalog Number')
title = models.TextField(verbose_name='Title')
year = models.TextField(verbose_name='Year')
image = models.TextField(verbose_name='Image')
def __str__(self):
return self.catalog_number
class Meta:
ordering = ('-year', )
class ReleasesDiscogs(models.Model):
artist = models.TextField(verbose_name='Artist')
catalog_number = models.TextField(verbose_name='Catalog Number')
title = models.TextField(verbose_name='Title')
year = models.TextField(verbose_name='Year')
image = models.TextField(verbose_name='Image')
def __str__(self):
return self.catalog_number
class Meta:
ordering = ('-year', )
class ReleasesJuno(models.Model):
artist = models.TextField(verbose_name='Artist')
catalog_number = models.TextField(verbose_name='Catalog Number')
title = models.TextField(verbose_name='Title')
year = models.TextField(verbose_name='Year')
image = models.TextField(verbose_name='Image')
def __str__(self):
return self.catalog_number
class Meta:
ordering = ('-year', )
class AllReleases(models.Model):
artist = models.TextField(verbose_name='Artist')
catalog_number = models.TextField(verbose_name='Catalog Number')
title = models.TextField(verbose_name='Title')
year = models.TextField(verbose_name='Year')
image = models.TextField(verbose_name='Image')
def __str__(self):
return self.catalog_number
class Meta:
ordering = ('-year', )
| UTF-8 | Python | false | false | 1,716 | py | 17 | models.py | 14 | 0.672494 | 0.671911 | 0 | 58 | 28.586207 | 68 |
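The four models repeat the same fields and `Meta`; a sketch of an abstract base they could share follows. `ReleaseBase` is a hypothetical name, not part of the original app:

```python
# Sketch of an abstract base for the four near-identical models above;
# "ReleaseBase" is a hypothetical name, not in the original app.
from django.db import models

class ReleaseBase(models.Model):
    artist = models.TextField(verbose_name='Artist')
    catalog_number = models.TextField(verbose_name='Catalog Number')
    title = models.TextField(verbose_name='Title')
    year = models.TextField(verbose_name='Year')
    image = models.TextField(verbose_name='Image')

    class Meta:
        abstract = True          # children inherit fields and ordering
        ordering = ('-year', )

    def __str__(self):
        return self.catalog_number
```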
260980/260980_Daily_Commits | 17,592,186,083,943 | e2dd7f855d9e0c1f46bf03a81319d2fe8ea9aa43 | d1a7531c04b08e133d39b8cad3e2b561236b85d3 | /Assignment/If-elif-else/If-elif-else_2.py | 722635cbe9dbb9d0e52531a78acf7f8da9f45917 | []
| no_license | https://github.com/260980/260980_Daily_Commits | 8759d81e703b56a96677f9697851cb68088e8f7e | 20e38e5a20cf51e6c941bf95696c5aa84f1a7849 | refs/heads/master | 2023-04-03T13:48:07.010808 | 2021-04-25T15:00:08 | 2021-04-25T15:00:08 | 359,049,154 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Write a python program to check the user input abbreviation.
str1 = input('Enter the abbreviation')
if str1 == 'lol':
print("laughing out loud")
elif str1 == 'rofl':
print("rolling on the floor laughing")
elif str1 == 'lmk':
print("let me know")
elif str1 == 'smh':
print("shaking my head")
else:
print("Enter the correct input")
| UTF-8 | Python | false | false | 352 | py | 17 | If-elif-else_2.py | 16 | 0.661932 | 0.647727 | 0 | 12 | 28.333333 | 62 |
Dodzik/Language_Python | 19,585,050,900,473 | 2d044c7eb7c16c307f7efcebec7d6f40a59a6b6d | 8d6b9b914f91ed4dfe7000cdca764a35f0ff7daf | /lab09/mojmodul.py | a7985ccde7a06acd9eb70d26eb41d56e8ea3a6b7 | []
| no_license | https://github.com/Dodzik/Language_Python | 7c991226284d4085fd9d22b8a8cbd5411053ca8a | bf869322e739e6b5ef4c96b61e702168c4aaf629 | refs/heads/main | 2023-05-08T00:05:59.909117 | 2021-05-19T17:23:05 | 2021-05-19T17:23:05 | 368,875,667 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import random
import matplotlib.pyplot as plt
def fun1 (n,los):
x = random.random()
y = random.random()
argumenty=[]
wartosci=[]
with open('wyniki1.txt','w') as f:
f.write(str(x) + " "+ str(y)+'\n')
argumenty.append(x)
wartosci.append(y)
for i in range(n):
chos = random.choices(los,[1,7,7,85])
chos = chos[0]
x,y = chos[0]*x+chos[1]*y+chos[2],chos[3]*x+chos[4]*y+chos[5]
argumenty.append(x)
wartosci.append(y)
f.write(str(x) + " "+ str(y)+'\n')
plt.plot(argumenty,wartosci,'bo', markersize='0.1' )
plt.savefig("zad1.png")
import scipy.integrate
def fun2(funkcja,q,p):
    wynik_squad = scipy.integrate.quad(funkcja,q,p)
    with open('wyniki2.txt','w') as f:
        f.write("expected result: \n"+str(wynik_squad)+'\n')
        t=0
        step=0.0001
        min= 0
        max= 0
        lso = q
        # find the largest and smallest function values on [q, p]
        while lso <p:
            if funkcja(lso)>max:
                max = funkcja(lso)
            if funkcja(lso)<min:
                min = funkcja(lso)
            lso = lso + step
        num_iteration =0
        while True:
            x = random.uniform(q,p)
            y = random.uniform(min, max)
            if 0 < y and y <= funkcja(x):
                t = t+1
            if y >= funkcja(x) and y < 0:
                t = t-1
            num_iteration = num_iteration + 1
            # hit-or-miss estimate: sampling-box area (p-q)*(max-min)
            # times the signed hit fraction t/num_iteration
            estimate = (p - q)*(max - min)*t/num_iteration
            if wynik_squad[0] - step < estimate and estimate < wynik_squad[0] + step:
                f.write("result from function:\n" + str(estimate) +"\nnumber of iterations "+str(num_iteration))
                break
def fun3(funkcja,q,p):
    wynik_squad = scipy.integrate.quad(funkcja,q,p)
    with open('wyniki3.txt','w') as f:
        f.write("expected result: \n"+str(wynik_squad)+'\n')
        step=0.0001
        suma=0
        num_iteration =0
        while True:
            x = random.uniform(q,p)
            suma = suma + funkcja(x)
            num_iteration = num_iteration + 1
            # mean-value estimate: interval length (p-q) times the
            # average sampled value
            estimate = (p - q)*suma/num_iteration
            if wynik_squad[0] - step < estimate and estimate < wynik_squad[0] + step:
                f.write("result from function " + str(estimate) + " number of iterations "+str(num_iteration))
                break | UTF-8 | Python | false | false | 2,069 | py | 13 | mojmodul.py | 11 | 0.627422 | 0.604651 | 0 | 76 | 26.171053 | 150 |
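A quick way to sanity-check both estimators is an integral with a known value; this usage sketch assumes the file is importable as `mojmodul` and picks sin(x) on [0, pi] (exact value 2) purely as an example:

```python
# Usage sketch: run both Monte Carlo estimators on a known integral.
# The function choice is an arbitrary example, not from the original code.
import math
import mojmodul

mojmodul.fun2(math.sin, 0, math.pi)  # hit-or-miss estimate -> wyniki2.txt
mojmodul.fun3(math.sin, 0, math.pi)  # mean-value estimate  -> wyniki3.txt
```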
mtlynch/GreenPiThumb | 11,278,584,156,256 | 9e974ec7b6f6ae61837e14508ab2870c19b4065a | 48290742dd0bb2cf6186bdd891bf0728dcf53dd1 | /greenpithumb/clock.py | f8bd42e91c1991e457d9ca29bbae34b57850323e | [
"Apache-2.0"
]
| permissive | https://github.com/mtlynch/GreenPiThumb | 08363f660513ccdc8e7956936da0471da8fa328c | e824f3d3b5298b6fbbff97e1a709929d27d294e7 | refs/heads/master | 2021-01-17T23:46:32.632630 | 2017-01-10T17:27:20 | 2017-01-10T17:27:20 | 56,197,493 | 3 | 0 | null | true | 2016-05-14T17:47:38 | 2016-04-14T01:21:54 | 2016-04-17T19:11:02 | 2016-05-14T17:46:12 | 34 | 0 | 0 | 0 | Python | null | null | import datetime
import time
import pytz
import tzlocal
class Clock(object):
"""A wrapper for managing clock time functions."""
def wait(self, wait_time_seconds):
"""Wait for the specified number of seconds.
Args:
wait_time_seconds: Number of seconds to wait.
"""
if wait_time_seconds < 0.0:
raise ValueError('Wait time cannot be negative: %f' %
wait_time_seconds)
time.sleep(wait_time_seconds)
def now(self):
return datetime.datetime.now(tz=pytz.utc)
class LocalClock(Clock):
"""An implementation of Clock that operates in the local time zone."""
def now(self):
time_utc = super(LocalClock, self).now()
return time_utc.astimezone(tzlocal.get_localzone())
| UTF-8 | Python | false | false | 801 | py | 40 | clock.py | 36 | 0.619226 | 0.616729 | 0 | 31 | 24.83871 | 74 |
rileyjohngibbs/ICS-PA-2018-2019 | 1,125,281,437,174 | 59dae669b495b663de1010f0bae396c4efd4443c | 01b402a1637918b27ac5343c757d551c49b1acc4 | /inclass_scratchwork/2018-11-30_pokemon.py | 37faaeaf0b10feb6978fc3ce03842427818b6abe | []
| no_license | https://github.com/rileyjohngibbs/ICS-PA-2018-2019 | 7670d7f1eeb87d62af108a08326c953625071a1f | b53def4fd655a2defb09006ad8951d78a7fc3402 | refs/heads/master | 2020-03-28T11:56:05.779537 | 2019-06-13T16:45:52 | 2019-06-13T16:45:52 | 148,257,287 | 0 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null | from random import randint
STRENGTHS = {
"grass": "water",
"water": "fire",
"fire": "grass"
}
class Pokemon(object):
def __init__(self, name, type_):
self.name = name
self.type_ = type_
self.hp = 50
def attack(self, target):
base_damage = self.roll_damage()
damage = self.modify_damage(base_damage, target)
print(f"{self.name} attacks {target.name} for {damage} damage!")
target.take_damage(damage)
def roll_damage(self):
return randint(1, 10) + randint(1, 10)
    def modify_damage(self, base_damage, target):
        # Double damage if we're strong against the target's type, half damage
        # if the target is strong against ours; otherwise the roll is unchanged.
        strength = STRENGTHS[self.type_]
        target_strength = STRENGTHS[target.type_]
        damage = base_damage
        if strength == target.type_:
            damage = base_damage * 2
        if target_strength == self.type_:
            damage = int(base_damage / 2)
        return damage
def take_damage(self, damage):
self.hp = max(self.hp - damage, 0)
if self.hp == 0:
print(f"{self.name} has fainted!")
bulbasaur = Pokemon("Bulbasaur", "grass")
charmander = Pokemon("Charmander", "fire")
squirtle = Pokemon("Squirtle", "water") | UTF-8 | Python | false | false | 1,155 | py | 59 | 2018-11-30_pokemon.py | 36 | 0.58355 | 0.57316 | 0 | 44 | 25.272727 | 72 |
lianruiying/TranscriptProcessing | 8,315,056,730,040 | bc1a934da688c74061ad69e8445583dbc6767211 | d5e18d55b40964b5bf08e5097db127b5c6fbefee | /__init__.py | 5dcb0bb560873d297e4a1583fb074f975970cd29 | []
| no_license | https://github.com/lianruiying/TranscriptProcessing | de8c4ae7a0fd5caf5812d9e7ee4657cfaaebfd8a | 94a1c32f4cc7ab373936529df9baeee3fca63a39 | refs/heads/master | 2020-03-23T14:12:45.600063 | 2018-07-20T01:04:46 | 2018-07-20T01:04:46 | 141,663,213 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import Get_Trans
import raw_url
import urlhub
file = open("scripts.txt",'a')
index = 0
url = "http://www.ted.com/"
raw_urlhub = raw_url.raw_url(url)
urlhub = urlhub.refine(raw_urlhub)
#print(urlhub)
for url in urlhub:
try:
script = Get_Trans.transcript(url)
index += 1
file.write(format(index))
file.write(script)
    except Exception:
        print("Some kind of error occurred.")
file.close()
| UTF-8 | Python | false | false | 429 | py | 5 | __init__.py | 4 | 0.624709 | 0.620047 | 0 | 24 | 16.875 | 44 |
Feteya/AaronTools.py | 9,560,597,204,710 | 9a2e20c4ae8440bd3873e54986420aa84985dc35 | fc1ce41908b2734d30f0bf8cf9e1f9906e5bbd88 | /test/test_trajectory.py | cf82c85daa488164326792fcda32f29d5b593cf1 | []
| no_license | https://github.com/Feteya/AaronTools.py | 0acbe94e323aed4a222c920a20c4cc776d90a586 | 1e104e1ceec46486120c85e8bb0d590575ebf097 | refs/heads/master | 2020-09-06T07:35:01.477275 | 2019-11-07T21:53:56 | 2019-11-07T21:53:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
import unittest
from AaronTools.trajectory import Pathway
from AaronTools.geometry import Geometry
from AaronTools.test import TestWithTimer, prefix, rmsd_tol
from numpy.linalg import inv
from numpy import dot, finfo
class TestPathway(TestWithTimer):
t60 = Geometry(prefix + "test_files/torsion-60.xyz")
t90 = Geometry(prefix + "test_files/torsion-90.xyz")
def test_interpolating_structure(self):
#test to see if interpolated geometry is correct
ref = Geometry(prefix + "ref_files/torsion_interpolation.xyz")
S = Pathway([self.t60, self.t90])
geom = S.Geom_func(0.4)
rmsd = geom.RMSD(ref, align=True)
self.assertTrue(rmsd < rmsd_tol(ref, superLoose=True))
def test_splines_values(self):
# test cubic splines function values
# ought to have two splines:
# g(x) = -10x^3 + 15x^2
# h(x) = 10x^3 + -15x^2 + 5
ref = [0, 0.78125, 2.5, 5, 4.21875, 2.5, 0]
ref_d = [0, 5.625, 7.5, 0, -5.625, -7.5, 0]
test_t = [0, 0.125, 0.25, 0.5, 0.625, 0.75, 1]
tolerance = 50*finfo(float).eps
ev = [0, 5, 0]
m = Pathway.get_splines_mat(3)
mi = inv(m)
b = Pathway.get_splines_vector(ev)
c = dot(mi, b)
f, df = Pathway.get_E_func(c, [1, 1])
for i in range(0, len(test_t)):
v = f(test_t[i])
dv = df(test_t[i])
self.assertTrue(abs(v-ref[i]) <= tolerance)
self.assertTrue(abs(dv-ref_d[i]) <= tolerance)
def suite():
suite = unittest.TestSuite()
suite.addTest(TestPathway("test_interpolating_structure"))
suite.addTest(TestPathway("test_splines_values"))
return suite
if __name__ == "__main__":
runner = unittest.TextTestRunner()
runner.run(suite())
| UTF-8 | Python | false | false | 1,819 | py | 42 | test_trajectory.py | 39 | 0.595932 | 0.547004 | 0 | 54 | 32.685185 | 70 |
RideGreg/LeetCode | 584,115,554,105 | 0b796a4a65b149c8a22776bea9db3b9b64256277 | fc75506dc1f278585630a9d7d3b70cbb92d3d9f5 | /Python/perfect-squares.py | fc656373544214677ddf010f0f8cb1984659f7f8 | [
"MIT"
]
| permissive | https://github.com/RideGreg/LeetCode | a533b5193b2680f23c08572391eecaa3866c3cef | e1d19b5e18ece5190277317595b554ab50efb900 | refs/heads/master | 2022-08-24T16:16:03.392756 | 2022-08-12T23:17:53 | 2022-08-12T23:17:53 | 115,889,638 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Time: O(n * sqrt(n))
# Space: O(n)
# 279
# Given a positive integer n, find the least number of perfect
# square numbers (for example, 1, 4, 9, 16, ...) which sum to n.
#
# For example, given n = 12, return 3 because 12 = 4 + 4 + 4;
# given n = 13, return 2 because 13 = 4 + 9.
#
class Solution(object):
def numSquares(self, n):
"""
:type n: int
:rtype: int
"""
dp = [0]
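        # dp[i] = fewest perfect squares summing to i:
        # try every square k*k <= i and take the best dp[i - k*k] + 1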
for i in range(1, n+1):
dp.append(1 + min(dp[-k*k] for k in range(1, int(i**0.5)+1)))
return dp[n]
print(Solution().numSquares(12)) # 3, 12 = 4+4+4
print(Solution().numSquares(13)) # 2, 13 = 4 + 9 | UTF-8 | Python | false | false | 645 | py | 822 | perfect-squares.py | 821 | 0.537984 | 0.466667 | 0 | 24 | 25.916667 | 73 |
fmirandaTN/Api_TN | 3,539,053,090,500 | e114359e194368860cba66a044467a71c448971a | 0d3698d107efe9b1e17311ba5f44bbcd6ebe4b27 | /api/models/user_token.py | 7cdc67a7aed0ba99de6c45a23acbd7aaa649f2ff | []
| no_license | https://github.com/fmirandaTN/Api_TN | 5406921977011a6693d560c47ab5958544a509e4 | 5665d90d6f2bf09969bd833d64ab8a9e24957641 | refs/heads/master | 2022-12-08T23:07:01.617341 | 2020-09-17T14:07:57 | 2020-09-17T14:07:57 | 293,366,842 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from datetime import datetime, date, timedelta
from django.db import models
class UserToken (models.Model):
token = models.CharField(max_length=500)
owner = models.ForeignKey(
'api.User', related_name="token_register", on_delete=models.CASCADE, null=True)
validation = models.BooleanField(default=False)
recovery = models.BooleanField(default=False) | UTF-8 | Python | false | false | 374 | py | 161 | user_token.py | 149 | 0.745989 | 0.737968 | 0 | 9 | 40.666667 | 87 |
btg1998/Information-Systems | 13,056,700,616,473 | c271b29cfe659728e3d8cfc9c39218c2b7861209 | 7634b9e9b1b78dd08ef7a629ea7bb6b07713b9bb | /Eigen Values and Eigen Vectors.py | 8ec690593a863e148b0f78e909bcfc263df9105a | []
| no_license | https://github.com/btg1998/Information-Systems | 74e7f3a09d0f25a14d30890944ff853b9386bdc2 | 481cec62fc9510082bd5b3d24febc3f3283f1a39 | refs/heads/master | 2020-03-26T05:56:52.599814 | 2018-08-13T13:31:53 | 2018-08-13T13:31:53 | 144,582,191 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 23 19:22:15 2017
@author: bharg
"""
import scipy
from scipy import linalg
import numpy as np
a=np.array([[1,8,-9,7,5],
[0,1,0,4,4],
[0,0,1,2,5],
[0,0,0,1,-5],
[0,0,0,0,1]])
print("Determinant: ")
print(scipy.linalg.det(a))
print("Inverse: ")
print(linalg.inv(a))
lam,evec=linalg.eig(a)
print("Eigen Pairs: ")
print("Eigen Values: ")
print(lam.real)
print("Corresponding Eigen Vectors: ")
print(np.around(evec,decimals=2))
print("Transpose: ")
print(a.transpose())
| UTF-8 | Python | false | false | 587 | py | 48 | Eigen Values and Eigen Vectors.py | 41 | 0.572402 | 0.505963 | 0 | 27 | 19.666667 | 38 |
walberjose/COOL | 17,300,128,299,071 | 9559bb1bb49f1fb55e2c84d9576cd26fb65baa9d | ee340a4025f1b041410a06e1794632e1108257ad | /mininet_topo/simple_linear_2links.py | a0265690b6afebcdef452387aed2ccd138424a03 | []
| no_license | https://github.com/walberjose/COOL | 37264431b5bb07a55d194276988c3fa7967952e8 | 0e878f4a868095140a4c7757766e460abb45e81a | refs/heads/master | 2020-03-29T04:09:24.448947 | 2018-09-19T22:11:14 | 2018-09-19T22:11:14 | 149,518,099 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
"""
This example shows how to add an interface (for example a real
hardware interface) to a network after the network is created.
"""
import re
import sys
from mininet.topo import Topo
from mininet.cli import CLI
from mininet.log import setLogLevel, info, error
from mininet.net import Mininet
from mininet.link import Intf
from mininet.node import RemoteController
from mininet.link import TCLink
#from mininet.topolib import TreeTopo
from mininet.util import quietRun
from mininet.nodelib import NAT
'''
IT DOES NOT WORK APPROPRIATE BECAUSE NX DOES NOT RECOGNIZE TWO LINKS BETWEEN A PAIR OF NODES. It treats as the same.
sudo python ryu/app/COOL/mininet_topo/simple_linear_2links.py
'''
class GeneratedTopo( Topo ):
"Internet Topology Zoo Specimen."
def __init__( self, **opts ):
"Create a topology."
# Initialize Topology
Topo.__init__( self, **opts )
# add nodes, switches first...
s1 = self.addSwitch( 's1' , protocols=["OpenFlow13"])
s2 = self.addSwitch( 's2' , protocols=["OpenFlow13"])
# s3 = self.addSwitch( 's3' , protocols=["OpenFlow13"])
# s4 = self.addSwitch( 's4' , protocols=["OpenFlow13"])
# ... and now hosts
h1_host = self.addHost('h1', ip='10.0.0.01/24', mac='00:00:00:00:00:01')
h2_host = self.addHost('h2', ip='10.0.0.02/24', mac='00:00:00:00:00:02')
h3_host = self.addHost('h3', ip='10.0.0.03/24', mac='00:00:00:00:00:03')
# add edges between switch and corresponding host
self.addLink( s1 , h1_host, bw=10, delay='0.0ms')
self.addLink( s2 , h2_host, bw=10, delay='0.0ms')
self.addLink( s2 , h3_host, bw=10, delay='0.0ms')
self.addLink( s1 , s2, bw=10, delay='0.0ms')
self.addLink( s1 , s2, bw=10, delay='0.0ms')
#you can call addHost(cls=NAT...) directly if you don't like addNAT() - addNAT() is just a convenience method
#self.natIP = '10.0.0.1/24'#kwargs.pop('natIP', '10.0.0.254')
#self.connect = kwargs.pop('connect', 's1')
#self.hopts.update(defaultRoute='via ' + self.natIP)
#nat0 = self.addNode('nat0', cls=NAT, ip='10.0.0.1/24', inNamespace=False)
#self.addLink(s1, nat0)
# add edges between switches
# self.addLink( s1 , s2 , bw=10, delay='0.0ms')
# self.addLink( s2 , s3 , bw=10, delay='0.0ms')
# self.addLink( s3 , s4 , bw=10, delay='0.0ms')
# self.addLink( s4 , s1 , bw=10, delay='0.0ms')
#intfName = sys.argv[1] if len(sys.argv) > 1 else 'server1'
topos = { 'generated': ( lambda: GeneratedTopo() ) }
# def checkIntf( intf ):
# "Make sure intf exists and is not configured."
# config = quietRun( 'ifconfig %s 2>/dev/null' % intf, shell=True )
# if not config:
# error( 'Error:', intf, 'does not exist!\n' )
# exit( 1 )
# ips = re.findall( r'\d+\.\d+\.\d+\.\d+', config )
# if ips:
# error( 'Error:', intf, 'has an IP address,'
# 'and is probably in use!\n' )
# exit( 1 )
if __name__ == '__main__':
simple_linear_2links = GeneratedTopo()
setLogLevel( 'info' )
# try to get hw intf from the command line; by default, use server1
#intfName = sys.argv[ 1 ] if len( sys.argv ) > 1 else 'server1'
#info( '*** Connecting to hw intf: %s' % intfName )
#info( '*** Checking', intfName, '\n' )
#checkIntf( intfName )
info( '*** Creating network\n' )
controller = RemoteController('c0',ip='127.0.0.1', port=6633)
net = Mininet(simple_linear_2links, controller=controller, link=TCLink)#topo=TreeTopo( depth=1, fanout=2 ) )
# s1 = net.switches[0]
# _intf_linkC = Intf('linkC', node=s1)
# _intf_linkB = Intf('linkB', node=s1)
# _intf_linkA = Intf('linkA', node=s1)
#_intf_link8 = Intf('link8', node=s1)
#net.addNAT().configDefault()
#_intf_link8 = Intf('wlan0', node=s1)
# switch = net.switches[ 0 ]
# info( '*** Adding hardware interface', intfName, 'to switch',
# switch.name, '\n' )
# _intf = Intf( intfName, node=switch )
info( '*** Note: you may need to reconfigure the interfaces for '
'the Mininet hosts:\n', net.hosts, '\n' )
#net.addNAT().configDefault()
net.start()
# cmd = 'route add default gw 10.0.0.1'
# for host in net.hosts:
# host.cmd( cmd )#+ ' ' + opts + '&' )
# if host.name == "nat1":
# host.cmd('nat1 route add -net 10.0.1.0 netmask 255.255.255.0 gw 10.0.1.2')
CLI( net )
net.stop() | UTF-8 | Python | false | false | 4,545 | py | 46 | simple_linear_2links.py | 38 | 0.5967 | 0.546755 | 0 | 130 | 33.969231 | 117 |
2019-jbedu-g2/Test_Server | 11,115,375,376,139 | 5e85b06aee595018f44b5c64f413ed3e1877dd72 | e26463722143c2fd18f4e78b47e1d42aa53c6c7c | /store/store/models.py | a1de7c4e2226d0698a401e0395e7684334f483e6 | []
| no_license | https://github.com/2019-jbedu-g2/Test_Server | 5b1aeaabfc732bd8bfc61fa05c0d596447f1abc7 | a85d9f65355380f51554adf7e3ecf29d752e51aa | refs/heads/master | 2020-06-18T13:34:12.050325 | 2019-08-15T05:48:26 | 2019-08-15T05:48:26 | 196,300,820 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Make sure each ForeignKey has `on_delete` set to the desired behavior.
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
from django.db import models
class Accountdb(models.Model):
storenum = models.ForeignKey('Storedb', models.DO_NOTHING, db_column='storenum')
storeid = models.CharField(primary_key=True, max_length=20)
storepwd = models.CharField(max_length=20)
class Meta:
managed = False
db_table = 'accountdb'
class Queuedb(models.Model):
barcode = models.CharField(primary_key=True, max_length=20)
onoffline = models.BooleanField()
storenum = models.ForeignKey('Storedb', models.DO_NOTHING, db_column='storenum')
createtime = models.DateTimeField()
updatetime = models.DateTimeField(blank=True, null=True)
status = models.CharField(max_length=10)
class Meta:
managed = False
db_table = 'queuedb'
class Storedb(models.Model):
storenum = models.CharField(primary_key=True, max_length=10)
storename = models.CharField(max_length=50)
category = models.CharField(max_length=20)
latitude = models.CharField(max_length=40)
longitude = models.CharField(max_length=40)
intro = models.CharField(max_length=200, blank=True, null=True)
menu = models.CharField(max_length=300, blank=True, null=True)
inform = models.CharField(max_length=500, blank=True, null=True)
latencytime = models.CharField(max_length=10)
class Meta:
managed = False
db_table = 'storedb'
class Storeview(models.Model):
storenum = models.CharField(primary_key=True, max_length=10)
storename = models.CharField(max_length=50)
category = models.CharField(max_length=20)
latitude = models.CharField(max_length=40)
longitude = models.CharField(max_length=40)
intro = models.CharField(max_length=200, blank=True, null=True)
menu = models.CharField(max_length=300, blank=True, null=True)
inform = models.CharField(max_length=500, blank=True, null=True)
latencytime = models.CharField(max_length=10)
waitingcount = models.IntegerField(default=0)
class Meta:
managed = False
db_table = 'storeview' | UTF-8 | Python | false | false | 2,505 | py | 20 | models.py | 17 | 0.707784 | 0.687425 | 0 | 64 | 38.15625 | 104 |
akeilox/FYP-Face_Recognition_Based_Attendance_System | 18,897,856,111,838 | 50bf08058fa02f37f5a51dcd8ef0ce7638739140 | 8dbea32bdfd830782cab1ff42320d4d9fcdf70e3 | /Final Year Project - 022319/bin/Debug/benchmarker_DS6.py | ca900ff4f8f51cbecb9450ab459355d4b9cca07f | []
| no_license | https://github.com/akeilox/FYP-Face_Recognition_Based_Attendance_System | 0702c9bd17ea847eca4f3f40177b4c311141a038 | d502c82353de5f1712ba3028fd3654829b584d60 | refs/heads/master | 2020-06-06T08:04:30.479065 | 2019-02-25T13:17:03 | 2019-02-25T13:17:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
import random
import cv2
from PIL import Image
import numpy as np
import shutil
import importlib
import imp
testset_amount = 5
selected_Dataset = None
trainingSeq = 0
testSeq = 0
test_Question = []
test_Dir = []
bm = None
MajorDataset = "Dataset 5"
def fetchTrainingData(DS_DIR, TRAIN_DIR = "Training Image"):
if DS_DIR == MajorDataset and TRAIN_DIR == "Training Image":
global trainingSeq
if trainingSeq == 0:
trainingSeq = 1
return fetchTrain(DS_DIR, "Training-Pure")
elif trainingSeq == 1:
trainingSeq = 2
return fetchTrain(DS_DIR, "Training-Mix")
else:
return fetchTrain(DS_DIR, TRAIN_DIR)
def fetchTestQuestion(test_Name = "Test Image"):
if selected_Dataset == MajorDataset and test_Name == "Test Image":
global testSeq
if testSeq == 0:
testArr = fetchTest("Test 1")
testSeq = 1
return testArr
elif testSeq == 1:
testSeq = 2
return fetchTest("Test 2 - Angle")
elif testSeq == 2:
testSeq = 3
return fetchTest("Test 3 - Lighting")
else:
return fetchTest(test_Name)
def fetchTest(test_Name = "Test Image"):
global test_Question
global test_Dir
if selected_Dataset is not None:
DS_DIR = selected_Dataset
test_Dir.clear()
test_Question.clear()
test_set = []
print("** Fetching Test Images... **")
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
img_dir = os.path.join(BASE_DIR, DS_DIR)
testset_data = os.path.join(img_dir, test_Name)
total_files = len(os.listdir(testset_data)) * testset_amount
rand = random.sample(range(0, total_files), total_files)
for ran in rand:
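            # map the flat random index onto (identity folder, image index within that folder)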
iden_id = int(ran / testset_amount)
img_no = ran % testset_amount
iden_dir = os.path.join(testset_data, os.listdir(testset_data)[iden_id])
img = os.listdir(iden_dir)[img_no]
testimg_dir = os.path.join(iden_dir, img)
image = cv2.imread(testimg_dir)
test_Dir.append(testimg_dir)
test_set.append(image)
test_Question.append(os.listdir(testset_data)[iden_id].replace(" ", "-").lower())
#name = bm.testAlgo(image, DS_DIR)
print("** Fetch Completed **")
return test_set
def submitAnswer(ansArr):
global test_Question
global test_Dir
correctAns = 0
wrongAns = 0
if not test_Question:
print("Please fetch the test questions before submitting")
else:
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
wrong_dir = os.path.join(BASE_DIR, "Incorrect Answer")
if not os.path.exists(wrong_dir):
os.makedirs(wrong_dir)
else:
shutil.rmtree(wrong_dir)
os.makedirs(wrong_dir)
print("** Checking your answer **")
for x in range(len(test_Question)):
print("Question: " + test_Question[x].replace(" ", "-").lower())
if ansArr[x] is not None:
print("Answer: " + ansArr[x].replace(" ", "-").lower())
else:
print("Answer: ")
print("")
if ansArr[x] is not None and ansArr[x].replace(" ", "-").lower() == test_Question[x]:
correctAns += 1
else:
wrongAns += 1
copyDir = str(x) + " Qn-" + test_Question[x] + " Ans-" + ansArr[x].replace(" ", "-").lower()
shutil.copyfile(test_Dir[x], (wrong_dir + "\\" + str(copyDir)))
print("No of correct: " + str(correctAns))
print("No of wrong: " + str(wrongAns))
acc = (correctAns / (correctAns + wrongAns)) * 100
print("Accuracy is " + "{0:.1f}".format(acc) + "%\n")
print("Press (Function + Alt + F4) to Exit!" + "\n")
return correctAns, wrongAns, acc
def feedTestData(DS_DIR, TEST_DIR="Test Image"):
print("**Initiating Test, calling testAlgo() method **")
try:
correctAns = 0
wrongAns = 0
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
img_dir = os.path.join(BASE_DIR, DS_DIR)
testset_data = os.path.join(img_dir, TEST_DIR)
wrong_dir = os.path.join(BASE_DIR,"Incorrect Answer")
if not os.path.exists(wrong_dir):
os.makedirs(wrong_dir)
else:
shutil.rmtree(wrong_dir)
os.makedirs(wrong_dir)
print("Total Number of test = " +str(len(os.listdir(testset_data))))
total_files = len(os.listdir(testset_data)) * testset_amount
rand = random.sample(range(0, total_files), total_files)
for ran in rand:
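            # map the flat random index onto (identity folder, image index within that folder)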
iden_id = int(ran / testset_amount)
img_no = ran % testset_amount
iden_dir = os.path.join(testset_data, os.listdir(testset_data)[iden_id])
img = os.listdir(iden_dir)[img_no]
testimg_dir = os.path.join(iden_dir, img)
image = cv2.imread(testimg_dir)
name = bm.testAlgo(image, DS_DIR)
print("Question: " + os.listdir(testset_data)[iden_id].replace(" ", "-").lower())
if name is not None:
print("Answer: " + name.replace(" ", "-").lower())
else:
print("Answer: ")
print("")
if name is not None and name.replace(" ", "-").lower() == os.listdir(testset_data)[iden_id].replace(" ", "-").lower():
correctAns += 1
else:
wrongAns += 1
shutil.copyfile(testimg_dir, (wrong_dir + "\\" + img))
print("No of correct: " + str(correctAns))
print("No of wrong: " + str(wrongAns))
acc = (correctAns / (correctAns+wrongAns))*100
print("Accuracy is " + "{0:.1f}".format(acc) + "%\n")
print("Press (Function + Alt + F4) to Exit!" + "\n")
return correctAns, wrongAns, acc
except Exception as e:
print("Please ensure you code have a method name testAlgo(image)")
print (e)
def fetchTrain(DS_DIR, TRAIN_DIR = "Training Image"):
try:
imageArr = []
labelArr = []
global selected_Dataset
selected_Dataset = DS_DIR
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
img_dir = os.path.join(BASE_DIR, DS_DIR)
training_data = os.path.join(img_dir, TRAIN_DIR)
print("Preparing images for training...")
for root, dirs, files in os.walk(training_data):
for file in files:
if file.lower().endswith("png") or file.lower().endswith("jpg") or file.lower().endswith("jpeg"):
path = os.path.join(root, file)
label = os.path.basename(os.path.dirname(path)).replace(" ", "-").lower()
image = cv2.imread(path)
imageArr.append(image)
labelArr.append(label)
#bm.testAlgo(imageArr)
return imageArr, labelArr, DS_DIR
#bm.trainAlgo(imageArr,labelArr ,DS_DIR)
except Exception as e:
print(e)
#print("Please ensure you code have a method name trainAlgo(imageArray[], label[], DS_NAME)")
return None
def main():
pythonFile = input("Key in your ALGORITHM file name (without .py): ")
try:
global bm
bm = importlib.import_module(pythonFile, ".")
menu = True
spam_info = imp.find_module(pythonFile)
#print(spam_info)
print("Import ALGORITHM FILE successful")
print("**Initiating training, calling trainAlgo() method.**")
print("")
imageArr, labelArr, DS_DIR = fetchTrainingData("Dataset 6")
bm.trainAlgo(imageArr, labelArr, DS_DIR)
feedTestData("Dataset 6")
except Exception as e:
print(e)
print("Fail to import ALGORITHM file. Please check that ")
if __name__ == "__main__":
main()
| UTF-8 | Python | false | false | 7,969 | py | 35 | benchmarker_DS6.py | 1 | 0.559041 | 0.553394 | 0 | 232 | 33.293103 | 130 |
rehadhawan/WWR-Data-Vis | 19,112,604,506,577 | 43848c309bf933d2bd0d1f23b06fcf296ec55f61 | ade61fdefb397ea0944914b0250bf9314de180ca | /WWR Assignment 2 Yearly rainfall and temp (1).py | 9e16e40add0023b0a3ab2672647c9c7a73d5bf76 | []
| no_license | https://github.com/rehadhawan/WWR-Data-Vis | ba34b2a388476afbd8c13aeff48f7db377bb0312 | cf7917d2e778c4c69a591e6349df9bb43f52c353 | refs/heads/main | 2023-05-25T16:44:13.796976 | 2023-05-16T13:08:22 | 2023-05-16T13:08:22 | 305,786,538 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[14]:
import pandas as pd
import csv
path = r"C:\Users\reha_\Downloads\rainYearly.csv"
file = open(path)
df = pd.read_csv(file)
print (df)
result = df.dtypes
print(result)
df["Year"] = df[ 'Year'].astype(float)
df.dtypes
# In[15]:
path2 = r"C:\Users\reha_\Downloads\tempYearly.csv"
file1 = open(path2)
df1 = pd.read_csv(file1)
print (df1)
df1.dtypes
# In[16]:
left = pd.DataFrame(df)
right = pd.DataFrame(df1)
res = pd.merge(left, right, how='inner', on='Year')
print (res)
# In[19]:
import matplotlib.pyplot as plt
res.plot(x="Year", y=["Rainfall", "Temperature"], kind='line')
plt.show()
# In[ ]:
| UTF-8 | Python | false | false | 690 | py | 2 | WWR Assignment 2 Yearly rainfall and temp (1).py | 1 | 0.643068 | 0.617994 | 0 | 46 | 13.652174 | 65 |
WebDevBren/SimpaticoTAEServer | 6,760,278,546,655 | ff9f77ddb68546caaea7450c8cdd64e72aa63a0d | 1bcd70ca4fa48004a363061d4f312c950b56a8bd | /main_TAE_server/tests/Syntactic_Spanish_Test.py | b9630a89e6b05261b53ee1a3db38dcee45e859e1 | []
| no_license | https://github.com/WebDevBren/SimpaticoTAEServer | 5c0a4d09159100b99fb3d5f5d9d81ce54b31bb41 | 152efe99c6df8de9c538f303d734b61552bbdddb | refs/heads/master | 2021-04-12T10:51:53.942628 | 2017-08-10T13:19:34 | 2017-08-10T13:19:34 | 94,534,959 | 0 | 0 | null | true | 2017-06-16T10:47:38 | 2017-06-16T10:47:38 | 2017-01-11T13:11:56 | 2017-05-14T12:33:34 | 33,355 | 0 | 0 | 0 | null | null | null | # -*- coding: utf-8 -*-
import urllib2
url = 'http://localhost:8080/?type=syntactic&sentence=Si%20la%20persona%20beneficiaria%20abandonase%20la%20estancia%20una%20vez%20iniciada%20,%20no%20tendrá%20derecho%20a%20ningún%20tipo%20de%20devolución%20.'
content = urllib2.urlopen(url).read()
print content
| UTF-8 | Python | false | false | 308 | py | 18 | Syntactic_Spanish_Test.py | 14 | 0.777049 | 0.629508 | 0 | 9 | 32.888889 | 209 |
Swanson-Hysell/EPS88_Jupyter_Book | 12,498,354,872,039 | 276d85df77487bb64c6cededdc965a2a235db1c9 | 08c93f0d50621bb3b85bbbae753cc3b3e92de93e | /_build/jupyter_execute/folder_01/W1_tabular_data.py | 086ffcd35a5d5d5c08d64c0f983220697874ccfd | []
| no_license | https://github.com/Swanson-Hysell/EPS88_Jupyter_Book | b9244c403127fe441f0ba85c2728d7a406acac69 | 4b32ab147e1b512425663d28187f94baf32806e9 | refs/heads/master | 2023-03-15T22:33:39.505706 | 2020-11-29T17:59:29 | 2020-11-29T17:59:29 | 586,075,599 | 2 | 0 | null | true | 2023-01-06T21:54:26 | 2023-01-06T21:54:25 | 2020-11-29T17:59:42 | 2020-11-29T17:59:39 | 146,016 | 0 | 0 | 0 | null | false | false | # 1.2 What is Data Science?
**Goals:** Introduce the broad concepts of data science and data structures. Prepare to look at tabular data.
**Outline:**
* Tables
* Indexing
## Additional Assigned Reading (or Review)
Data 8 textbook "Computational and Inferential Thinking: The Foundations of Data Science" By Ani Adhikari and John DeNero [Chapter 1 Data Science](https://www.inferentialthinking.com/chapters/01/what-is-data-science.html) & [Chapter 2 Causality and Experiments](https://www.inferentialthinking.com/chapters/02/causality-and-experiments.html). This should overlap with your assigned reading for Data 8. An excerpt:
## What is Data Science?
> Data Science is about drawing useful conclusions from large and diverse data sets through exploration, prediction, and inference. Exploration involves identifying patterns in information. Prediction involves using information we know to make informed guesses about values we wish we knew. Inference involves quantifying our degree of certainty: will the patterns that we found in our data also appear in new observations? How accurate are our predictions? Our primary tools for exploration are visualizations and descriptive statistics, for prediction are machine learning and optimization, and for inference are statistical tests and models.
>Statistics is a central component of data science because statistics studies how to make robust conclusions based on incomplete information. Computing is a central component because programming allows us to apply analysis techniques to the large and diverse data sets that arise in real-world applications: not just numbers, but text, images, videos, and sensor readings. Data science is all of these things, but it is more than the sum of its parts because of the applications. Through understanding a particular domain, data scientists learn to ask appropriate questions about their data and correctly interpret the answers provided by our inferential and computational tools.
## Tables
Imagine you're preparing to collect some data. Say you play Yahtzee with your Grandma once a week and want to track your scores. What's the first thing you would do? Make a table!
| Date | My Score | G-Ma Score |
|-------|----------|------------|
| 06/06 | 150 | 230 |
| 06/13 | 165 | 166 |
| 06/20 | 136 | 198 |
| 06/27 | 195 | 260 |
| 07/04 | 168 | 154 |
| 07/11 | 138 | 520 |
| 07/18 | 220 | 320 |
| 07/25 | 196 | 175 |
| 08/01 | 127 | 188 |
A table is just a way of organizing data using columns and rows. While intuitive for a basic dataset, tables are also very powerful. Just by transferring the scores from scattered score cards to this table, a pattern begins to emerge: your grandma is way better at Yahtzee than you! This table has three columns; we will consider each column a "variable". The first column "Date" is the independent variable; it is just when we made our observations. The second and third columns are the observations of our experiment. This counting of columns leads right to our next topic: indexing.
## Indexing
Indexing is the method for navigating through a dataset. We use numbers to reference each row and column of a table. The Python language indexes starting at 0. So in the previous section I should have written: "The zeroth column 'Date' is the independent variable; it is just when we made our observations. The first and second columns are the observations of our experiment."
|Row Index| Date | My Score | G-Ma Score |
|-----|-------|----------|------------|
| 0 | 06/06 | 150 | 230 |
| 1 | 06/13 | 165 | 166 |
| 2 | 06/20 | 136 | 198 |
| 3 | 06/27 | 195 | 260 |
| 4 | 07/04 | 168 | 154 |
| 5 | 07/11 | 138 | 520 |
| 6 | 07/18 | 220 | 320 |
| 7 | 07/25 | 196 | 175 |
| 8 | 08/01 | 127 | 188 |
Both rows and columns are indexed, starting at zero.
| 0 | 1 | 2 |
|-------|----------|------------|
| 06/06 | 150 | 230 |
| 06/13 | 165 | 166 |
| 06/20 | 136 | 198 |
| 06/27 | 195 | 260 |
| 07/04 | 168 | 154 |
| 07/11 | 138 | 520 |
| 07/18 | 220 | 320 |
| 07/25 | 196 | 175 |
| 08/01 | 127 | 188 |
The convention is to put the row index first. So the `[4,1]` element of our table is `168` which is from row `07/04` and column `My Score`.
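As a quick sketch (using a plain Python list of lists to stand in for our table; a pandas DataFrame would use `.iloc` the same way):

scores = [["06/06", 150, 230],
          ["06/13", 165, 166],
          ["06/20", 136, 198],
          ["06/27", 195, 260],
          ["07/04", 168, 154]]
scores[4][1]  # row 4, column 1 -> 168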
| UTF-8 | Python | false | false | 4,551 | py | 87 | W1_tabular_data.py | 25 | 0.662272 | 0.595913 | 0 | 65 | 69 | 679 |
tmichalak/actions | 7,138,235,651,553 | 3f55c2de1bd8cbb6d96d0cde1c3bf00cdfc209f2 | 681116601c01d08fbbe190f4d5974ff0bc085d4a | /includes/actions/python/run-installed-tests/get-pytest-ini-and-run-tests.py | a7895abada5532e882cd5e8d1cdfe8c708092294 | [
"ISC"
]
| permissive | https://github.com/tmichalak/actions | 923c99956c5372b80fee91d8a3fa46ee2a35d50f | fab6b84075e4aab10b505e257cc1237b5343247d | refs/heads/main | 2023-05-26T19:32:19.261524 | 2021-04-27T16:57:42 | 2021-04-27T16:57:42 | 375,796,008 | 0 | 0 | ISC | true | 2021-06-10T18:39:38 | 2021-06-10T18:39:38 | 2021-04-29T15:26:16 | 2021-06-09T15:55:45 | 91 | 0 | 0 | 0 | null | false | false | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 The SymbiFlow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
from __future__ import print_function
import pprint
import urllib
import urllib.request
import os
import os.path
import sys
from pkg_resources import get_distribution
module_name = os.environ['PYTHON_MODULE']
# Download pytest.ini
if not os.path.exists('pytest.ini'):
dry_run = os.environ.get('CI') != 'true'
repo = os.environ['GITHUB_REPOSITORY']
sha = os.environ['GITHUB_SHA']
url = 'https://raw.githubusercontent.com/{repo}/{sha}/pytest.ini'.format(**locals())
print('Downloading', url)
data = urllib.request.urlopen(url).read().decode('utf-8')
print('Got following data')
print('-'*75)
pprint.pprint(data.splitlines())
print('-'*75)
with open('pytest.ini', 'w') as f:
f.write(data)
# Print info about installed module
module = get_distribution(module_name)
version = '.'.join(module.version.split('.'))
print()
print(module_name, 'version:', version)
print(module_name, 'location:', module.location)
print()
sys.stdout.flush()
sys.stderr.flush()
# Run pytest against the library
import pytest
sys.exit(pytest.main())
| UTF-8 | Python | false | false | 1,356 | py | 22 | get-pytest-ini-and-run-tests.py | 5 | 0.69174 | 0.683628 | 0 | 54 | 24.111111 | 88 |
niazzaki/ce888labs | 6,674,379,225,111 | d6c4fcdd76b17f56ed3a2c4bb33751ef82927566 | 4bc99a2dc0db339f3e1e3eeacb01fa48b78fb165 | /lab8/mycode.py | 2bfb8e4f73cc3d9d7c0ebe809d974a2cf5e5c096 | []
| no_license | https://github.com/niazzaki/ce888labs | 7f66d4cbfe43011defe4ae07a5c3b725c5207a8c | 3aacb083cb47c7d44dff79a16ceb724aaff44ecc | refs/heads/master | 2021-05-05T18:50:41.411265 | 2018-04-26T00:20:35 | 2018-04-26T00:20:35 | 117,615,116 | 0 | 0 | null | false | 2018-01-16T02:15:28 | 2018-01-16T01:32:43 | 2018-01-16T01:32:43 | 2018-01-16T02:15:27 | 0 | 0 | 0 | 0 | null | false | null | # __future__ imports must be the first statement in a module
from __future__ import print_function
from keras.layers import Dense, Activation, Embedding, Flatten, Input, Dropout, Conv1D, GlobalMaxPooling1D, LSTM
from keras.datasets import imdb
import numpy as np
np.random.seed(1337)
from keras.preprocessing import sequence
from keras.models import Model, Sequential
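# Note: this script uses the legacy Keras 1.x argument names (nb_words, nb_epoch,
# and dropout= on Embedding); newer Keras releases renamed or removed these.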
max_features = 20000
maxlen = 80 # cut texts after this number of words
batch_size = 32
(X_train, y_train), (X_test, y_test) = imdb.load_data(nb_words=max_features)
print(len(X_train), 'train sequences')
print(len(X_test), 'test sequences')
print (X_train[0])
print('Pad sequences (samples x time)')
X_train = sequence.pad_sequences(X_train, maxlen=maxlen)
X_test = sequence.pad_sequences(X_test, maxlen=maxlen)
print('X_train shape:', X_train.shape)
print('X_test shape:', X_test.shape)
inputs = Input(shape=(maxlen,))
x = inputs
y = Embedding(max_features, 128, dropout=0.2)(x)
z = LSTM(32)(y)
h = Dense(1)(z)
predictions = Activation("sigmoid")(h)
model = Model(input=inputs, output=predictions)
model.compile(loss='binary_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=15,
validation_data=(X_test, y_test))
score, acc = model.evaluate(X_test, y_test,
batch_size=batch_size)
print('score:', score)
print('accuracy:', acc) | UTF-8 | Python | false | false | 1,379 | py | 28 | mycode.py | 11 | 0.696157 | 0.677302 | 0 | 46 | 29 | 112 |
hn4002/streamer | 2,508,260,935,155 | 17358e6198741cb77da8ab483fd34181ab962ace | 02fc3d0fe20bb3d30b39fc92d4e595f234550b06 | /mysite/common/stockcheckup.py | 17c71b1f7a08a2b0670ab8578c3cbabf7224cf7b | []
| no_license | https://github.com/hn4002/streamer | e4e0acb292a1f35eb36c57ee2d74c30e50b40244 | 140035ebfd53a559ecd7d89fb47896d37efabd43 | refs/heads/master | 2017-12-18T15:48:19.687825 | 2017-11-13T06:34:41 | 2017-11-13T06:34:41 | 77,081,814 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import json
import os
import mysite.settings as settings
stockcheckupJsonFile = "stocks.json"
stockcheckupJsonFilePath = os.path.join(settings.WORKING_DIR, stockcheckupJsonFile)
data = None
def getStockDetail(symbol):
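    # Augments the cached stock record with two pass/fail screens:
    #   A: "95-95-30-AB" -> Comp Rating, EPS Rating, ROE + pre-tax margin, checkup grade
    #   B: "25-25-25-25" -> last-qtr EPS %, last-qtr sales %, current-yr EPS est %, current-qtr EPS est %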
loadData()
stocks = data["stocks"]
if symbol in stocks:
stock = stocks[symbol]
# Common stuffs
common = {}
stock["common"] = common
common["laStockCheckupScoreMax"] = data["laStockCheckupScoreMax"]
common["membersScoreMax"] = data["membersScoreMax"]
common["petalumaStockCheckupScoreMax"] = data["petalumaStockCheckupScoreMax"]
# Fundamentals
fundamentals = {}
stock["fundamentals"] = fundamentals
A = {}
fundamentals["A"] = A
roePlusPtm = stock["petalumaStockCheckup"]["annualROE"]["value"] + stock["petalumaStockCheckup"]["annualPreTaxMargin"]["value"]
scg = stock["petalumaStockCheckup"]["laStockCheckupGrade"]["value"]
A["key"] = "95-95-30-AB"
A["value"] = str(stock["petalumaStockCheckup"]["compRating"]["value"]) + " . " + \
str(stock["petalumaStockCheckup"]["epsRating"]["value"]) + " . " + \
str(roePlusPtm) + " . " + \
str(stock["petalumaStockCheckup"]["laStockCheckupGrade"]["value"])
if stock["petalumaStockCheckup"]["compRating"]["value"] >= 95 and \
stock["petalumaStockCheckup"]["epsRating"]["value"] >= 95 and \
roePlusPtm >= 30 and \
(scg.startswith("A") or scg.startswith("B") ):
A["passFailRating"] = "PASSED"
else:
A["passFailRating"] = "FAILED"
B = {}
fundamentals["B"] = B
B["key"] = "25-25-25-25"
B["value"] = str(stock["laStockCheckup"]["epsPctChgLastQtr"]["value"]) + " . " + \
str(stock["laStockCheckup"]["salesPctChgLastQtr"]["value"]) + " . " + \
str(stock["laStockCheckup"]["epsEstPctChgForCurrentYear"]["value"]) + " . " + \
str(stock["laStockCheckup"]["epsEstPctChgCurrentQtr"]["value"])
if stock["laStockCheckup"]["epsPctChgLastQtr"]["value"] >= 25 and \
stock["laStockCheckup"]["salesPctChgLastQtr"]["value"] >= 25 and \
stock["laStockCheckup"]["epsEstPctChgForCurrentYear"]["value"] >= 25 and \
stock["laStockCheckup"]["epsEstPctChgCurrentQtr"]["value"] >= 25:
B["passFailRating"] = "PASSED"
else:
B["passFailRating"] = "FAILED"
"""
{{stockDetails.laStockCheckup.epsEstPctChgCurrentQtr.passFailRating}}
PASSED.gif" width="11" height="12">
{% else %}
<img src="{{ STATIC_URL }}images/FAILED.gif" width="11" height="12">
{% endif %}
</td>
<tr>
<td class="criteria-description" colspan="2" >
→
{{stockDetails.petalumaStockCheckup.compRating.value}} /
{{stockDetails.petalumaStockCheckup.epsRating.value}} /
{{stockDetails.petalumaStockCheckup.annualROE.value|add:stockDetails.petalumaStockCheckup.annualPreTaxMargin.value}} /
{{stockDetails.laStockCheckupGrade}}
</td>
<td class="criteria-paasfail">
{% if stockDetails.petalumaStockCheckup.compRating.value > 95 and stockDetails.petalumaStockCheckup.epsRating.value > 95 %}
<img src="{{ STATIC_URL }}images/PASSED.gif" width="11" height="12">
{% else %}
<img src="{{ STATIC_URL }}images/FAILED.gif" width="11" height="12">
{% endif %}
</td>
</tr>
<tr>
<td class="criteria-description" colspan="2" >
→
{{stockDetails.laStockCheckup.epsPctChgLastQtr.value}} /
{{stockDetails.petalumaStockCheckup.salesPctChgLastQtr.value}} /
{{stockDetails.laStockCheckup.epsEstPctChgForCurrentYear.value}} /
{{stockDetails.laStockCheckup.epsEstPctChgCurrentQtr.value}}
</td>
<td class="criteria-paasfail">
{% if stockDetails.laStockCheckup.epsPctChgLastQtr.value > 25 and stockDetails.petalumaStockCheckup.salesPctChgLastQtr.value > 25 and stockDetails.laStockCheckup.epsEstPctChgForCurrentYear.value > 25 and stockDetails.laStockCheckup.epsEstPctChgCurrentQtr.value > 25 %}
<img src="{{ STATIC_URL }}images/PASSED.gif" width="11" height="12">
{% else %}
<img src="{{ STATIC_URL }}images/FAILED.gif" width="11" height="12">
{% endif %}
</td>
"""
return stock
else:
return None
def getSymbols():
loadData()
stocks = data["stocks"]
symbols = []
for symbol in stocks:
symbols.append(symbol)
symbols.sort()
return symbols
def loadData():
global data
if data is None:
#print("Not using cache. Reloading data.")
with open(stockcheckupJsonFilePath) as data_file:
data = json.load(data_file)
else:
#print("Using cache")
pass
def invalidateCache():
global data
print("Invalidating cache")
data = None | UTF-8 | Python | false | false | 5,580 | py | 110 | stockcheckup.py | 61 | 0.546774 | 0.534946 | 0 | 128 | 42.601563 | 288 |
gautamdayal/USACO | 5,669,356,832,348 | acd7b3440e7be156bd162057718fd6fcae658687 | 62cd41a215c50f62c5ae43abc91367a7df6198ef | /notlast.py | 6db2c03fdc503eca78d4b9f628e2c57d33855263 | []
| no_license | https://github.com/gautamdayal/USACO | 7f66d4cbfe43011defe4ae07a5c3b725c5207a8c | 3aacb083cb47c7d44dff79a16ceb724aaff44ecc | refs/heads/master | 2020-04-11T15:47:24.211061 | 2019-02-23T16:27:35 | 2019-02-23T16:27:35 | 161,903,050 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # All test cases correct.
inFile = open('notlast.in', 'r')
outFile = open('notlast.out', 'w')
cows = [s.split() for s in inFile.readlines()[1::]]
for l in cows:
l[1] = int(l[1])
totalmilk = {}
for cow in cows:
name = cow[0]
amount = cow[1]
if name not in totalmilk:
totalmilk[name] = 0
totalmilk[name] += amount
amounts = list(totalmilk.values())
sortedamounts = []
while len(amounts) > 0:
sortedamounts.append(min(amounts))
amounts.remove(min(amounts))
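# find the smallest distinct total, then the second-smallest and how many cows share it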
second = 0
secondname = ''
minimum = sortedamounts[0]
for n in sortedamounts[1::]:
if n > minimum:
second = n
break
count = 0
for n in sortedamounts[1::]:
if n == second:
count += 1
for cow in totalmilk:
if totalmilk[cow] == second:
secondname = cow
if len(cows) == 1:
outFile.write(cows[0][0])
else:
if count == 1:
outFile.write(secondname)
else:
outFile.write('Tie')
outFile.write('\n')
outFile.close()
| UTF-8 | Python | false | false | 994 | py | 10 | notlast.py | 9 | 0.589537 | 0.572435 | 0 | 52 | 18.115385 | 51 |
nagar-omer/Dafna_ex4_vecation | 15,573,551,417,412 | d582c7168c496ef12c9c028f5d2421f8f5d24f68 | 6c2674ca51055d931f9a22590137eb81c85f9081 | /activator/activator_params.py | 85aaf74db93095f8906a913467655a7541eb5782 | []
| no_license | https://github.com/nagar-omer/Dafna_ex4_vecation | 5c677ed584505dcd7cfcbf93433802ef51196657 | ba094a7b146baa547e581403165a3ce81203bb58 | refs/heads/master | 2023-06-09T18:52:39.725509 | 2019-06-24T16:36:46 | 2019-06-24T16:36:46 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
from torch.nn.functional import cross_entropy
class VoiceActivatorParams:
def __init__(self):
self.LOSS = cross_entropy # relu
self.BATCH_SIZE = 64
self.GPU = False
self.EPOCHS = 30
self.VALIDATION_RATE = 200
self.PIN_MEMORY = True
self.NUM_WORKERS = 4
| UTF-8 | Python | false | false | 327 | py | 7 | activator_params.py | 6 | 0.608563 | 0.584098 | 0 | 13 | 24.153846 | 45 |
dhruvshrivastava/Flask-segmentation-application | 14,156,212,220,783 | d8593e1e7f17062367a054802812cc184e1b12ce | 2b402523c32b8a9fa4c8a94dedc06bee51e83977 | /routes/segmentation.py | 9f974d53e82e3ba9a59c514cc350dbbe9e3ca182 | [
"MIT"
]
| permissive | https://github.com/dhruvshrivastava/Flask-segmentation-application | 3204af566dd2e457cf405acb08e579549d24816c | 305baa1c9b407516f826f5ef59b37288e16e6779 | refs/heads/main | 2023-05-09T19:08:05.047832 | 2021-04-27T11:51:53 | 2021-04-27T11:51:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from python.config import app
import simplejson as json
from flask import render_template, request, session
import pandas as pd
from io import StringIO
from controllers.segmentation import segmentation
@app.route('/segmentation', methods=["POST", "GET"])
def dashboard():
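    # POST: run segmentation on the submitted dataframe and cache the results
    # in the session; GET: re-render the dashboard from the cached results.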
if request.method == 'POST':
indexCol = request.form.get('index_col')
mainCol = request.form.get('main_col')
jsonDf = request.form.get('jsonDf')
df = pd.read_json(jsonDf)
results = segmentation(df, mainCol, indexCol)
session['results'] = results
else:
results = session['results']
df = results['df']
histogramCols = results['features']
plots = results['plots']
results = results['results']
return render_template('dashboard.html', results=results, df=df, plots=plots, histogramCols=histogramCols)
| UTF-8 | Python | false | false | 855 | py | 11 | segmentation.py | 6 | 0.683041 | 0.683041 | 0 | 23 | 36.173913 | 110 |
suningwz/odoo-bebepolis | 18,485,539,250,947 | b1bc913747a2541568bcdd54669dfe7a10c8075f | 4d01b758fb3e491d1fba9ead224742189818fb82 | /prestashop_connector_gt/models/sale_shop.py | cf18bf203c69d12b55143a027fa43f20e94204d7 | []
| no_license | https://github.com/suningwz/odoo-bebepolis | 3945e5835203c1503db71a8859ba8a8bb5835943 | 18b5812360f5ceb42d9d85eae4a5aa2912ace724 | refs/heads/main | 2023-04-27T17:50:39.936503 | 2021-05-04T18:42:26 | 2021-05-04T18:42:26 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
#############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import api, fields, models, _
from datetime import timedelta, datetime, date, time
from odoo.exceptions import UserError, ValidationError
import logging
logger = logging.getLogger('__name__')
from odoo.addons.prestashop_connector_gt.prestapyt.prestapyt import PrestaShopWebService as PrestaShopWebService
from odoo.addons.prestashop_connector_gt.prestapyt.prestapyt import PrestaShopWebServiceDict as PrestaShopWebServiceDict
from odoo.addons.prestashop_connector_gt.prestapyt.prestapyt import PrestaShopWebServiceImage as PrestaShopWebServiceImage
logger = logging.getLogger('stock')
class SaleShop(models.Model):
_inherit = "sale.shop"
code = fields.Char(string='Code')
name = fields.Char('Name')
prestashop_shop = fields.Boolean(string='Prestashop Shop')
prestashop_instance_id = fields.Many2one('prestashop.instance',string='Prestashop Instance',readonly=True)
presta_id = fields.Char(string='shop Id')
### Product Configuration
product_import_condition = fields.Boolean(string="Create New Product if Product not in System while import order",default=True)
route_ids = fields.Many2many('stock.location.route', 'shop_route_rel', 'shop_id', 'route_id', string='Routes')
# Order Information
company_id = fields.Many2one('res.company', string='Company', required=False,
default=lambda s: s.env['res.company']._company_default_get('stock.warehouse'))
prefix = fields.Char(string='Prefix')
suffix = fields.Char(string='Suffix')
shipment_fee_product_id = fields.Many2one('product.product', string="Shipment Fee",domain="[('type', '=', 'service')]")
discount_product_id = fields.Many2one('product.product', string="Discount Fee",domain="[('type', '=', 'service')]")
gift_wrapper_fee_product_id = fields.Many2one('product.product', string="Gift Wrapper Fee",domain="[('type', '=', 'service')]")
sale_journal = fields.Many2one('account.journal')
pricelist_id = fields.Many2one('product.pricelist', 'Pricelist')
partner_id = fields.Many2one('res.partner', string='Customer')
workflow_id = fields.Many2one('import.order.workflow', string="Order Workflow")
# stock Configuration
on_fly_update_stock = fields.Boolean(string="Update on Shop at time of Odoo Inventory Change",default=True)
warehouse_id = fields.Many2one('stock.warehouse', string='Warehouse')
# Schedular Configuration
auto_import_order = fields.Boolean(string="Auto Import Order", default=True)
auto_import_products = fields.Boolean(string="Auto Import Products", default=True)
auto_update_inventory = fields.Boolean(string="Auto Update Inventory", default=True)
auto_update_order_status = fields.Boolean(string="Auto Update Order Status", default=True)
auto_update_product_data = fields.Boolean(string="Auto Update Product data", default=True)
auto_update_price = fields.Boolean(string="Auto Update Price", default=True)
# Import last date
last_prestashop_inventory_import_date = fields.Datetime(string='Last Inventory Import Time')
last_prestashop_product_import_date = fields.Datetime(string='Last Product Import Time')
last_presta_product_attrs_import_date = fields.Datetime(string='Last Product Attributes Import Time')
last_presta_cart_rule_import_date = fields.Datetime(string='Last Cart Rule Import Time')
last_presta_catalog_rule_import_date = fields.Datetime(string='Last Catalog Rule Import Time')
last_prestashop_order_import_date = fields.Datetime(string='Last Order Import Time')
last_prestashop_carrier_import_date = fields.Datetime(string='Last Carrier Import Time')
last_prestashop_msg_import_date = fields.Datetime(string='Last Message Import Time')
last_prestashop_customer_import_date = fields.Datetime(string='Last Customer Import Time')
last_prestashop_category_import_date = fields.Datetime(string='Last Category Import Time')
last_prestashop_customer_address_import_date = fields.Datetime(string='Last Customer Address Import Time')
last_attr_id = fields.Char("Attribute Id")
#Update last date
prestashop_last_update_category_date = fields.Datetime(string='Presta last update category date')
prestashop_last_update_cart_rule_date = fields.Datetime(string='Presta last update cart rule date')
prestashop_last_update_catalog_rule_date = fields.Datetime(string='Presta last update catalog rule date')
prestashop_last_update_product_data_date = fields.Datetime(string='Presta last update product data rule date')
prestashop_last_update_order_status_date = fields.Datetime(string='Presta last update order status date')
#Export last date
prestashop_last_export_product_data_date = fields.Datetime(string= 'Last Product Export Time')
shop_physical_url = fields.Char(string="Physical URL", required=False, )
last_product_attrs_id_import=fields.Integer('Last Product Attributes ID Import',default=0)
last_product_attrs_values_id_import=fields.Integer('Last Product Attributes Values ID Import',default=0)
last_product_category_id_import=fields.Integer('Last Product Category ID Import',default=0)
last_product_id_import=fields.Integer('Last Product ID Import',default=0)
last_order_id_id_import=fields.Integer('Last Order ID Import',default=0)
last_message_id_import=fields.Integer('Last Message ID Import',default=0)
last_catalog_rule_id_import=fields.Integer('Last Catalog Rule ID Import',default=0)
last_cart_rule_id_import=fields.Integer('Last Cart Rule ID Import',default=0)
last_product_inventory_import=fields.Integer('Last Product Inventory ID Import',default=0)
last_delivery_carrier_import=fields.Integer('Last Product Inventory Import',default=0)
last_customer_id_import=fields.Integer('Last Customer ID Import',default=0)
last_supplier_id_import=fields.Integer('Last Supplier ID Import',default=0)
last_manufacturers_id_import=fields.Integer('Last Manufacturers ID Import',default=0)
last_address_id_import=fields.Integer('Last Address ID Import',default=0)
last_country_id_import=fields.Integer('Last Country ID Import',default=0)
last_state_id_import=fields.Integer('Last State ID Import',default=0)
# import_prestashop_products_scheduler
# @api.model
def import_prestashop_products_scheduler(self, cron_mode=True):
search_ids = self.search([('prestashop_shop', '=', True), ('auto_import_products', '=', True)])
if search_ids:
            search_ids = search_ids.sorted(reverse=True)
search_ids.import_products()
return True
# update_prestashop_product_data_scheduler
# @api.model
def update_prestashop_product_data_scheduler(self, cron_mode=True):
search_ids = self.search([('prestashop_shop', '=', True), ('auto_update_product_data', '=', True)])
if search_ids:
            search_ids = search_ids.sorted(reverse=True)
search_ids.update_products()
return True
# update_prestashop_inventory_scheduler
# @api.model
def update_prestashop_inventory_scheduler(self, cron_mode=True):
search_ids = self.search([('prestashop_shop', '=', True), ('auto_update_inventory', '=', True)])
if search_ids:
            search_ids = search_ids.sorted(reverse=True)
search_ids.update_presta_product_inventory()
return True
# update_prestashop_order_status_scheduler
# @api.model
def update_prestashop_order_status_scheduler(self, cron_mode=True):
search_ids = self.search([('prestashop_shop', '=', True), ('auto_update_order_status', '=', True)])
if search_ids:
            search_ids = search_ids.sorted(reverse=True)
search_ids.update_order_status()
return True
def presta_connect(self):
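        """Build a PrestaShop webservice client from this shop's instance (or,
        when called from a wizard context, from the active prestashop.instance)."""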
if self.prestashop_instance_id:
presta_instance=self.prestashop_instance_id
else:
context = dict(self._context or {})
active_id = context.get('active_ids')
presta_instance=self.env['prestashop.instance'].browse(active_id)
location=presta_instance.location
webservicekey=presta_instance.webservice_key
# try:
prestashop = PrestaShopWebService(location,webservicekey)
# except e:
#PrestaShopWebServiceError
# print(str(e))
return prestashop
# @api.one
def get_value_data(self, value):
if isinstance(value, dict):
return value.get('value')
else:
return value
# @api.one
def create_attribute(self, attribute, prestashop):
attrs_id=False
try:
prod_att_obj = self.env['product.attribute']
prod_attr_vals_obj = self.env['product.attribute.value']
attribute_value = {
# 'name':attribute.get('name').get('language')[0].get('value'),
'name':attribute.get('name').get('language').get('value'),
# 'public_name':attribute.get('public_name').get('language')[0].get('value'),
'public_name':attribute.get('public_name').get('language').get('value'),
'presta_id': attribute.get('id'),
'display_type': attribute.get('group_type'),
'is_presta': True
}
attrs_id = prod_att_obj.search([('presta_id','=', attribute.get('id')),('is_presta','=',True)],limit=1)
if not attrs_id:
attrs_id = prod_att_obj.create(attribute_value)
else:
attrs_id.write(attribute_value)
self.env.cr.execute("select attr_id from attr_shop_rel where attr_id = %s and shop_id = %s" % (attrs_id.id, self.id))
attr_data = self.env.cr.fetchone()
if attr_data == None:
self.env.cr.execute("insert into attr_shop_rel values(%s,%s)" % (attrs_id.id, self.id))
self.env.cr.commit()
        except Exception as e:
            # log the actual exception text instead of a placeholder message
            if self.env.context.get('log_id'):
                log_id = self.env.context.get('log_id')
                self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
            else:
                log_id_obj = self.env['prestashop.log'].create({'all_operations': 'import_attributes', 'error_lines': [(0, 0, {'log_description': str(e)})]})
                log_id = log_id_obj.id
                new_context = dict(self.env.context)
                new_context.update({'log_id': log_id})
                self.env.context = new_context
return attrs_id
def _create_attribute_values(self, attributes_vlaue, prestashop):
attrs_value_id=False
try:
prod_att_obj = self.env['product.attribute']
prod_attr_vals_obj = self.env['product.attribute.value']
attribute_id=False
if attributes_vlaue.get('id_attribute_group'):
attribute_id = prod_att_obj.search([('presta_id','=',attributes_vlaue.get('id_attribute_group')),('is_presta','=',True)],limit=1)
if not attribute_id:
attribute_dict = prestashop.get('product_options', attributes_vlaue.get('id_attribute_group'))
attribute_id = self.create_attribute(attribute_dict.get('product_option'),prestashop)
attribute_value = {
# 'name':attributes_vlaue.get('name').get('language')[0].get('value'),
'name':attributes_vlaue.get('name').get('language').get('value'),
'presta_id': attributes_vlaue.get('id'),
'attribute_id': attribute_id.id,
'html_color': attributes_vlaue.get('color'),
'is_presta': True
}
attrs_value_id = prod_attr_vals_obj.search([('presta_id','=', attributes_vlaue.get('id')),('is_presta','=',True)],limit=1)
if not attrs_value_id:
attrs_value_id = prod_attr_vals_obj.create(attribute_value)
else:
attrs_value_id.write(attribute_value)
self.env.cr.execute("select attr_val_id from attr_val_shop_rel where attr_val_id = %s and shop_id = %s" % (attrs_value_id.id, self.id))
attr_vals_data = self.env.cr.fetchone()
if attr_vals_data == None:
self.env.cr.execute("insert into attr_val_shop_rel values(%s,%s)" % (attrs_value_id.id, self.id))
logger.info("Attribute value created ==> %s ==> att_id ==> %d" % (attribute_value['name'], attribute_value['attribute_id']))
self.env.cr.commit()
        except Exception as e:
            # log the actual exception text instead of a placeholder message
            if self.env.context.get('log_id'):
                log_id = self.env.context.get('log_id')
                self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
            else:
                log_id_obj = self.env['prestashop.log'].create({'all_operations': 'import_attributes', 'error_lines': [(0, 0, {'log_description': str(e)})]})
                log_id = log_id_obj.id
                new_context = dict(self.env.context)
                new_context.update({'log_id': log_id})
                self.env.context = new_context
return attrs_value_id
# @api.multi
def import_product_attributes(self):
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location, shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_product_attrs_id_import, 'limit': 1000}
product_options = prestashop.get('product_options', options=filters)
if product_options.get('product_options') and product_options.get('product_options').get('product_option'):
attributes = product_options.get('product_options').get('product_option')
                    if not isinstance(attributes, list):
                        attributes = [attributes]
for attribute in attributes:
shop.create_attribute(attribute, prestashop)
shop.write({'last_product_attrs_id_import': int(attribute.get('id'))})
logger.info('Product Attribute created ===> %s' % attribute.get('id'))
self.env.cr.commit()
value_filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_product_attrs_values_id_import, 'limit': 2000}
product_options_vals = prestashop.get('product_option_values', options=value_filters)
if 'product_option_values' in product_options_vals and 'product_option_value' in product_options_vals.get('product_option_values'):
attributes_vlaues = product_options_vals.get('product_option_values').get('product_option_value')
                    if not isinstance(attributes_vlaues, list):
                        attributes_vlaues = [attributes_vlaues]
for attributes_vlaue in attributes_vlaues:
shop._create_attribute_values(attributes_vlaue, prestashop)
shop.write({'last_product_attrs_values_id_import': int(attributes_vlaue.get('id'))})
self.env.cr.commit()
except Exception as e:
raise ValidationError(_(str(e)))
return True
# @api.one
def action_check_isinstance(self, data):
        if not isinstance(data, list):
            data = [data]
return data
def create_presta_category(self, category, prestashop):
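        """Create a product.category from a PrestaShop category dict,
        resolving the parent category by its presta_id when available.
        Errors are appended to a prestashop.log record instead of raised."""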
prod_category_obj = self.env['product.category']
parent_id = categ_id = active = False
try:
if 'id_parent' in category and category.get('id_parent') != '0':
parent_ids = prod_category_obj.search([('presta_id', '=', category.get('id_parent')), ('is_presta', '=', True)], limit=1)
                if parent_ids:
                    parent_id = parent_ids.id
                else:
                    parent_ids = prod_category_obj.search([('presta_id', '=', category.get('id_parent')), ('is_presta', '=', True), ('active', '=', False)], limit=1)
                    if parent_ids:
                        parent_id = parent_ids.id
if category.get('active') == '1':
active = True
vals = {'presta_id': category.get('id'),
'parent_id': parent_id,
'is_presta': True,
'active': active,
'shop_ids': [(6,0,[self.id])],
'meta_title': self.action_check_isinstance(category.get('meta_title').get('language'))[0].get('value'),
'meta_description': self.action_check_isinstance(category.get('meta_description').get('language'))[0].get('value'),
'name': self.action_check_isinstance(category.get('name').get('language'))[0].get('value'),
}
categ_id = prod_category_obj.create(vals)
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create({'all_operations':'import_categories','error_lines': [(0,0, {'log_description': str(e),})]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return categ_id
# @api.multi
def import_categories(self):
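        """Import PrestaShop categories in batches of 500, fetching a missing
        parent category from the webservice before creating its child."""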
try:
for shop in self:
prod_category_obj = self.env['product.category']
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_product_category_id_import, 'limit': 500}
presta_categ_data = prestashop.get('categories', options=filters)
if presta_categ_data.get('categories') and presta_categ_data.get('categories').get('category'):
presta_categ = presta_categ_data.get('categories').get('category')
                    if not isinstance(presta_categ, list):
                        presta_categ = [presta_categ]
for category in presta_categ:
category_id = prod_category_obj.search([('presta_id', '=', category.get('id')), ('is_presta', '=', True)], limit=1)
if not category_id:
if category.get('id_parent') != '0':
parent_id = prod_category_obj.search([('presta_id', '=', category.get('id_parent')), ('is_presta', '=', True)],limit=1)
if not parent_id:
parent_id = prod_category_obj.search([('presta_id', '=', category.get('id_parent')), ('is_presta', '=', True),('active','=', False)], limit=1)
if not parent_id:
try:
parent_presta_categ_data = prestashop.get('categories', category.get('id_parent'))
shop.create_presta_category(parent_presta_categ_data.get('category'), prestashop)
self.env.cr.commit()
except Exception as e:
logger.info('Parent category no found in prestashop ===> %s' % (e))
shop.create_presta_category(category, prestashop)
shop.write({'last_product_category_id_import':int(category.get('id'))})
except Exception as e:
raise ValidationError(_(str(e)))
return True
def update_lang_presta_load_lang(self, id_lang,prestashop):
lang_obj = self.env['res.lang']
lang_id=False
try:
lang_data = prestashop.get('languages',id_lang)
if lang_data and lang_data.get('language').get('iso_code'):
lang_code = self.get_value_data(lang_data.get('language').get('iso_code'))
if lang_code:
lang_id = lang_obj.search([('iso_code','=',lang_code)])
if not lang_id:
lang_id = lang_obj.search([('iso_code', '=', lang_code),('active','=',False)])
if lang_id:
lang_id.write({'presta_id': id_lang,'active':True})
self.env.cr.commit()
except Exception as e:
logger.info('Res Lang ===> %s' % (e))
return lang_id
# @api.one
def create_customer(self, customer_detail, prestashop):
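        """Create or update a res.partner flagged as a PrestaShop customer and
        link it to this shop through customer_shop_rel. Partners are only
        created when the PrestaShop record carries a password (presumably to
        skip guest checkouts)."""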
res_partner_obj = self.env['res.partner']
lang_obj= self.env['res.lang']
cust_id=False
dob = self.get_value_data(customer_detail.get('birthday'))
date_obj = False
try:
if dob and dob != '0000-00-00':
date_obj = datetime.strptime(dob, '%Y-%m-%d')
lang_id = lang_obj.search([('presta_id', '=', customer_detail.get('id_lang'))])
if not lang_id:
lang_id= self.update_lang_presta_load_lang(customer_detail.get('id_lang'),prestashop)
vals = {
'presta_id': customer_detail.get('id'),
                'name': (customer_detail.get('firstname') or '') + ' ' + (customer_detail.get('lastname') or ''),
                'comment': customer_detail.get('note'),
'customer_rank': 1,
'supplier_rank': 0,
'email': customer_detail.get('email'),
'lang': lang_id.code,
'website': customer_detail.get('website'),
'prestashop_customer': True,
'date_of_birth': date_obj and date_obj.date() or False,
}
if self.get_value_data(customer_detail.get('passwd')):
customer_ids = res_partner_obj.search([('presta_id', '=',customer_detail.get('id')),('prestashop_customer', '=', True)],limit=1)
if not customer_ids:
cust_id = res_partner_obj.create(vals)
logger.info('Created Customer ===> %s'%(cust_id.id))
else:
cust_id = customer_ids
customer_ids.write(vals)
if cust_id:
self.env.cr.execute("select cust_id from customer_shop_rel where cust_id = %s and shop_id = %s" % (cust_id.id, self.id))
cust_data = self.env.cr.fetchone()
if cust_data== None:
self.env.cr.execute("insert into customer_shop_rel values(%s,%s)" % (cust_id.id, self.id))
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_customers', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return cust_id
# @api.multi
def import_customers(self):
for shop in self:
res_partner_obj=self.env['res.partner']
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_customer_id_import, 'limit': 2000}
customers_data = prestashop.get('customers',options=filters)
if 'customers' in customers_data and 'customer' in customers_data.get('customers'):
customers = customers_data.get('customers').get('customer')
                    if not isinstance(customers, list):
                        customers = [customers]
for customer in customers:
customer_id = res_partner_obj.search([('presta_id', '=', customer.get('id')),('prestashop_customer', '=', True)],limit=1)
if not customer_id:
self.create_customer(customer, prestashop)
self.write({'last_customer_id_import': int(customer.get('id'))})
self.env.cr.commit()
except Exception as e:
raise ValidationError(_(str(e)))
return True
# @api.one
def create_presta_supplier(self, supplier):
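        """Create a res.partner flagged as a PrestaShop supplier and link it
        to this shop; reuses an existing partner with the same presta_id."""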
res_partner_obj = self.env['res.partner']
try:
vals = {
'presta_id': supplier.get('id'),
'name': supplier.get('name'),
'supplier_rank': 0,
'customer_rank': 0,
'manufacturer': False,
'prestashop_supplier': True,
}
logger.info('===vals======> %s',vals)
supplier_id = res_partner_obj.search([('presta_id', '=', supplier.get('id')),('prestashop_supplier','=',True)],limit=1)
if not supplier_id:
supplier_id = res_partner_obj.create(vals)
logger.info('Created Supplier ===> %s' % (supplier_id.id))
if supplier_id:
self.env.cr.execute(
"select cust_id from customer_shop_rel where cust_id = %s and shop_id = %s" % (supplier_id.id, self.id))
supplier_data = self.env.cr.fetchone()
if supplier_data == None:
self.env.cr.execute("insert into customer_shop_rel values(%s,%s)" % (supplier_id.id, self.id))
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_suppliers', 'error_lines': [(0, 0, {'log_description': str(e)})]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return supplier_id
# @api.multi
def import_suppliers(self):
for shop in self:
try:
res_partner_obj = self.env['res.partner']
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_supplier_id_import, 'limit': 2000}
supplier_data = prestashop.get('suppliers', options=filters)
if supplier_data.get('suppliers') and supplier_data.get('suppliers').get('supplier'):
suppliers = supplier_data.get('suppliers').get('supplier')
                    if not isinstance(suppliers, list):
                        suppliers = [suppliers]
for supplier in suppliers:
supplier_id = res_partner_obj.search([('presta_id', '=', supplier.get('id')), ('prestashop_supplier', '=', True)], limit=1)
if not supplier_id:
shop.create_presta_supplier(supplier)
except Exception as e:
raise ValidationError(_(str(e)))
return True
# @api.one
def create_presta_manufacturers(self, manufacturer):
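        """Create a res.partner flagged as a manufacturer for the given
        PrestaShop manufacturer dict and link it to this shop."""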
res_partner_obj = self.env['res.partner']
try:
vals = {
'presta_id': manufacturer.get('id'),
'name': manufacturer.get('name'),
'manufacturer': True,
'customer_rank': 0,
'supplier_rank': 0,
}
manufact_id = res_partner_obj.search([('presta_id', '=', manufacturer.get('id')),('manufacturer', '=', True)],limit=1)
if not manufact_id:
manufact_id = res_partner_obj.create(vals)
self.env.cr.commit()
logger.info('Created manufacturer successfully ===> %s' % (manufact_id.id))
if manufact_id:
self.env.cr.execute("select cust_id from customer_shop_rel where cust_id = %s and shop_id = %s" % (manufact_id.id, self.id))
manufacturer_data = self.env.cr.fetchone()
if manufacturer_data == None:
self.env.cr.execute("insert into customer_shop_rel values(%s,%s)" % (manufact_id.id, self.id))
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_manufacturers', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return manufact_id
# get manufacturers data from prestashop and create in odoo
def import_manufacturers(self):
for shop in self:
try:
res_partner_obj = self.env['res.partner']
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_manufacturers_id_import, 'limit': 2000}
manufacturer_data=prestashop.get('manufacturers', options=filters)
if manufacturer_data.get('manufacturers') and manufacturer_data.get('manufacturers').get('manufacturer'):
manufacturers = manufacturer_data.get('manufacturers').get('manufacturer')
                    if not isinstance(manufacturers, list):
                        manufacturers = [manufacturers]
for manufacturer in manufacturers:
manufacturer_id = res_partner_obj.search([('presta_id', '=', manufacturer.get('id')), ('manufacturer', '=', True)], limit=1)
if not manufacturer_id:
shop.create_presta_manufacturers(manufacturer)
self.write({'last_manufacturers_id_import': int(manufacturer.get('id'))})
self.env.cr.commit()
except Exception as e:
raise ValidationError(_(str(e)))
return True
def get_value(self, data):
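        """Pick one translation out of a PrestaShop multi-language value.
        Lists are filtered to this instance's 'it' language (falling back to
        the first configured prestashop.language); single values use their
        own language id."""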
if isinstance(data, list):
data = data
lang_id = self.env['prestashop.language'].search([('code','=','it'), ('presta_instance_id','=', self.prestashop_instance_id.id)])
if not lang_id:
lang = self.env['prestashop.language'].search([])
lang_id = self.env['prestashop.language'].search([('code', '=', lang[0].code), ('presta_instance_id', '=', self.prestashop_instance_id.id)])[0]
else:
data = [data]
lang_id = self.env['prestashop.language'].search([('presta_id','=',data[0].get('attrs').get('id')), ('presta_instance_id','=', self.prestashop_instance_id.id)])[0]
val = [i for i in data if int(i.get('attrs').get('id')) == int(lang_id.presta_id)]
return val[0]
def import_country_state(self):
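        """Import PrestaShop countries and states, matching countries by ISO
        code; states are currently only updated (creation is disabled)."""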
browse_country_obj = self.env['res.country']
browse_state_obj = self.env['res.country.state']
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_country_id_import, 'limit': 1000}
state_filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_state_id_import, 'limit': 1000}
prestashop_country_data = prestashop.get('countries', options=filters)
# import country
if 'country' in prestashop_country_data.get('countries'):
country_list = prestashop_country_data.get('countries').get('country')
                    if not isinstance(country_list, list):
                        country_list = [country_list]
for country in country_list:
country_vals={'presta_id': country.get('id'),'is_prestashop': True}
country_id=browse_country_obj.search([('code','=',country.get('iso_code'))],limit=1)
if not country_id:
                            # Previously:
                            # country_vals.update({'name': country.get('name').get('language')[0].get('value'), 'code': country.get('iso_code')})
                            # Removed the [0] index to read the value in the shape PrestaShop returns it
                            country_vals.update({'name': country.get('name').get('language').get('value'), 'code': country.get('iso_code')})
browse_country_obj.create(country_vals)
else:
country_id.write(country_vals)
shop.write({'last_country_id_import':int(country.get('id'))})
self.env.cr.commit()
prestashop_state_data = prestashop.get('states', options=state_filters)
if 'state' in prestashop_state_data.get('states'):
state_list = prestashop_state_data.get('states').get('state')
                    if not isinstance(state_list, list):
                        state_list = [state_list]
for state in state_list:
state_vals={'presta_id': state.get('id'),'is_prestashop': True}
country_id = browse_country_obj.search([('presta_id', '=', state.get('id_country')),('is_prestashop','=',True)], limit=1)
state_id=browse_state_obj.search([('name','=',state.get('name'))],limit=1)
if state_id:
state_id.write(state_vals)
# if not state_id:
# state_vals.update({'name': state.get('name'), 'country_id': country_id.id,'code':state.get('iso_code')})
# browse_state_obj.create(state_vals)
# else:
# state_id.write(state_vals)
shop.write({'last_state_id_import':int(state.get('id'))})
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_country_state', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': False})
self.env.context = new_context
def update_country_state_prestashop_id(self, id_country,id_state, prestashop):
browse_country_obj = self.env['res.country']
browse_state_obj = self.env['res.country.state']
try:
if isinstance(id_country, str):
country_id = browse_country_obj.search([('presta_id', '=', id_country)], limit=1)
if not country_id and id_state == False:
country_data = prestashop.get('countries', id_country)
country_name = self.get_value_data(self.get_value(country_data.get('country').get('name').get('language')))
country_code = self.get_value_data(country_data.get('country').get('iso_code'))
country_id = browse_country_obj.create({'name': country_name, 'code': country_code,'presta_id': id_country,'is_prestashop': True})
self.env.cr.commit()
return country_id
if id_state:
prestashop_state_data = prestashop.get('states', id_state)
state_dict = prestashop_state_data.get('state')
state_vals = {'presta_id': state_dict.get('id'), 'is_prestashop': True}
state_id = browse_state_obj.search([('name', '=', state_dict.get('name'))], limit=1)
if not state_id:
state_vals.update({'name': state_dict.get('name'), 'country_id': id_country.id, 'code': state_dict.get('iso_code')})
browse_state_obj.create(state_vals)
else:
state_id.write(state_vals)
return state_id
except Exception as e:
            logger.info('Country/state update error ===> %s' % str(e))
def create_address(self,address_dict,prestashop):
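        """Create or update a res.partner address from a PrestaShop address
        dict, resolving country/state and attaching it to the related
        customer, supplier or manufacturer as parent."""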
try:
address_id = False
res_partner_obj = self.env['res.partner']
id_state = state_id = False
country_id = self.update_country_state_prestashop_id(address_dict.get('id_country'), id_state, prestashop)
if address_dict.get('id_state') != '0':
state_id = self.update_country_state_prestashop_id(country_id, address_dict.get('id_state'), prestashop)
if state_id:
state_id = state_id.id
addr_vals = {
'name': address_dict.get('firstname') + ' ' + address_dict.get('lastname'),
'street': address_dict.get('address1'),
'street2': address_dict.get('address2'),
'city': address_dict.get('city'),
'zip': address_dict.get('postcode'),
'phone': address_dict.get('phone'),
'mobile': address_dict.get('phone_mobile'),
'address_id': address_dict.get('id'),
'prestashop_address': True,
'country_id': country_id.id,
'state_id': state_id,
}
parent_id = False
if address_dict.get('id_customer') != '0':
parent_id = res_partner_obj.search([('presta_id', '=', address_dict.get('id_customer')), ('prestashop_customer', '=', True)], limit=1)
if not parent_id:
try:
cust_data = prestashop.get('customers', address_dict.get('id_customer'))
parent_id = self.create_customer(cust_data.get('customer'), prestashop)
except Exception as e:
logger.info('Error/Warning '+ str(e))
elif address_dict.get('id_supplier') != '0':
parent_id = res_partner_obj.search([('presta_id', '=', address_dict.get('id_supplier')), ('prestashop_supplier', '=', True)], limit=1)
if not parent_id:
try:
supplier_detail = prestashop.get('suppliers', address_dict.get('id_supplier'))
parent_id = self.create_presta_supplier(supplier_detail.get('supplier'))
except Exception as e:
logger.info('Error/Warning '+ str(e))
elif address_dict.get('id_manufacturer') != '0':
parent_id = res_partner_obj.search([('presta_id', '=', address_dict.get('id_manufacturer')), ('manufacturer', '=', True)], limit=1)
if not parent_id:
try:
manufacturer_detail = prestashop.get('manufacturers', address_dict.get('id_manufacturer'))
parent_id = self.create_presta_manufacturers(manufacturer_detail.get('manufacturer'))
except Exception as e:
logger.info('Error/Warning '+ str(e))
if parent_id:
parent_id = parent_id.id
addr_vals.update({'parent_id': parent_id})
address_id = res_partner_obj.search([('address_id', '=', address_dict.get('id')), ('prestashop_address', '=', True)])
if address_id:
address_id.write(addr_vals)
else:
address_id = res_partner_obj.create(addr_vals)
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_addresses', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return address_id
def import_addresses(self):
res_partner_obj = self.env['res.partner']
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_address_id_import, 'limit': 500}
prestashop_address_data = prestashop.get('addresses',options=filters)
if 'address' in prestashop_address_data.get('addresses'):
address_list = prestashop_address_data.get('addresses').get('address')
                    if not isinstance(address_list, list):
                        address_list = [address_list]
for address_dict in address_list:
address_id = res_partner_obj.search([('address_id','=',address_dict.get('id')) , ('prestashop_address', '=', True )])
if not address_id:
shop.create_address(address_dict, prestashop)
shop.write({'last_address_id_import': int(address_dict.get('id'))})
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_addresses', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return True
# @api.one
def create_presta_product(self, product_dict, prestashop):
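        """Create or update a product.template (and its variant data) from a
        full PrestaShop product dict: category, manufacturer, supplier,
        attribute lines, images and combinations. Barcodes are de-duplicated
        by appending the presta_id when a clash is found."""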
prod_temp_obj = self.env['product.template']
prod_prod_obj = self.env['product.product']
att_val_obj = self.env['product.attribute.value']
product_image_obj = self.env['product.images']
res_partner_obj = self.env['res.partner']
product_categ_obj = self.env['product.category']
try:
manufacturers_id = supplier_id = False
prd_tmp_vals = {
# 'name': product_dict.get('name').get('language')[0].get('value'),
'name': product_dict.get('name').get('language').get('value'),
'type': 'product',
'list_price': product_dict.get('price'),
'default_code': product_dict.get('reference'),
'prestashop_product': True,
'wholesale_price': product_dict.get('wholesale_price'),
'product_onsale': product_dict.get('on_sale'),
# 'product_instock': self.get_value(product_dict.get('available_now').get('language')),
'product_lngth': product_dict.get('depth'),
'product_width': product_dict.get('width'),
'product_wght': product_dict.get('weight'),
'product_hght': product_dict.get('height'),
'presta_id': product_dict.get('id'),
}
if product_dict.get('id_category_default'):
domain_categ = [('presta_id', '=', product_dict.get('id_category_default')), ('is_presta', '=', True),('active', '=', True)]
cate_id = self.search_record_in_odoo(product_categ_obj, domain_categ)
if cate_id:
prd_tmp_vals.update({'categ_id': cate_id.id})
if product_dict.get('ean13') not in ['0','']:
                # Set the product barcode here
prd_tmp_vals.update({'barcode': product_dict.get('ean13')})
# get manufacturer id if not in odoo create
if product_dict.get('id_manufacturer') != '0':
manufacturers_id = res_partner_obj.search([('presta_id', '=', product_dict.get('id_manufacturer')), ('manufacturer', '=', True)],limit=1)
if not manufacturers_id:
try:
manufacturer_detail = prestashop.get('manufacturers',product_dict.get('id_manufacturer'))
manufact_id = self.create_presta_manufacturers(manufacturer_detail.get('manufacturer'))
if manufact_id:
manufacturers_id = manufact_id.id
except Exception as e:
manufacturers_id = False
else:
manufacturers_id=manufacturers_id.id
prd_tmp_vals.update({'manufacturer_id':manufacturers_id})
# get supplier id if not in odoo create
if product_dict.get('id_supplier') != '0':
supplier_id = res_partner_obj.search([('presta_id', '=', product_dict.get('id_supplier')), ('prestashop_supplier', '=', True)],limit=1)
if supplier_id:
supplier_id = supplier_id.id
else:
try:
supplier_detail = prestashop.get('suppliers', product_dict.get('id_supplier'))
supply_id = self.create_presta_supplier(supplier_detail.get('supplier'))
if supply_id:
supplier_id = supply_id.id
except Exception as e:
supplier_id = False
if supplier_id:
prd_tmp_vals.update({'supplier_id': supplier_id})
prd_tmp_vals.update({'seller_ids': [(0, 0, {'name': supplier_id})]})
if product_dict.get('associations'):
attribute_line_ids, atttibute_lines_dict = [], {}
if product_dict.get('associations').get('product_option_values'):
if product_dict.get('associations').get('product_option_values').get('product_option_value'):
data = product_dict.get('associations').get('product_option_values').get('product_option_value')
else:
data = product_dict.get('associations').get('product_option_values')
if data:
if isinstance(data, dict):
data = [data]
for att_val in data:
if att_val.get('value') in ('', '0'):
continue
value_id = att_val_obj.search([('presta_id', '=', self.get_value_data(att_val.get('id')))],limit=1)
if not value_id:
try:
values_data = prestashop.get('product_option_values', self.get_value_data(att_val.get('id')))
self._create_attribute_values(values_data.get('product_option_value'), prestashop)
self.env.cr.commit()
except Exception as e:
value_id = False
value_id = att_val_obj.search([('presta_id', '=', self.get_value_data(att_val.get('id')))], limit=1)
if value_id:
if value_id.attribute_id.id in atttibute_lines_dict:
if value_id.id not in atttibute_lines_dict.get(value_id.attribute_id.id):
atttibute_lines_dict.get(value_id.attribute_id.id).append(value_id.id)
else:
atttibute_lines_dict.update({value_id.attribute_id.id: [value_id.id]})
for i in atttibute_lines_dict.keys():
attribute_line_ids.append((0, 0, {'attribute_id': i, 'value_ids': [(6, 0, atttibute_lines_dict.get(i))]}))
prd_tmp_vals.update({'attribute_line_ids': attribute_line_ids})
prod_id = prod_temp_obj.search([('presta_id', '=', self.get_value_data(product_dict.get('id'))),('prestashop_product','=',True)],limit=1)
if 'barcode' in prd_tmp_vals and prd_tmp_vals['barcode']:
if not prod_id:
check_barcode = prod_temp_obj.search([('barcode', '=', prd_tmp_vals['barcode'])], limit=1)
else:
check_barcode = prod_temp_obj.search(
[('barcode', '=', prd_tmp_vals['barcode']), ('id', '!=', prod_id.id)], limit=1)
if check_barcode and check_barcode.id != prod_id.id:
                    while check_barcode:
                        prd_tmp_vals.update(
                            {'barcode': prd_tmp_vals['barcode'] + prd_tmp_vals['presta_id']})
                        check_barcode = prod_temp_obj.search(
                            [('barcode', '=', prd_tmp_vals['barcode'])], limit=1)
if not prod_id:
prod_id = prod_temp_obj.create(prd_tmp_vals)
logger.info('Product created %s' % prod_id.name)
else:
prod_id.write(prd_tmp_vals)
logger.info('Product updated %s' % prod_id.name)
self.env.cr.commit()
if prod_id:
# Image create/write
img_ids = product_dict.get('associations').get('images').get('image', False)
if img_ids:
                    if not isinstance(img_ids, list):
                        img_ids = [img_ids]
for image in img_ids:
loc = (self.prestashop_instance_id.location).split('//')
url = "http://" + self.prestashop_instance_id.webservice_key + "@" + loc[1] + '/api/images/products/' + product_dict.get('id') + '/' + image.get('id')
client = PrestaShopWebServiceImage(self.prestashop_instance_id.location, self.prestashop_instance_id.webservice_key)
res = client.get_image(url)
if res.get('image_content'):
img_test = res.get('image_content').decode('utf-8')
extention = res.get('type')
if img_test:
product_img_id=product_image_obj.search([('prest_img_id','=',int(image.get('id'))),('product_t_id','=',prod_id.id)])
if not product_img_id:
is_default_img = False
if product_dict.get('id_default_image').get('value') is not None:
is_default_img=True
prod_id.write({'image_1920':img_test})
img_vals = ({'is_default_img':is_default_img,'extention':extention,'image_url': url, 'image': img_test, 'prest_img_id': int(image.get('id')),'name':' ','product_t_id': prod_id.id})
_img_created = product_image_obj.create(img_vals)
logger.info('Product Image created %s' % _img_created.id)
# # write attributes
if prd_tmp_vals.get('attribute_line_ids'):
for each in prd_tmp_vals.get('attribute_line_ids'):
attribute_ids = self.env['product.template.attribute.line'].search(
[('product_tmpl_id', '=', prod_id.id), ('attribute_id', '=', each[2].get('attribute_id'))])
if attribute_ids:
for val_at in each[2].get('value_ids')[0][2]:
if val_at not in attribute_ids[0].value_ids.ids:
                                attribute_ids[0].write({'value_ids': [(4, val_at)]})
else:
self.env['product.template.attribute.line'].create({'attribute_id': each[2].get('attribute_id'), 'product_tmpl_id': prod_id.id, 'value_ids': each[2].get('value_ids')})
if prd_tmp_vals.get('attribute_line_ids'):
prd_tmp_vals.pop('attribute_line_ids')
if 'message_follower_ids' in prd_tmp_vals:
prd_tmp_vals.pop('message_follower_ids')
prod_id.write(prd_tmp_vals)
logger.info('Product comb updated %s' % prod_id.name)
self.env.cr.execute("select product_id from product_templ_shop_rel where product_id = %s and shop_id = %s" % (prod_id.id, self.id))
prod_data = self.env.cr.fetchone()
if prod_data == None:
self.env.cr.execute("insert into product_templ_shop_rel values(%s,%s)" % (prod_id.id, self.id))
logger.info('Producrt Created ===> %s', prod_id.id)
self.env.cr.execute("select product_id from product_templ_shop_rel where product_id = %s and shop_id = %s" % (prod_id.id, self.id))
prod_data = self.env.cr.fetchone()
if prod_data == None:
q1 = "insert into product_templ_shop_rel values(%s,%s)" % (prod_id.id, self.id)
self.env.cr.execute(q1)
if product_dict.get('associations').get('combinations').get('combination', False):
comb_l = product_dict.get('associations').get('combinations').get('combination', False)
c_val = {}
if comb_l:
                        if not isinstance(comb_l, list):
                            comb_l = [comb_l]
for comb in comb_l:
try:
combination_dict = prestashop.get('combinations', self.get_value_data(comb.get('id')))
value_list = []
value_comb_ids = combination_dict.get('combination').get('associations').get('product_option_values').get('product_option_value')
if value_comb_ids:
                                    if not isinstance(value_comb_ids, list):
                                        value_comb_ids = [value_comb_ids]
# print "value_comb_ids",value_comb_ids
for each in value_comb_ids:
val_id = self.get_value_data(each.get('id'))
value_list.append(val_id)
prest_product_id = self.get_value_data(combination_dict.get('combination').get('id_product'))
product_ids = prod_prod_obj.search([('product_tmpl_id.presta_id', '=',prest_product_id)])
prod_id_var = False
if product_ids:
for product_data in product_ids:
prod_val_ids = product_data.product_template_attribute_value_ids.product_attribute_value_id
k = []
for red in prod_val_ids:
k.append(red.presta_id)
                                        rles = sorted(k, key=int)
                                        t = sorted(self.get_value_data(value_list), key=int)
                                        imag_odoo_data = False
                                        if rles == t:
img_ids = combination_dict.get('combination').get('associations').get('images').get('image', False)
if img_ids:
                                                if not isinstance(img_ids, list):
                                                    img_ids = [img_ids]
for image in img_ids:
imag_odoo_data=self.return_image_data(prestashop,product_data.product_tmpl_id.id, prest_product_id, image.get('id'))
product_barcode=False
if self.get_value_data(combination_dict.get('combination').get('ean13')) not in ['','0']:
                                                # Set the combination barcode here
product_barcode = self.get_value_data(combination_dict.get('combination').get('ean13'))
                                            # Added these lines to ensure the barcode is unique on the Odoo side
if product_barcode:
check_barcode = prod_prod_obj.search(
[('barcode', '=', product_barcode)], limit=1)
if check_barcode:
while check_barcode:
product_barcode += combination_dict.get('combination').get('id')
check_barcode = prod_prod_obj.search(
[('barcode', '=', product_barcode)], limit=1)
c_val.update({
'default_code':self.get_value_data(combination_dict.get('combination').get('reference')),
'barcode': product_barcode,
'combination_id':self.get_value_data(combination_dict.get('combination').get('id')),
})
if imag_odoo_data:
c_val.update({
'image_1920':imag_odoo_data.image
})
product_data.product_template_attribute_value_ids.write({'price_extra':self.get_value_data(combination_dict.get('combination').get('price'))})
product_data.write(c_val)
logger.info('Product comb updated %s' % product_data.name)
except Exception as e:
continue
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_products', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return True
def return_image_data(self,prestashop,product_id,product_presta_id,img_id):
product_image_obj = self.env['product.images']
product_img_id = product_image_obj.search([('prest_img_id', '=', int(img_id)), ('product_t_id', '=', product_id)])
if not product_img_id:
try:
loc = (self.prestashop_instance_id.location).split('//')
url = "http://" + self.prestashop_instance_id.webservice_key + "@" + loc[1] + '/api/images/products/' + product_presta_id + '/' + img_id
client = PrestaShopWebServiceImage(self.prestashop_instance_id.location,self.prestashop_instance_id.webservice_key)
res = client.get_image(url)
if res.get('image_content'):
img_test = res.get('image_content').decode('utf-8')
extention = res.get('type')
if img_test:
product_img_id = product_image_obj.search([('prest_img_id', '=', int(img_id)), ('product_t_id', '=', product_id)])
if not product_img_id:
img_vals = ({'is_default_img': False,
'extention': extention, 'image_url': url,
'image': img_test,
'prest_img_id': int(img_id),
'name': 'test',
'product_t_id': product_id})
product_image_obj.create(img_vals)
except Exception as e:
product_img_id =False
return product_img_id
def search_record_in_odoo(self,brows_obj, domain):
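        """Search the given model with the domain, retrying with
        active=False so archived records are matched as well."""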
record_id = brows_obj.search(domain)
if not record_id:
domain.append(('active','=',False))
record_id = brows_obj.search(domain)
return record_id
# @api.multi
def import_products(self):
product_brows = self.env['product.template']
for shop in self:
try:
product_categ_obj = self.env['product.category']
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_product_id_import, 'limit': 1000}
prestashop_product_data = prestashop.get('products', options=filters)
if prestashop_product_data.get('products') and prestashop_product_data.get('products').get('product'):
prestashop_product_list = prestashop_product_data.get('products').get('product')
prestashop_product_list = self.action_check_isinstance(prestashop_product_list)
for product_dict in prestashop_product_list:
if product_dict.get('id_category_default'):
domain_categ = [('presta_id', '=', product_dict.get('id_category_default')),('is_presta', '=', True)]
cate_id = self.search_record_in_odoo(product_categ_obj, domain_categ)
if not cate_id:
try:
parent_presta_categ_data = prestashop.get('categories', product_dict.get('id_category_default'))
shop.create_presta_category(parent_presta_categ_data.get('category'), prestashop)
self.env.cr.commit()
except Exception as e:
logger.info('Parent category ===> %s' % (e))
shop.create_presta_product(product_dict, prestashop)
shop.write({'last_product_id_import': product_dict.get('id')})
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_products', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return True
def createInventory(self, stock, lot_stock_id, prestashop):
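        """Create a stock.quant in inventory mode for one PrestaShop
        stock_available entry, resolving the variant by combination id first
        and falling back to the template's presta_id."""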
product_obj = self.env['product.product']
product_temp_obj = self.env['product.template']
quantity = int(stock.get('quantity'))
product_id = False
try:
if stock.get('id_product_attribute') != '0':
product_id = product_obj.search([('combination_id', '=', stock.get('id_product_attribute'))],limit=1)
if not product_id:
product_id = product_obj.search([('product_tmpl_id.presta_id', '=', stock.get('id_product'))], limit=1)
if product_id:
self.env.cr.execute("select product_prod_id from product_prod_shop_rel where product_prod_id = %s and shop_id = %s" % (product_id.id, self.id))
prod_data = self.env.cr.fetchone()
if prod_data is None:
self.env.cr.execute("insert into product_prod_shop_rel values(%s,%s)" % (product_id.id, self.id))
                quant_id = self.env['stock.quant'].with_context(inventory_mode=True).create({
                    'product_id': product_id.id,
                    'location_id': lot_stock_id,
                    'lot_id': False,
                    'package_id': False,
                    'owner_id': False,
                    'presta_id': stock.get('id'),
                    'is_presta': True,
                    'inventory_quantity': quantity,
                })
                if quant_id:
                    logger.info("Inventory imported")
return True
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_inventory', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
# @api.multi
def import_product_inventory(self):
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_product_inventory_import, 'limit': 100}
prestashop_stock_data = prestashop.get('stock_availables', options=filters)
if prestashop_stock_data.get('stock_availables') and 'stock_available' in prestashop_stock_data.get('stock_availables'):
stocks = prestashop_stock_data.get('stock_availables').get('stock_available')
                    if not isinstance(stocks, list):
                        stocks = [stocks]
for stock in stocks:
stock_id = self.env['stock.quant'].search([('presta_id','=',stock.get('id')),('is_presta','=',True)])
if not stock_id:
shop.createInventory(stock,shop.warehouse_id.lot_stock_id.id, prestashop)
shop.write({'last_product_inventory_import': stock.get('id')})
self.env.cr.commit()
except Exception as e:
raise ValidationError(_(str(e)))
return True
# @api.one
def create_carrier(self, carrier_dict):
carrier_obj = self.env['delivery.carrier']
product_obj = self.env['product.product']
car_id= False
try:
product_id = product_obj.search([('name', '=',carrier_dict.get('name'))],limit=1)
if not product_id:
product_id = product_obj.create({'name': carrier_dict.get('name')})
carr_vals = {
'name': carrier_dict.get('name'),
'fixed_price': int(carrier_dict.get('shipping_external')),
'product_id': product_id.id,
'is_presta': True,
'presta_id': carrier_dict.get('id'),
'delay_comment': self.get_value_data(self.get_value(carrier_dict.get('delay').get('language')))
}
car_id = carrier_obj.search([('presta_id', '=', carrier_dict.get('id')),('is_presta','=',True)],limit=1)
if not car_id:
car_id = carrier_obj.create(carr_vals)
logger.info('created carrier ===> %s', car_id.id)
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_carriers', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return car_id
# @api.multi
def import_carriers(self):
for shop in self:
try:
carrier_obj = self.env['delivery.carrier']
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '>[%s]' % self.last_delivery_carrier_import, 'limit': 100}
prestashop_carriers_data = prestashop.get('carriers', options=filters)
if prestashop_carriers_data.get('carriers') and prestashop_carriers_data.get('carriers').get('carrier'):
carriers = prestashop_carriers_data.get('carriers').get('carrier')
                    if not isinstance(carriers, list):
                        carriers = [carriers]
for carrier in carriers:
carrier_id = carrier_obj.search([('presta_id', '=',carrier.get('id')),('is_presta','=', True)],limit=1)
if not carrier_id:
shop.create_carrier(carrier)
shop.write({'last_delivery_carrier_import': carrier.get('id')})
self.env.cr.commit()
return True
except Exception as e:
raise ValidationError(_(str(e)))
#workflow of order
def manageOrderWorkflow(self, saleorderid, order_detail, status):
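        """Drive the sale order through the configured workflow: cancel (with
        refunds/returns) when the PrestaShop status is 'Canceled', otherwise
        confirm, ship, invoice and register payment according to the
        workflow_id flags."""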
invoice_obj = self.env['account.move']
return_obj = self.env['stock.return.picking']
return_line_obj = self.env['stock.return.picking.line']
if status.name == 'Canceled':
if saleorderid.state in ['draft']:
saleorderid.action_cancel()
        if saleorderid.state in ['sale', 'done']:
invoice_ids = saleorderid.invoice_ids
for invoice in invoice_ids:
                refund_ids = invoice_obj.search([('invoice_origin', '=', invoice.name)])
if not refund_ids:
if invoice.state == 'paid':
refund_invoice_id = invoice_obj.create(dict(
description='Refund To %s' % (invoice.partner_id.name),
                            date=datetime.now().date(),
filter_refund='refund'
))
refund_invoice_id.invoice_refund()
saleorderid.write({'is_refund': True})
else:
invoice.action_cancel()
for picking in saleorderid.picking_ids:
            if picking.state not in ('done',):
picking.action_cancel()
else:
ctx = self._context.copy()
ctx.update({'active_id': picking.id})
res = return_obj.with_context(ctx).default_get(['product_return_moves', 'move_dest_exists'])
res.update({'invoice_state': '2binvoiced'})
return_id = return_obj.with_context(ctx).create({'invoice_state': 'none'})
for record in res['product_return_moves']:
record.update({'wizard_id': return_id.id})
return_line_obj.with_context(ctx).create(record)
                pick_id_return, pick_type_id = return_id.with_context(ctx)._create_returns()
# pick_id_return.force_assign()
pick_id_return._action_done()
saleorderid.action_cancel()
return True
# Make Order Confirm
#if validate order is activated in workflow
if self.workflow_id.validate_order:
if saleorderid.state in ['draft']:
saleorderid.action_confirm()
# if complete shipment is activated in workflow
if self.workflow_id.complete_shipment:
if saleorderid.state in ['draft','confirmed']:
saleorderid.action_confirm()
for picking_id in saleorderid.picking_ids:
# If still in draft => confirm and assign
if picking_id.state == 'draft':
picking_id.action_confirm()
# picking_id._action_assign()
# if picking_id.state == 'confirmed':
# picking_id._action_assign()
move_ids = picking_id.move_ids_without_package._action_confirm()
move_ids._action_assign()
# if create_invoice is activated in workflow
if self.workflow_id.create_invoice:
if not saleorderid.invoice_ids:
invoice_ids = saleorderid._create_invoices()
invoice_ids.write({'is_prestashop': True})
# if validate_invoice is activated in workflow
if self.workflow_id.validate_invoice:
            if saleorderid.state == 'draft':
saleorderid.action_confirm()
if not saleorderid.invoice_ids:
invoice_ids = saleorderid._create_invoices()
# invoice_ids = invoice_obj.browse(invoice_ids)
invoice_ids.write({'is_prestashop': True})
for invoice_id in saleorderid.invoice_ids:
invoice_id.write({
'total_discount_tax_excl': self.get_value_data(order_detail.get('total_discounts_tax_excl')),
'total_discount_tax_incl': self.get_value_data(order_detail.get('total_discounts_tax_incl')),
'total_paid_tax_excl': self.get_value_data(order_detail.get('total_paid_tax_excl')),
'total_paid_tax_incl': self.get_value_data(order_detail.get('total_paid_tax_incl')),
'total_products_wt': self.get_value_data(order_detail.get('total_products_wt')),
'total_shipping_tax_excl': self.get_value_data(order_detail.get('total_shipping_tax_excl')),
'total_shipping_tax_incl': self.get_value_data(order_detail.get('total_shipping_tax_incl')),
'total_wrapping_tax_excl': self.get_value_data(order_detail.get('total_wrapping_tax_excl')),
'total_wrapping_tax_incl': self.get_value_data(order_detail.get('total_wrapping_tax_incl')),
'is_prestashop': True,
})
if invoice_id.state == 'draft':
invoice_id.action_post()
# if register_payment is activated in workflow
if self.workflow_id.register_payment:
if saleorderid.state == 'draft':
saleorderid.action_confirm()
if not saleorderid.invoice_ids:
if sum(line.qty_to_invoice for line in saleorderid.order_line) > 0:
invoice_ids = saleorderid._create_invoices()
invoice_ids.write({'is_prestashop': True})
for invoice_id in saleorderid.invoice_ids:
invoice_id.write({
'total_discount_tax_excl': self.get_value_data(order_detail.get('total_discounts_tax_excl')),
'total_discount_tax_incl': self.get_value_data(order_detail.get('total_discounts_tax_incl')),
'total_paid_tax_excl': self.get_value_data(order_detail.get('total_paid_tax_excl')),
'total_paid_tax_incl': self.get_value_data(order_detail.get('total_paid_tax_incl')),
'total_products_wt': self.get_value_data(order_detail.get('total_products_wt')),
'total_shipping_tax_excl': self.get_value_data(order_detail.get('total_shipping_tax_excl')),
'total_shipping_tax_incl': self.get_value_data(order_detail.get('total_shipping_tax_incl')),
'total_wrapping_tax_excl': self.get_value_data(order_detail.get('total_wrapping_tax_excl')),
'total_wrapping_tax_incl': self.get_value_data(order_detail.get('total_wrapping_tax_incl')),
'is_prestashop': True,
})
if invoice_id.state == 'draft':
invoice_id.action_post()
# if invoice_id.state not in ['paid'] and invoice_id.invoice_line_ids:
# payment_register_id = self.env['account.payment.register'].with_context(active_model= 'account.move',active_ids=invoice_id.ids).create({})
# print('order_detail---------',order_detail)
# print('payment_register_id---------',payment_register_id)
# payments = payment_register_id._create_payments()
# print('payments---------',payments)
# invoice_id.pay_and_reconcile(
# self.workflow_id and self.sale_journal or self.env['account.journal'].search(
# [('type', '=', 'bank')], limit=1), invoice_id.amount_total)
return True
# @api.one
def manageOrderLines(self, orderid, order_detail, prestashop):
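        """Create sale.order.line records from the PrestaShop order rows,
        resolving variants through their combination; discount, shipping and
        gift-wrapping amounts are added as dedicated product lines."""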
sale_order_line_obj = self.env['sale.order.line']
prod_attr_val_obj = self.env['product.attribute.value']
prod_templ_obj = self.env['product.template']
product_obj = self.env['product.product']
lines = []
order_rows = order_detail.get('associations').get('order_rows').get('order_row')
        if not isinstance(order_rows, list):
            order_rows = [order_rows]
for child in order_rows:
line = {
'price_unit': float(self.get_value_data(child.get('unit_price_tax_incl'))),
'name': self.get_value_data(child.get('product_name')),
'product_uom_qty': float(self.get_value_data(child.get('product_quantity'))),
'order_id': orderid.id,
'tax_id': False,
'presta_id': self.get_value_data(child.get('id')),
'presta_line': True,
}
if self.get_value_data(child.get('product_attribute_id')) != '0':
value_list = []
temp_id = False
try:
combination = prestashop.get('combinations', self.get_value_data(child.get('product_attribute_id')))
value_ids = combination.get('combination').get('associations').get('product_option_values').get(
'product_option_value')
                if not isinstance(value_ids, list):
                    value_ids = [value_ids]
                for value_id in value_ids:
                    presta_value_id = self.get_value_data(value_id.get('id'))
                    attr_value_id = prod_attr_val_obj.search([('presta_id', '=', presta_value_id)])
                    value_list.append(attr_value_id.id)
temp_id = prod_templ_obj.search(
[('presta_id', '=', self.get_value_data(combination.get('combination').get('id_product'))),
('prestashop_product', '=', True)], limit=1)
except Exception as e:
logger.info('Error/Warning product combination 000000000000000000000000000===> %s', e)
if not temp_id:
try:
prod_data_tmpl = prestashop.get('products', self.get_value_data(child.get('product_id')))
self.create_presta_product(prod_data_tmpl.get('product'), prestashop)
temp_id = prod_templ_obj.search(
[('presta_id', '=', self.get_value_data(child.get('product_id'))),
('prestashop_product', '=', True)], limit=1)
except Exception as e:
logger.info('Error/Warning product combination 11111111111111111111111111111111111===> %s', e)
if temp_id:
product_ids = product_obj.search(
[('presta_id', '=', self.get_value_data(child.get('product_id')))])
                for product_id in product_ids:
                    # Match the variant whose attribute values equal the combination's values
                    if sorted(product_id.product_template_attribute_value_ids.product_attribute_value_id.ids) == sorted(value_list) and product_id.product_tmpl_id == temp_id:
                        product_ids = product_id
if product_ids:
line.update({'product_id': product_ids[0].id, 'product_uom': product_ids[0].uom_id.id})
else:
prod_data = prestashop.get('products', self.get_value_data(
combination.get('combination').get('id_product')))
self.create_presta_product(prod_data.get('product'), prestashop)
product_ids = product_obj.search([('product_tmpl_id.presta_id', '=', self.get_value_data(
combination.get('combination').get('id_product')))])
line.update({'product_id': product_ids[0].id, 'product_uom': product_ids[0].uom_id.id})
else:
product_id = product_obj.search(
[('product_tmpl_id.presta_id', '=', self.get_value_data(child.get('product_id'))),
('prestashop_product', '=', True)], limit=1)
if product_id:
line.update({'product_id': product_id.id, 'product_uom': product_id.uom_id.id})
else:
try:
new_product_data = prestashop.get('products', self.get_value_data(child.get('product_id')))
self.create_presta_product(new_product_data.get('product'), prestashop)
self.env.cr.commit()
new_product_ids = product_obj.search(
[('product_tmpl_id.presta_id', '=', self.get_value_data(child.get('product_id')))])
line.update({'product_id': new_product_ids[0].id, 'product_uom': new_product_ids[0].uom_id.id})
except:
# product_id = self.remove_record_prestashop_checked(prod_templ_obj, 'Removed Product', {'name': 'Removed Product'})
product_id = self.remove_record_prestashop_checked(product_obj, 'Removed Product', {'name': 'Removed Product'})
line.update({'product_id': product_id.id, 'product_uom': product_id.uom_id.id})
if 'product_id' not in line:
# product_id = self.remove_record_prestashop_checked(prod_templ_obj, 'Removed Product', {'name': 'Removed Product'})
product_id = self.remove_record_prestashop_checked(product_obj, 'Removed Product', {'name': 'Removed Product'})
line.update({'product_id': product_id.id, 'product_uom': product_id.uom_id.id})
if child.get('id'):
                line_ids = sale_order_line_obj.search(
                    [('presta_id', '=', self.get_value_data(line.get('presta_id'))), ('order_id', '=', orderid.id)])
if not line_ids:
sale_order_line_obj.create(line)
        if order_detail.get('total_discounts'):
            discount = order_detail.get('total_discounts_tax_incl')
            discount_line = {
                'product_id': self.discount_product_id.id,
                'product_uom': self.discount_product_id.uom_id.id,
                'price_unit': - (float(discount)),
                'product_uom_qty': 1,
                'tax_id': False,
                'order_id': orderid.id
            }
            dline_ids = sale_order_line_obj.search(
                [('product_id', '=', self.get_value_data(discount_line.get('product_id'))), ('order_id', '=', orderid.id)])
            if not dline_ids:
                sale_order_line_obj.create(discount_line)
            else:
                dline_ids[0].write({'price_unit': - (float(discount))})
# Shipment fees and fields
ship = float(self.get_value_data(order_detail.get('total_shipping_tax_excl')))
if ship and ship > 0:
sline = {
'product_id': self.shipment_fee_product_id.id,
'product_uom': self.shipment_fee_product_id.uom_id.id,
'price_unit': ship,
'product_uom_qty': 1,
'order_id': orderid.id,
'tax_id': False,
}
sline_ids = sale_order_line_obj.search(
[('product_id', '=', self.get_value_data(sline.get('product_id'))), ('order_id', '=', orderid.id)])
if not sline_ids:
sale_order_line_obj.create(sline)
else:
sline_ids[0].write(sline)
# wrapping fees and fields
wrapping = float(self.get_value_data(order_detail.get('total_wrapping', 0)))
if wrapping and wrapping > 0:
wline = {
'product_id': self.gift_wrapper_fee_product_id.id,
'product_uom': self.gift_wrapper_fee_product_id.uom_id.id,
'price_unit': wrapping,
'product_uom_qty': 1,
'name': self.get_value_data(order_detail.get('gift_message')),
'order_id': orderid.id,
'tax_id': False,
}
wline_ids = sale_order_line_obj.search(
[('product_id', '=', self.get_value_data(wline.get('product_id'))), ('order_id', '=', orderid.id)])
if not wline_ids:
sale_order_line_obj.create(wline)
else:
wline_ids[0].write(wline)
# @api.one
def remove_record_prestashop_checked(self,brows_object,name,vals):
record_id = brows_object.search([('name', '=', name)], limit=1)
if not record_id:
record_id = brows_object.create(vals)
return record_id
def create_presta_order(self, order_detail, prestashop):
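        """Create a sale.order from a PrestaShop order dict, creating any
        missing customer, addresses, status or carrier on the fly, then run
        line creation and the order workflow."""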
sale_order_obj = self.env['sale.order']
res_partner_obj = self.env['res.partner']
carrier_obj = self.env['delivery.carrier']
product_obj = self.env['product.product']
status_obj = self.env['presta.order.status']
order_vals = {}
try:
id_customer = res_partner_obj.search([('presta_id', '=', order_detail.get('id_customer')),('prestashop_customer','=',True)],limit=1)
if not id_customer:
try:
cust_data = prestashop.get('customers', order_detail.get('id_customer'))
id_customer = self.create_customer(cust_data.get('customer'),prestashop)
except Exception as e:
id_customer = self.remove_record_prestashop_checked(res_partner_obj,'Removed Customer',{'name':'Removed Customer'})
id_address_delivery = res_partner_obj.search([('presta_id', '=', order_detail.get('id_address_delivery')), ('prestashop_address', '=', True)], limit=1)
if not id_address_delivery:
try:
address_data = prestashop.get('addresses', order_detail.get('id_address_delivery'))
id_address_delivery = self.create_address(address_data.get('address'), prestashop)
except Exception as e:
id_address_delivery = self.remove_record_prestashop_checked(res_partner_obj, 'Removed Addresss',{'name': 'Removed Addresss'})
id_address_invoice = res_partner_obj.search([('presta_id', '=', order_detail.get('id_address_invoice')), ('prestashop_address', '=', True)],limit=1)
if not id_address_invoice:
try:
address_inv_data = prestashop.get('addresses', order_detail.get('id_address_invoice'))
id_address_invoice = self.create_address(address_inv_data.get('address'), prestashop)
except Exception as e:
id_address_invoice = self.remove_record_prestashop_checked(res_partner_obj, 'Removed Addresss',{'name': 'Removed Addresss'})
order_vals.update({'partner_id': id_customer.id,'partner_shipping_id':id_address_delivery.id,'partner_invoice_id': id_address_invoice.id})
state_id = status_obj.search([('presta_id', '=', self.get_value_data(order_detail.get('current_state')))],limit=1)
if not state_id:
try:
orders_status_lst = prestashop.get('order_states', self.get_value_data(order_detail.get('current_state')))
state_id = status_obj.create({'name': self.get_value_data(self.get_value(orders_status_lst.get('order_state').get('name').get('language'))),'presta_id': self.get_value_data(order_detail.get('current_state')),})
except Exception as e:
state_id = self.remove_record_prestashop_checked(status_obj, 'Removed Status',{'name': 'Removed Status'})
a = self.get_value_data(order_detail.get('payment'))
p_mode = False
if a[0] == 'Cash on delivery COD':
p_mode = 'cod'
elif a[0] == 'Bank wire':
p_mode = 'bankwire'
elif a[0] == 'Payments by check':
p_mode = 'cheque'
elif a[0] == 'Bank transfer':
p_mode = 'banktran'
order_vals.update({
'reference': self.get_value_data(order_detail.get('reference')),
'presta_id': order_detail.get('id'),
'warehouse_id': self.warehouse_id.id,
'presta_order_ref': self.get_value_data(order_detail.get('reference')),
'pretsa_payment_mode': p_mode,
'pricelist_id': self.pricelist_id.id,
'workflow_order_id': self.workflow_id.id,
# 'name': self.get_value_data(order_detail.get('id')),
'order_status' : state_id.id,
'shop_id': self.id,
'prestashop_order': True,
'date_order': self.get_value_data(order_detail.get('date_add')),
# 'presta_order_date': self.get_value_data(order_detail.get('date_add')),
})
if self.workflow_id.picking_policy:
order_vals.update({'picking_policy' : self.workflow_id.picking_policy})
carr_id=False
if int(self.get_value_data(order_detail.get('id_carrier'))) > 0:
carr_obj_id = carrier_obj.search([('presta_id', '=', order_detail.get('id_carrier')), ('is_presta', '=', True)], limit=1)
if carr_obj_id:
carr_id = carr_obj_id.id
if not carr_obj_id:
try:
carrier_data = prestashop.get('carriers', self.get_value_data(order_detail.get('id_carrier')))
carr_id = self.create_carrier(self.get_value_data(carrier_data.get('carrier')))
except Exception as e:
product_id = self.remove_record_prestashop_checked(product_obj, 'Removed Carrier',{'name': 'Removed Carrier'})
carr_id = self.remove_record_prestashop_checked(carrier_obj, 'Removed Carrier',{'name': 'Removed Carrier','product_id': product_id.id,'is_presta': True}).id
order_vals.update({'carrier_prestashop': carr_id})
sale_order_id = sale_order_obj.search([('presta_id','=', order_detail.get('id')),('prestashop_order','=',True)],limit=1)
if not sale_order_id:
sale_order_id = sale_order_obj.create(order_vals)
logger.info('created orders ===> %s', sale_order_id.id)
if sale_order_id:
self.env.cr.execute("select saleorder_id from saleorder_shop_rel where saleorder_id = %s and shop_id = %s" % (sale_order_id.id, self.id))
so_data = self.env.cr.fetchone()
                if so_data is None:
self.env.cr.execute("insert into saleorder_shop_rel values(%s,%s)" % (sale_order_id.id, self.id))
self.manageOrderLines(sale_order_id, order_detail, prestashop)
self.manageOrderWorkflow(sale_order_id, order_detail, state_id)
self.env.cr.commit()
return sale_order_id
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create({'all_operations':'import_orders','error_lines': [(0,0, {'log_description': str(e)})]})
log_id = log_id_obj.id
# self = self.with_context(log_id = log_id.id)
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return True
# @api.multi
def import_orders(self):
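        """ Import PrestaShop orders in batches of 100, filtered either by
        the `last_order_import_date` context key or by the last imported
        order id stored on the shop. """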
try:
for shop in self:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
if 'last_order_import_date' in self._context:
prestashop_order_data = prestashop.get('orders', options={
'display': 'full',
'filter[date_upd]': "[{},{}]".format(self.env.context.get('last_order_import_date'), str(datetime.now())),
'date': '1',
'sort': '[id_DESC]',
'limit': 100,
})
else:
filters = {'display': 'full', 'filter[id]': '=[%s]' % shop.last_order_id_id_import, 'limit': 100}
prestashop_order_data = prestashop.get('orders', options=filters)
if prestashop_order_data.get('orders') and prestashop_order_data.get('orders').get('order'):
orders = prestashop_order_data.get('orders').get('order')
                    if not isinstance(orders, list):
                        orders = [orders]
for order in orders:
shop.create_presta_order(order, prestashop)
shop.write({'last_order_id_id_import': order.get('id')})
self.env.cr.commit()
except Exception as e:
raise ValidationError(_(str(e)))
return True
def create_presta_message_threads(self,thread,prestashop):
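        """ Find or create a customer.threads record for the given PrestaShop
        thread id, resolving its customer and related order on the fly.
        Returns the thread record, or False when the thread cannot be
        fetched from PrestaShop. """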
res_obj = self.env['res.partner']
sale_obj = self.env['sale.order']
customer_threads_obj = self.env['customer.threads']
thread_id = customer_threads_obj.search([('presta_id','=',thread)],limit=1)
if not thread_id:
try:
customer_threads = prestashop.get('customer_threads',thread)
if customer_threads.get('customer_thread'):
customer_thread_dict = customer_threads.get('customer_thread')
threads_vals = {
'presta_id':customer_thread_dict.get('id'),
'id_shop': self.get_value_data(customer_thread_dict.get('id_shop')),
'token': self.get_value_data(customer_thread_dict.get('token')),
'email': self.get_value_data(customer_thread_dict.get('email')),
'status': self.get_value_data(customer_thread_dict.get('status')),
}
if self.get_value_data(customer_thread_dict.get('id_customer')):
customer_id = res_obj.search([('presta_id', '=', self.get_value_data(customer_thread_dict.get('id_customer'))), ('prestashop_customer', '=', True)], limit=1)
if not customer_id:
try:
cust_data = prestashop.get('customers', self.get_value_data(customer_thread_dict.get('id_customer')))
customer_id = self.create_customer(cust_data.get('customer'), prestashop)
except Exception as e:
customer_id = self.remove_record_prestashop_checked(res_obj, 'Removed Customer',
{'name': 'Removed Customer'})
threads_vals.update({'customer_id': customer_id.id, })
order_presta_id = self.get_value_data(customer_thread_dict.get('id_order'))
if order_presta_id:
check_order = True
order = sale_obj.search([('presta_id', '=', order_presta_id), ('prestashop_order', '=', True)],limit=1)
if not order:
try:
order_detail = prestashop.get('orders', self.get_value_data(customer_thread_dict.get('id_order')))
order_data_ids = order_detail.get('order')
order = self.create_presta_order(order_data_ids, prestashop)
except Exception as e:
check_order = False
if check_order:
threads_vals.update({'order_id': order.id})
thread_id = customer_threads_obj.create(threads_vals)
except Exception as e:
thread_id = False
return thread_id
def create_presta_message(self,message_dict,prestashop):
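        """ Create or update an order.message from a PrestaShop customer
        message and link it to its customer thread and sale order. """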
order_msg = self.env['order.message']
order_msg_vals = {
'msg_prest_id': self.get_value_data(message_dict.get('id')),
'message': self.get_value_data(message_dict.get('message')),
}
thread_id = self.create_presta_message_threads(message_dict.get('id_customer_thread'), prestashop)
        if thread_id:
order_msg_vals.update({'thread_id': thread_id.id,'new_id': thread_id.order_id.id})
order_msg_id = order_msg.search([('thread_id', '=', thread_id.id),('msg_prest_id', '=', order_msg_vals.get('msg_prest_id'))])
if not order_msg_id:
msg_id = order_msg.create(order_msg_vals)
logger.info('created messages ===> %s', msg_id.id)
self.env.cr.commit()
else:
order_msg_id.write(order_msg_vals)
def import_messages(self):
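        """ Import PrestaShop customer messages in batches of 20, starting
        from the last imported message id stored on the shop. """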
try:
for shop in self:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '=[%s]' % shop.last_message_id_import, 'limit': 20}
message = prestashop.get('customer_messages', options=filters)
if message.get('customer_messages') and message.get('customer_messages').get('customer_message'):
messages = message.get('customer_messages').get('customer_message')
                    if not isinstance(messages, list):
                        messages = [messages]
for message_dict in messages:
shop.create_presta_message(message_dict, prestashop)
shop.write({'last_message_id_import': message_dict.get('id')})
except Exception as e:
raise ValidationError(_(str(e)))
return True
# @api.multi
def import_cart_rules(self):
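        """ Import PrestaShop cart rules in batches of 500, creating missing
        customers on the fly, and link the rules to this shop. """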
try:
cart_obj = self.env['cart.rules']
res_partner_obj = self.env['res.partner']
for shop in self:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '=[%s]' % shop.last_cart_rule_id_import, 'limit': 500}
cart = prestashop.get('cart_rules', options=filters)
if cart.get('cart_rules') and cart.get('cart_rules').get('cart_rule'):
carts = cart.get('cart_rules').get('cart_rule')
                    if not isinstance(carts, list):
                        carts = [carts]
for cart_dict in carts:
                        id_customer = res_partner_obj.search([('presta_id', '=', cart_dict.get('id_customer'))])
if not id_customer:
try:
cust_data = prestashop.get('customers', cart_dict.get('id_customer'))
id_customer = shop.create_customer(cust_data.get('customer'), prestashop)
except Exception as e:
id_customer = self.remove_record_prestashop_checked(res_partner_obj, 'Removed Customer',{'name': 'Removed Customer'})
cart_vals = {
'id_customer':id_customer.id or False,
'date_from': self.get_value_data(cart_dict.get('date_from')),
'date_to': self.get_value_data(cart_dict.get('date_to')),
'description': self.get_value_data(cart_dict.get('description')),
'quantity': self.get_value_data(cart_dict.get('quantity')),
'code': self.get_value_data(cart_dict.get('code')),
'partial_use':bool(int( self.get_value_data(cart_dict.get('partial_use')))),
'minimum_amount': self.get_value_data(cart_dict.get('minimum_amount')),
'free_shipping':bool(int( self.get_value_data(cart_dict.get('free_shipping')))),
# 'name' : cart_data.get('cart_rule').get('name').get('language').get('value'),
'name': self.get_value_data(self.get_value( cart_dict.get('name').get('language'))),
'presta_id' : cart_dict.get('id'),
}
carts_id = cart_obj.search([('presta_id', '=', self.get_value_data(cart_dict.get('id')))],limit=1)
if not carts_id:
carts_id = cart_obj.create(cart_vals)
else:
carts_id.write(cart_vals)
self.env.cr.execute("select cart_id from cart_shop_rel where cart_id = %s and shop_id = %s" % (carts_id.id, shop.id))
data = self.env.cr.fetchone()
if not data:
self.env.cr.execute("insert into cart_shop_rel values(%s,%s)" % (carts_id.id, shop.id))
self.env.cr.commit()
                        shop.write({'last_cart_rule_id_import': cart_dict.get('id')})
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_cart_rules', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return True
def import_catalog_price_rules(self):
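        """ Import PrestaShop specific price rules (catalog price rules) in
        batches of 500 and link them to this shop. """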
try:
catalog_price_obj = self.env['catalog.price.rules']
for shop in self:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
filters = {'display': 'full', 'filter[id]': '=[%s]' % shop.last_catalog_rule_id_import, 'limit': 500}
catalog_rule = prestashop.get('specific_price_rules',options=filters)
if catalog_rule.get('specific_price_rules') and catalog_rule.get('specific_price_rules').get('specific_price_rule'):
catalog_rules = catalog_rule.get('specific_price_rules').get('specific_price_rule')
                    if not isinstance(catalog_rules, list):
                        catalog_rules = [catalog_rules]
for catlog_dict in catalog_rules:
from_date = False
if not self.get_value_data(catlog_dict.get('from')) == '0000-00-00 00:00:00':
from_date = self.get_value_data(catlog_dict.get('from'))
to_date = False
if not self.get_value_data(catlog_dict.get('to')) == '0000-00-00 00:00:00':
to_date = self.get_value_data(catlog_dict.get('to'))
rule_vals = {
'name': self.get_value_data(catlog_dict.get('name')),
'from_quantity': self.get_value_data(catlog_dict.get('from_quantity')),
'price': self.get_value_data(catlog_dict.get('price')),
'reduction': self.get_value_data(catlog_dict.get('reduction')),
'reduction_type': self.get_value_data(catlog_dict.get('reduction_type')),
'from_date': from_date,
'to_date': to_date,
'presta_id':catlog_dict.get('id'),
}
rule_id = catalog_price_obj.search([('presta_id','=', self.get_value_data(catlog_dict.get('id')))],limit=1)
if not rule_id:
rule_id = catalog_price_obj.create(rule_vals)
logger.info('created catalog RULE ===> %s', rule_id.id)
else:
rule_id.write(rule_vals)
self.env.cr.execute("select catalog_id from catalog_shop_rel where catalog_id = %s and shop_id = %s" % (rule_id.id, shop.id))
data = self.env.cr.fetchone()
if not data:
self.env.cr.execute("insert into catalog_shop_rel values(%s,%s)" % (rule_id.id, shop.id))
shop.write({'last_catalog_rule_id_import': catlog_dict.get('id')})
self.env.cr.commit()
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'import_catalog_rules', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return True
# @api.multi
def update_prestashop_category(self):
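        """ Push local category changes (name, parent, active flag) to
        PrestaShop for the categories linked to this shop, restricted to
        records modified since the last category update date when set. """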
categ_obj = self.env['prestashop.category']
for shop in self:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
try:
query = "select categ_id from presta_categ_shop_rel where shop_id = %s"%shop.id
self.env.cr.execute(query)
                fetch_categ = self.env.cr.fetchall()
                if fetch_categ:
                    fetch_categ = [i[0] for i in fetch_categ]
                if shop.prestashop_last_update_category_date:
                    categ_ids = categ_obj.search([('write_date', '>=', shop.prestashop_last_update_category_date), ('id', 'in', fetch_categ)])
                else:
                    categ_ids = categ_obj.search([('id', 'in', fetch_categ)])
for each in categ_ids:
                    # presta_id may be stored as a string like '[12]'; extract the numeric id
                    stripped = each.presta_id.replace('[', '').replace(']', '')
                    k = False
                    for token in stripped.split():
                        if token.isdigit():
                            k = token
                    if not k:
                        continue
                    cat = prestashop.get('categories', k)
cat.get('category').update({
'id': k,
'name': {'language': {'attrs': {'id': '1'}, 'value': str(each.name)}},
'active': 1,
'id_parent': each.parent_id and str(each.parent_id.presta_id) or 0,
})
cat.get('category').pop('level_depth')
cat.get('category').pop('nb_products_recursive')
result = prestashop.edit('categories', cat)
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create({'all_operations': 'update_categories', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
shop.write({'prestashop_last_update_category_date': datetime.now()})
return True
# @api.multi
def update_cart_rules(self):
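        """ Push local cart rule changes to PrestaShop for the rules linked
        to this shop, restricted to records modified since the last cart
        rule update date when set. """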
cart_obj = self.env['cart.rules']
for shop in self:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
try:
query = "select cart_id from cart_shop_rel where shop_id = %s"%shop.id
self.env.cr.execute(query)
fetch_cart_rules = self.env.cr.fetchall()
                if fetch_cart_rules is not None:
fetch_cart_rules = [i[0] for i in fetch_cart_rules]
if shop.prestashop_last_update_cart_rule_date:
cart_ids = cart_obj.search([('write_date', '>=', shop.prestashop_last_update_cart_rule_date),('id','in',fetch_cart_rules)])
else:
cart_ids = cart_obj.search([('id','in',fetch_cart_rules)])
for each in cart_ids:
cart = prestashop.get('cart_rules', each.presta_id)
cart.get('cart_rule').update(
{
'id': each.presta_id and str(each.presta_id),
'code': each.code and str(each.code),
'description': each.description and str(each.description),
'free_shipping': each.free_shipping and str(int(each.free_shipping)),
'id_customer': each.id_customer and each.id_customer.presta_id and str(each.id_customer.presta_id) or '0',
                        'date_to': each.date_to and str(each.date_to) or '0000-00-00 00:00:00',
                        'name': {'language': {'attrs': {'id': '1'}, 'value': each.name and str(each.name)}},
                        'date_from': each.date_from and str(each.date_from) or '0000-00-00 00:00:00',
'partial_use': each.partial_use and str(int(each.partial_use)),
'quantity': str(each.quantity),
})
prestashop.edit('cart_rules', cart)
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create({'all_operations': 'update_cart_rules', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
shop.write({'prestashop_last_update_cart_rule_date': datetime.now()})
return True
# @api.multi
def update_catalog_rules(self):
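        """ Push local specific price rule changes to PrestaShop for the
        rules linked to this shop. """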
catalog_price_obj = self.env['catalog.price.rules']
for shop in self:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
try:
query = "select catalog_id from catalog_shop_rel where shop_id = %s"%shop.id
self.env.cr.execute(query)
fetch_catalog_rules = self.env.cr.fetchall()
if fetch_catalog_rules is not None:
fetch_catalog_rules = [i[0] for i in fetch_catalog_rules]
if shop.prestashop_last_update_catalog_rule_date:
catalog_ids = catalog_price_obj.search([('write_date', '>', shop.prestashop_last_update_catalog_rule_date),('id', 'in', fetch_catalog_rules)])
else:
catalog_ids = catalog_price_obj.search([('id', 'in', fetch_catalog_rules)])
for each in catalog_ids:
catalog = prestashop.get('specific_price_rules', each.presta_id)
catalog.get('specific_price_rule').update({
'id': str(each.presta_id),
'reduction_type': str(each.reduction_type),
'name': str(each.name),
'price': str(each.price),
'from_quantity': str(each.from_quantity),
'reduction': str(each.reduction),
                        'from': each.from_date and str(each.from_date) or '0000-00-00 00:00:00',
                        'to': each.to_date and str(each.to_date) or '0000-00-00 00:00:00',
'id_shop':1,
'id_country':0,
'id_currency':0,
'id_group':0,
'reduction_tax':0
})
prestashop.edit('specific_price_rules', catalog)
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create({'all_operations': 'update_catalog_rules', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
shop.write({'prestashop_last_update_catalog_rule_date': datetime.now()})
return True
# @api.multi
def update_products(self,variant=False):
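        """ Push local product template changes (details, default category
        tree and variant combinations) to PrestaShop for the templates
        linked to this shop. """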
#update product details,image and variants
prod_templ_obj = self.env['product.template']
prdct_obj = self.env['product.product']
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
query = "select product_id from product_templ_shop_rel where shop_id = %s"%shop.id
self.env.cr.execute(query)
fetch_products = self.env.cr.fetchall()
if fetch_products is not None:
fetch_products = [i[0] for i in fetch_products]
if shop.prestashop_last_update_product_data_date:
product_data_ids = prod_templ_obj.search([('write_date', '>=',shop.prestashop_last_update_product_data_date),('id', 'in',fetch_products)])
else:
product_data_ids = prod_templ_obj.search([('id', 'in',fetch_products)])
for each in product_data_ids:
product = prestashop.get('products', each.presta_id)
categ = [{'id': each.presta_categ_id.presta_id and str(each.presta_categ_id.presta_id)}]
parent_id = each.presta_categ_id.parent_id
while parent_id:
categ.append({'id': parent_id.presta_id and str(parent_id.presta_id)})
parent_id = parent_id.parent_id
product.get('product').get('associations').update({'categories': {'attrs': {'node_type': 'category'}, 'category': categ},})
product.get('product').update({
'name': {'language': {'attrs': {'id': '1'}, 'value': each.name and str(each.name)}},
'active': '1',
'type': 'simple',
'state': '1',
'online_only': '1',
'reference': each.default_code and str(each.default_code),
'wholesale_price': each.wholesale_price and str(each.wholesale_price),
'price': each.list_price and str(each.list_price),
'depth': each.product_lngth and str(each.product_lngth),
'width': each.product_width and str(each.product_width),
'weight': each.product_wght and str(each.product_wght),
'height': each.product_hght and str(each.product_hght),
'available_now': ({'language': {'attrs': {'id': '1'}, 'value': each.product_instock and str(int(each.product_instock))}}),
'on_sale' : each.product_onsale and str(int(each.product_onsale)) ,
'id': each.presta_id and str(each.presta_id),
'id_supplier': each.supplier_id and str(each.supplier_id.presta_id) or '0',
'id_manufacturer': each.manufacturer_id and str(each.manufacturer_id.presta_id) or '0',
'id_category_default':each.presta_categ_id and str(each.presta_categ_id.presta_id),
'position_in_category':'',
# 'description': {'language': {'attrs': {'id': '1'}, 'value': each.product_description}}
# 'name': {'language': {'attrs': {'id': '1'}, 'value': each.prd_label}},
# 'product_img_ids':product.get('associations').get('images').get('image') or False,
})
product.get('product').pop('quantity')
combination_list = []
if each.attribute_line_ids:
prod_variant_ids = prdct_obj.search([('product_tmpl_id', '=', each.id)])
for variant in prod_variant_ids:
if variant.combination_id:
prod_variants_comb = prestashop.get('combinations', variant.combination_id)
option_values = []
for op in variant.product_template_attribute_value_ids:
option_values.append({'id': op.presta_id and str(op.presta_id)})
prod_variants_comb.get('combination').get('associations').get('product_option_values').update({
'product_option_value' : option_values[0]
})
prod_variants_comb.get('combination').update({
'is_virtual':'1',
'id_product': variant.product_tmpl_id and str(variant.product_tmpl_id.presta_id),
'reference': variant.default_code and str(variant.default_code),
'id': variant.combination_id and str(variant.combination_id),
'minimal_quantity': '1',
'price': variant.prdct_unit_price and str(variant.prdct_unit_price),
})
response_comb = prestashop.edit('combinations', prod_variants_comb)
combination_list.append({'id': variant.combination_id})
if combination_list:
product.get('product').get('associations').get('combinations').update({
'combination' : combination_list
})
product.get('product').pop('manufacturer_name')
response = prestashop.edit('products', product)
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'update_product_data', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
shop.write({'prestashop_last_update_product_data_date': datetime.now()})
return True
# @api.multi
# def update_product_price(self):
# print ("======update_product_price======")
# # update product price
# prod_templ_obj = self.env['product.template']
# prdct_obj = self.env['product.product']
# stock_quant_obj = self.env['stock.quant']
#
# for shop in self:
# prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,
# shop.prestashop_instance_id.webservice_key,physical_url = shop.shop_physical_url or None)
# # try:
# query = "select product_id from product_templ_shop_rel where shop_id = %s"%shop.id
# self.env.cr.execute(query)
# fetch_products_price = self.env.cr.fetchall()
# if fetch_products_price != None:
# fetch_products_price = [i[0] for i in fetch_products_price]
# # if shop.prestashop_last_update_product_data_date:
# # product_data_ids = prod_templ_obj.search(
# # [('write_date', '>=', shop.prestashop_last_update_product_data_date),('id', 'in',fetch_products_price)])
# # print ("=====product_data_ids111111======",product_data_ids)
# # else:
# product_data_ids = prod_templ_obj.search([('id', 'in',fetch_products_price)])
# for each in product_data_ids:
# # print ("EACHHHHHHHHH",each)
# # try:
# product = prestashop.get('products', each.presta_id)
# # print ("PRODUCTTTTTTTTTT",product)
# categ = [{'id': each.presta_categ_id.presta_id}]
# parent_id = each.presta_categ_id.parent_id
# while parent_id:
# categ.append({'id': parent_id.presta_id})
# parent_id = parent_id.parent_id
# product.get('product').get('associations').update({
# 'categories': {'attrs': {'node_type': 'category'}, 'category': categ},
# })
#
# product.get('product').update({
# # 'price': str(each.prdct_unit_price),
# 'price': str(each.with_context(pricelist=self.pricelist_id.id).price),
# # 'quantity':0,
# 'wholesale_price': each.wholesale_price and str(each.wholesale_price),
# 'id': each.presta_id and str(each.presta_id),
# 'position_in_category':'',
# 'id_category_default':each.presta_categ_id and str(each.presta_categ_id) and each.presta_categ_id.presta_id,
# # 'available_now': (
# # {'language': {'attrs': {'id': '1'}, 'value': str(int(each.product_instock))}}),
# # 'on_sale': str(int(each.product_onsale)),
# # 'id': each.presta_id,
# })
#
# if each.attribute_line_ids:
# # try:
# prod_variant_ids = prdct_obj.search([('product_tmpl_id', '=', each.id)])
# # if not prod_variant_ids:
#
# for variant in prod_variant_ids:
# if variant.combination_id:
# print("=======prod_variant_ids========>",prod_variant_ids)
# print("=======prestashop.get('combinations'========>",prestashop.get('combinations'))
# prod_variants_comb = prestashop.get('combinations', variant.combination_id)
# print("=======prod_variants_comb========>",prod_variants_comb)
# # prod_variants_comb_price = prestashop.get('combinations', variant.combination_price)
# # print "prod_variants_comb_price===>",prod_variants_comb_price
# # option_values = []
# # for op in variant.attribute_value_ids:
# # option_values.append({'id': op.presta_id})
# # prod_variants_comb.get('combination').get('associations').get('product_option_values').update({
# # 'product_option_value' : option_values
# # })
# prod_variants_comb.get('combination').update({
# # 'id_product': variant.product_tmpl_id.presta_id,
# # 'reference': variant.default_code,
# 'minimal_quantity': '1',
# # 'position_in_category':'',
# # 'price': str(variant.prdct_unit_price),
# # 'id': variant.combination_id and str(variant.combination_id),
# # 'id_product': variant.product_tmpl_id and str(variant.product_tmpl_id.presta_id),
# 'id_product': variant.product_tmpl_id and variant.product_tmpl_id.presta_id and str(variant.product_tmpl_id.presta_id),
# 'price': str(variant.with_context(pricelist=self.pricelist_id.id).price),
# 'wholesale_price': variant.wholesale_price and str(variant.wholesale_price),
# })
# prod_variants_comb.get('combination').pop('quantity')
# # print("==========result=======>",result)
# prestashop.edit('combinations', prod_variants_comb)
# # except:
# # pass
# #
# product.get('product').pop('manufacturer_name')
# product.get('product').pop('quantity')
# prestashop.edit('products', product)
# # except Exception as e:
# # if self.env.context.get('log_id'):
# # log_id = self.env.context.get('log_id')
# # self.env['log.error'].create(
# # {'log_description': str(e) + ' While updating product price %s' % (each.name),
# # 'log_id': log_id})
# # else:
# # log_id = self.env['prestashop.log'].create({'all_operations': 'update_product_price',
# # 'error_lines': [(0, 0, {'log_description': str(
# # e) + ' While updating product price %s' % (
# # each.name)})]})
# # self = self.with_context(log_id=log_id.id)
#
# # except Exception as e:
# # if self.env.context.get('log_id'):
# # log_id = self.env.context.get('log_id')
# # self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
# # else:
# # log_id = self.env['prestashop.log'].create(
# # {'all_operations': 'update_product_price', 'error_lines': [(0, 0, {'log_description': str(e)})]})
# # self = self.with_context(log_id=log_id.id)
# shop.write({'prestashop_last_update_product_data_date': datetime.now()})
# return True
# @api.multi
def update_presta_product_inventory(self):
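        """ Push stock levels to PrestaShop stock_availables for products
        carrying a presta inventory id; quantities are summed from the stock
        quants of this shop's warehouse stock location. """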
prod_templ_obj = self.env['product.template']
prdct_obj = self.env['product.product']
stck_quant = self.env['stock.quant']
try:
for shop in self:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
if self.env.context.get('product_ids'):
p_ids = prod_templ_obj.browse(self.env.context.get('product_ids'))
elif shop.prestashop_last_update_product_data_date:
stck_ids = stck_quant.search([('write_date', '>', shop.prestashop_last_update_product_data_date)])
p_ids = []
for i in stck_ids:
if i.product_id not in p_ids:
p_ids.append(i.product_id)
else:
p_ids = prdct_obj.search([('presta_id', '!=',False)])
for each in p_ids:
if each.presta_inventory_id:
prod_variant_inventory = prestashop.get('stock_availables', each.presta_inventory_id)
query = "SELECT sum(quantity) FROM stock_quant where product_id = %s and location_id = %s group by product_id"%(each.id, shop.warehouse_id.lot_stock_id.id)
self.env.cr.execute(query)
qty = self.env.cr.fetchone()
if qty:
if not each.combination_id:
prod_variant_inventory.get('stock_available').update({
'quantity': str(int(qty[0])),
'id': each.presta_inventory_id and str(each.presta_inventory_id),
'id_product': each.product_tmpl_id.presta_id,
'id_product_attribute':'0',
'depends_on_stock':0,
'out_of_stock':2,
'id_shop': shop.presta_id and str(shop.presta_id)
})
else :
prod_variant_inventory.get('stock_available').update({
'quantity': str(int(qty[0])),
'id': each.presta_inventory_id and str(each.presta_inventory_id),
'id_product': each.product_tmpl_id.presta_id ,
'id_product_attribute': each.combination_id and str(each.combination_id),
'depends_on_stock':0,
'out_of_stock':2,
'id_shop': shop.presta_id and str(shop.presta_id)
})
r = prestashop.edit('stock_availables', prod_variant_inventory)
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'update_inventory', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
shop.write({'prestashop_last_update_product_data_date': datetime.now()})
# @api.multi
def update_order_status(self):
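        """ Synchronise order states back to PrestaShop: once every invoice
        is open/paid the remote state becomes 'Payment accepted', and once
        every picking is done it becomes 'Shipped'. """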
sale_order = self.env['sale.order']
status_obj = self.env['presta.order.status']
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
                query = "select o.id from sale_order o, presta_order_status ps where o.order_status = ps.id and o.presta_id is not null and ps.name in ('Awaiting check payment','Awaiting bank wire payment','Awaiting Cash On Delivery validation','Processing in progress') and o.shop_id = %s" % (shop.id)
self.env.cr.execute(query)
fetch_orders = self.env.cr.fetchall()
if fetch_orders is not None:
fetch_orders = [i[0] for i in fetch_orders]
if shop.prestashop_last_update_order_status_date:
sale_order_ids = sale_order.search([('id', 'in',fetch_orders),('order_status.name','in',['Awaiting check payment','Awaiting bank wire payment','Awaiting Cash On Delivery validation','Processing in progress'])])
else:
sale_order_ids = sale_order.search([('id', 'in',fetch_orders)])
#import order status
order_states = prestashop.get('order_states')
os_status = order_states.get('order_states').get('order_state')
                if not isinstance(os_status, list):
                    os_status = [os_status]
for status in os_status:
state_ids = status_obj.search([('presta_id', '=', status.get('attrs').get('id'))])
if state_ids:
st_id = state_ids[0]
else:
orders_status_lst = prestashop.get('order_states', status.get('attrs').get('id'))
st_id = status_obj.create(
{'name': self.get_value(orders_status_lst.get('order_state').get('name').get('language')).get('value'),
'presta_id': orders_status_lst.get('order_state').get('id')})
for sale_order in sale_order_ids:
order = prestashop.get('orders', sale_order.presta_id)
order.get('order').update({
'reference': sale_order.presta_order_ref and str(sale_order.presta_order_ref),
# 'conversion_rate': '1.000000',
'module': str(sale_order.pretsa_payment_mode),
'id_customer':1,
'id_address_delivery':1,
'id_address_invoice' :1,
'id_cart':1,
'id_currency':1,
'total_products': str(sale_order.amount_total),
'id_carrier': sale_order.carrier_prestashop and str(sale_order.carrier_prestashop.presta_id),
'payment': {'bankwire': 'Bankwire'},
'id': sale_order and str(sale_order.presta_id),
'id_lang':1,
'total_paid':sale_order.amount_untaxed and str(sale_order.amount_untaxed),
'total_paid_real':sale_order.amount_total and str(sale_order.amount_total),
'total_products_wt': 1,
'conversion_rate': 1
# 'id_shop': '1',
})
if sale_order.invoice_status == 'invoiced':
order.get('order').get('total_paid_tax_incl').update({'value': str(sale_order.amount_total)})
order.get('order').get('total_paid_tax_excl').update({'value': str(sale_order.amount_untaxed)})
shipping_product = shop.shipment_fee_product_id
for line in sale_order.order_line:
if line.product_id.id == shipping_product.id:
shipping_cost = shipping_product.lst_price
order.get('order').update({'total_shipping': str(shipping_cost)})
order.get('order').update({'total_shipping_tax_excl': str(shipping_cost)})
discount = 0.0
for line in sale_order.order_line:
discount += line.discount
if discount>0.0:
order.get('order').update({'total_discounts':discount})
order.get('order').update({'total_discounts_tax_excl':discount})
if sale_order.order_status.name in ['Awaiting check payment','Awaiting bank wire payment','Awaiting Cash On Delivery validation','Processing in progress']:
invoice_not_done = False
for invoice in sale_order.invoice_ids:
if invoice.state == 'open' or invoice.state == "paid" :
order.get('order').update({'invoice_number': str(invoice.number)})
order.get('order').update({'invoice_date': str(invoice.date_invoice)})
order.get('order').update({'total_paid_real': str(sale_order.amount_total)})
# order.get('order').update({'current_state': str(status_ids[0].presta_id)})
else:
invoice_not_done = True
                        if not invoice_not_done:
status_ids = status_obj.search([('name','=','Payment accepted')])
order.get('order').update({'current_state': str(status_ids[0].presta_id)})
sale_order.order_status = status_ids[0].id
picking_not_done = False
for picking in sale_order.picking_ids:
status_ids = status_obj.search([('name', '=', 'Shipped')])
if picking.state == 'done':
order.get('order').update({'delivery_number': str(picking.name)})
order.get('order').update({'delivery_date': picking.scheduled_date})
order.get('order').update({'current_state': str(status_ids[0].presta_id)})
else:
picking_not_done = True
                        if not picking_not_done:
status_ids = status_obj.search([('name', '=', 'Shipped')])
order.get('order').update({'current_state': str(status_ids[0].presta_id)})
sale_order.order_status = status_ids[0].id
prestashop.edit('orders', order)
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'update_order_status', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
shop.write({'prestashop_last_update_order_status_date': datetime.now()})
return True
# @api.multi
def export_presta_products(self):
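        """ Export product templates flagged `product_to_be_exported` (or the
        ids passed in the context) to PrestaShop, including their variant
        combinations, and store the returned presta ids locally. """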
# exports product details,image and variants
prod_templ_obj = self.env['product.template']
prdct_obj = self.env['product.product']
stock_quanty = self.env['stock.quant']
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
query = "select product_id from product_templ_shop_rel where shop_id = %s"%shop.id
self.env.cr.execute(query)
fetch_shop_products = self.env.cr.fetchall()
if self.env.context.get('product_ids'):
product_ids = prod_templ_obj.browse(self.env.context.get('product_ids'))
else:
product_ids = prod_templ_obj.search([('product_to_be_exported','=',True)])
for product in product_ids:
product_schema = prestashop.get('products', options={'schema': 'blank'})
categ = [{'id': product.presta_categ_id.presta_id}]
parent_id = product.presta_categ_id.parent_id
while parent_id:
categ.append({'id': parent_id.presta_id})
parent_id = parent_id.parent_id
product_schema.get('product').get('associations').update({
'categories': {'attrs': {'node_type': 'category'}, 'category': categ},
})
product_schema.get('product').update({
#'name': {'language': {'attrs': {'id': '1'}, 'value': product.name}},
#'link_rewrite': {'language': {'attrs': {'id': '1'}, 'value': product.name.replace(' ', '-')}},
'reference': product.default_code,
#'wholesale_price': str(product.wholesale_price),
#'depth': str(product.product_lngth),
#'width': str(product.product_width),
#'weight': str(product.product_wght),
#'height': str(product.product_hght),
#'price': product.list_price and str(product.list_price) or '0.00',
'date_upd': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'date_add': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'active': 1,
# 'state': {'value': '1'},
#'type': {'attrs': {'notFilterable': 'true'}, 'value': 'simple'},
#'id_supplier': product.supplier_id and product.supplier_id.presta_id or '0',
#'id_manufacturer': product.manufacturer_id and product.manufacturer_id.presta_id or '0',
#'id_shop_default':self.id
})
product_var_ids = prdct_obj.search([('product_tmpl_id','=',product.id)])
presta_res = prestashop.add('products', product_schema)
presta_id = self.get_value_data(presta_res.get('prestashop').get('product').get('id'))
product.write({'presta_id': presta_id})
for prod_var in product_var_ids:
stck_id = stock_quanty.search([('product_id','=',prod_var.id),('location_id','=',shop.warehouse_id.lot_stock_id.id)])
qty = 0
for stck in stck_id:
qty += stck.quantity
product_comb_schema = prestashop.get('combinations',options = {'schema': 'blank'})
option_values = []
for op in prod_var.product_template_attribute_value_ids:
option_values.append({'id': op.presta_id})
product_comb_schema.get('combination').get('associations').get('product_option_values').update({
'product_option_value' : option_values
})
product_comb_schema.get('combination').update({
'id_product' : presta_id,
#'price' : prod_var.combination_price and str(prod_var.combination_price) or '0.00',
'reference': prod_var.default_code,
'quantity': str(int(prod_var.qty_available)),
#'minimal_quantity': '1',
})
combination_resp = prestashop.add('combinations', product_comb_schema)
c_presta_id = self.get_value_data(combination_resp.get('prestashop').get('combination').get('id'))
prod_var.write({
'combination_id': c_presta_id,
})
product.write({
'product_to_be_exported': False,
})
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'export_product_data', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return True
# @api.multi
def export_presta_categories(self):
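        """ Export categories flagged `to_be_exported` (or the ids passed in
        the context) to PrestaShop and store the returned presta ids. """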
categ_obj = self.env['prestashop.category']
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
query = "select categ_id from presta_categ_shop_rel where shop_id = %s"%shop.id
self.env.cr.execute(query)
                fetch_shop_category = self.env.cr.fetchall()
                if fetch_shop_category:
                    fetch_shop_category = [i[0] for i in fetch_shop_category]
if self.env.context.get('category_ids'):
category_ids = categ_obj.browse(self.env.context.get('category_ids'))
else:
category_ids = categ_obj.search([('to_be_exported','=',True),('id','in',fetch_shop_category)])
for category in category_ids:
category_schema = prestashop.get('categories', options={'schema': 'blank'})
category_schema.get('category').update({
'name' : {'language': {'attrs': {'id': '1'}, 'value': category.name and str(category.name)}} ,
'id_parent': category.parent_id and category.parent_id.presta_id and str(category.parent_id.presta_id) or '0',
'link_rewrite': {'language': {'attrs': {'id': '1'}, 'value': category.name and str(category.name.replace(' ','-'))}},
'active': '1',
'description': {'language': {'attrs': {'id': '1'}, 'value': category.name and str(category.name)}},
'id_shop_default':self.id,
})
presta_res = prestashop.add('categories', category_schema)
if presta_res.get('prestashop').get('category').get('id'):
categ_presta_id = self.get_value_data(presta_res.get('prestashop').get('category').get('id'))
else:
categ_presta_id = self.get_value_data(presta_res.get('prestashop').get('id'))
category.write({
'presta_id': categ_presta_id,
'to_be_exported': False,
})
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'export_categories', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
return True
# @api.one
def search_state(self, prestashop, state_name,country_id):
        # Find the matching state id in prestashop; create the state when missing
state_obj = self.env['res.country.state']
state_ids = prestashop.search('states', options={'filter[name]': state_name.name})
if state_ids:
state_id = state_ids[0]
else:
stats_schema = prestashop.get('states', options={'schema': 'blank'})
if stats_schema:
stats_schema.get('state').update({
'name': state_name.name,
'iso_code': state_name.code,
'id_country': country_id,
})
stat_res = prestashop.add('states', stats_schema)
state_id = stat_res.get('prestashop').get('state').get('id').get('value')
return state_id
# @api.one
def search_country(self, prestashop, country_name):
# To find the country id in prestashop
country_ids = prestashop.search('countries', options={'filter[name]': country_name.name})
if country_ids:
country_id = country_ids[0]
else:
country_schema = prestashop.get('countries', options={'schema': 'blank'})
country_schema.get('country').update({
'name': {'language': {'attrs': {'id': '1'}, 'value': country_name.name}},
'iso_code': country_name.code,
'alias': ''
})
country_res = prestashop.add('countries', country_schema)
country_id = country_res.get('prestashop').get('country').get('id').get('value')
return country_id
# @api.multi
def export_presta_customers(self):
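        """ Export customers flagged `to_be_exported` (or the ids passed in
        the context) to PrestaShop, together with their main address and the
        addresses of their child contacts. """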
res_partner_obj = self.env['res.partner']
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
query = "select cust_id from customer_shop_rel where shop_id = %s"%shop.id
self.env.cr.execute(query)
fetch_shop_customers = self.env.cr.fetchall()
if self.env.context.get('customer_ids'):
customer_ids = res_partner_obj.browse(self.env.context.get('customer_ids'))
else:
customer_ids = res_partner_obj.search([('to_be_exported', '=', True)])
# ('id','in',fetch_shop_customers)
for customer in customer_ids:
customer_schema = prestashop.get('customers', options={'schema': 'blank'})
customer_name = customer.name
name_list = customer_name.split(' ')
first_name = name_list[0]
if len(name_list) > 1:
last_name = name_list[1]
else:
last_name = name_list[0]
customer_schema.get('customer').update({
'firstname' : first_name and str(first_name),
'lastname' : last_name and str(last_name),
'email' : customer.email and str(customer.email),
'active': '1',
'date_upd': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'date_add': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'birthday': customer.date_of_birth and str(customer.date_of_birth) or False,
                        'website': customer.website and str(customer.website) or '',
})
presta_cust = prestashop.add('customers', customer_schema)
customer_presta_id = self.get_value_data(presta_cust.get('prestashop').get('customer').get('id'))
address_schema = prestashop.get('addresses', options={'schema': 'blank'})
address_schema.get('address').update({
'firstname': first_name and str(first_name),
'lastname': last_name and str(last_name),
'address1' : customer.street and str(customer.street) or '',
'address2' : customer.street2 and str(customer.street2) or '',
'city' : customer.city and str(customer.city) or '',
'phone' : customer.phone and str(customer.phone) or '',
'phone_mobile' : customer.mobile and str(customer.mobile) or '',
'postcode' : customer.zip and str(customer.zip) or '',
'id_customer': customer_presta_id and str(customer_presta_id),
'alias': customer.type and str(customer.type),
})
if customer.country_id:
c_id = shop.search_country(prestashop, customer.country_id)
if c_id:
address_schema.get('address').update({
'id_country': c_id,
})
# if customer.state_id:
# address_schema.get('address').update({
# 'id_state': shop.search_state(prestashop, customer.state_id,c_id)
# })
presta_address = prestashop.add('addresses', address_schema)
add_presta_id = self.get_value_data(presta_address.get('prestashop').get('address').get('id'))
customer.write({
'presta_id': customer_presta_id,
'to_be_exported': False,
'address_id' : add_presta_id,
})
for child in customer.child_ids:
address_schema = prestashop.get('addresses', options={'schema': 'blank'})
if child.name:
name = child.name
else:
name = customer.name
name_list = name.split(' ')
first_name = name_list[0]
if len(name_list) > 1:
last_name = name_list[1]
else:
last_name = name_list[0]
address_schema.get('address').update({
'firstname': first_name and str(first_name),
'lastname': last_name and str(last_name),
'address1': child.street and str(child.street) or '',
                            'address2': child.street2 and str(child.street2) or '',
                            'city': child.city and str(child.city) or '',
'phone': child.phone and str(child.phone) or '',
'phone_mobile': child.mobile and str(child.mobile) or '',
'postcode': child.zip and str(child.zip) or '',
'id_customer': customer_presta_id and str(customer_presta_id),
'alias': customer.type and str(customer.type) or ''
})
                        c_id = False
                        if child.country_id:
                            c_id = shop.search_country(prestashop, child.country_id)
                            address_schema.get('address').update({
                                'id_country': c_id,
                            })
                        if child.state_id and c_id:
                            address_schema.get('address').update({
                                'id_state': shop.search_state(prestashop, child.state_id, c_id)
                            })
presta_address = prestashop.add('addresses', address_schema)
add_presta_id = self.get_value_data(presta_address.get('prestashop').get('address').get('id'))
child.write({
'address_id': add_presta_id,
'to_be_exported':False
})
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'Export_customers', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
# @api.multi
def export_presta_customer_messages(self):
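        """ Export order messages flagged `to_be_exported` (or the ids passed
        in the context) to PrestaShop as customer threads. """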
order_msg_obj = self.env['order.message']
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
query = "select mess_id from message_shop_rel where shop_id = %s"%shop.id
self.env.cr.execute(query)
fetch_shop_customer_messages = self.env.cr.fetchall()
if self.env.context.get('customer_message_ids'):
customer_message_ids = order_msg_obj.browse(self.env.context.get('customer_message_ids'))
else:
customer_message_ids = order_msg_obj.search([('to_be_exported', '=', True)])
for customer_message in customer_message_ids:
customer_message_schema = prestashop.get('customer_threads', options={'schema': 'blank'})
customer_message_schema.get('customer_thread').update({
'token': customer_message.token and str(customer_message.token),
'email': customer_message.email and str(customer_message.email) ,
'status': customer_message.status and str(customer_message.status),
'id_lang': '1',
'id_customer' : customer_message.customer_id and str(customer_message.customer_id.presta_id) or '0',
'id_contact': 0,
'id_order':customer_message.new_id and str(customer_message.new_id.presta_id) or '',
})
customer_threads_res = prestashop.add('customer_threads', customer_message_schema)
msg_presta_id = self.get_value_data(customer_threads_res.get('prestashop').get('customer_thread').get('id'))[0]
customer_message.write({
'presta_id': msg_presta_id,
'to_be_exported': False,
})
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'export_customer_message', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
# @api.one
def get_currency(self, prestashop, currency):
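        """ Return the PrestaShop currency id matching `currency`, creating
        it remotely (with a 1:1 conversion rate) when missing. """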
currency_ids = prestashop.search('currencies', options={'filter[iso_code]': currency.name})
if currency_ids:
currency_id = currency_ids[0]
else:
currency_schema = prestashop.get('currencies', options={'schema': 'blank'})
currency_schema.get('currency').update({
'name': currency.name,
'iso_code': currency.name,
'sign': currency.name,
'active': '1',
'conversion_rate': '1'
})
currency_res = prestashop.add('currencies', currency_schema)
currency_id = currency_res.get('prestashop').get('currency').get('id').get('value')
return currency_id
# @api.one
def get_languange(self, prestashop, languange):
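        """ Return the PrestaShop language id for the given Odoo language
        code, creating the language remotely when missing. (Method name kept
        as-is for compatibility with existing callers.) """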
lang = self.env['res.lang'].search([('code','=',languange)])
languange_ids = prestashop.search('languages', options={'filter[iso_code]': lang.iso_code})
if languange_ids:
languange_id = languange_ids[0]
else:
languange_schema = prestashop.get('languages', options={'schema': 'blank'})
languange_schema.get('language').update({
'name': lang.name,
'iso_code': lang.iso_code,
'language_code' : lang.code.replace('_','-'),
'active': '1',
'date_format_lite': lang.date_format,
})
languange_res = prestashop.add('languages', languange_schema)
languange_id = self.get_value(languange_res.get('prestashop').get('language'))[0].get('id').get('value')
return languange_id
# @api.multi
def export_presta_orders(self):
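        """ Export sale orders flagged `to_be_exported` (or the ids passed in
        the context) to PrestaShop: a cart is created first, then the order
        with its rows, totals, invoice and delivery information. """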
sale_order_obj = self.env['sale.order']
status_obj = self.env['presta.order.status']
sale_order_line_obj = self.env['sale.order.line']
for shop in self:
try:
prestashop = PrestaShopWebServiceDict(shop.prestashop_instance_id.location,shop.prestashop_instance_id.webservice_key or None)
query = "select saleorder_id from saleorder_shop_rel where shop_id = %s"%shop.id
self.env.cr.execute(query)
fetch_shop_sale_order = self.env.cr.fetchall()
if self.env.context.get('ordered_ids'):
order_ids = sale_order_obj.browse(self.env.context.get('ordered_ids'))
else:
order_ids = sale_order_obj.search([('to_be_exported', '=', True)])
for order in order_ids:
order_schema = prestashop.get('orders', options={'schema': 'blank'})
carts_schema = prestashop.get('carts', options={'schema': 'blank'})
# lang_schema = prestashop.get('languages',1)
payment_value = dict(self.env['sale.order'].fields_get(allfields=['pretsa_payment_mode'])['pretsa_payment_mode']['selection'])[order.pretsa_payment_mode]
order_schema.get('order').update({
'allow_seperated_package': '',
'conversion_rate': '1.000000' ,
'current_state': order.order_status and order.order_status.presta_id and str(order.order_status.presta_id),
'carrier_tax_rate': '0.000',
'date_upd': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'date_add': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'delivery_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'delivery_number': '0',
'id_shop': shop.presta_id and str(shop.presta_id),
'id_customer': order.partner_id and order.partner_id.presta_id and str(order.partner_id.presta_id),
'id_address_delivery': order.partner_id.address_id and str(order.partner_id.address_id),
'id_address_invoice': order.partner_invoice_id.address_id and str(order.partner_invoice_id.address_id),
'id_currency': shop.get_currency(prestashop, shop.pricelist_id.currency_id),
'id_carrier': order.carrier_prestashop.presta_id and str(order.carrier_prestashop.presta_id),
'invoice_number': '0',
'id_lang': shop.get_languange(prestashop, order.partner_id.lang),
# 'id_shop_group': '1',
'mobile_theme': '0',
'module': order.pretsa_payment_mode.lower(),
'payment': order.pretsa_payment_mode.capitalize(),
'round_mode': '0',
'round_type': '0',
'reference': order.name and str(order.name),
'recyclable': '0',
'shipping_number': {'attrs': {'notFilterable': 'true'}, 'value': ''},
'total_paid': '0.000000',
'total_paid_real': '0.000000',
'total_products': order.amount_total and str(order.amount_total),
                        'total_products_wt': '1.0',
'total_discounts' : '0.000000',
'total_discounts_tax_excl' : '0.000000',
'total_discounts_tax_incl' : '0.000000',
'total_paid_tax_excl' : '0.000000',
'total_paid_tax_incl' : '0.000000',
'total_shipping' : '0.000000',
'total_shipping_tax_excl' : '0.000000',
'total_shipping_tax_incl' : '0.000000',
'total_wrapping_tax_excl' : '0.000000',
'total_wrapping_tax_incl' : '0.000000',
'total_wrapping' : '0.000000',
'valid': '1',
})
if order.invoice_status == 'invoiced':
order_schema.get('order').update({'total_paid_tax_incl': order.amount_total and str(order.amount_total)})
order_schema.get('order').update({'total_paid_tax_excl': order.amount_untaxed and str(order.amount_untaxed)})
shipping_product = shop.shipment_fee_product_id
for line in order.order_line:
if line.product_id.id == shipping_product.id:
shipping_cost = shipping_product.lst_price and str(shipping_product.lst_price)
order_schema.get('order').update({'total_shipping': shipping_cost and str(shipping_cost)})
order_schema.get('order').update({'total_shipping_tax_excl': shipping_cost and str(shipping_cost)})
discount = 0.0
status_ids=False
for line in order.order_line:
discount += line.discount
if discount > 0.0:
order_schema.get('order').update({'total_discounts': discount and str(discount)})
order_schema.get('order').update({'total_discounts_tax_excl': discount and str(discount)})
if order.order_status.name in ['Awaiting check payment', 'Awaiting bank wire payment',
'Awaiting Cash On Delivery validation', 'Processing in progress']:
invoice_not_done = False
for invoice in order.invoice_ids:
if invoice.state == 'open' or invoice.state == "paid":
order_schema.get('order').update({'invoice_number': invoice.number and str(invoice.number)})
order_schema.get('order').update({'invoice_date': invoice.date_invoice and str(invoice.date_invoice)})
order_schema.get('order').update({'total_paid_real': order.amount_total and str(order.amount_total)})
# order.get('order').update({'current_state': str(status_ids[0].presta_id)})
else:
invoice_not_done = True
                        if not invoice_not_done:
status_ids = status_obj.search([('name', '=', 'Payment accepted')])
order_schema.get('order').update({'current_state': status_ids[0].presta_id and str(status_ids[0].presta_id)})
order.order_status = status_ids[0].id
picking_not_done = False
for picking in order.picking_ids:
if picking.state == 'done':
order_schema.get('order').update({'delivery_number': picking.name and str(picking.name)})
order_schema.get('order').update({'delivery_date': picking.scheduled_date and str(picking.scheduled_date)})
# order_schema.get('order').update({'current_state': status_ids[0].presta_id and str(status_ids[0].presta_id)})
else:
picking_not_done = True
                        if not picking_not_done:
status_ids = status_obj.search([('name', '=', 'Shipped')])
if status_ids:
order_schema.get('order').update({'current_state': status_ids[0].presta_id and str(status_ids[0].presta_id)})
order.order_status = status_ids[0].id
lines = []
cart_line_list = []
if len(order.order_line)>1:
for line in order.order_line:
lines.append({
'product_attribute_id': line.product_id.combination_id and str(line.product_id.combination_id) or '0',
'product_id': line.product_id.product_tmpl_id and line.product_id.product_tmpl_id.presta_id and str(line.product_id.product_tmpl_id.presta_id),
'product_name': line.name and str(line.name),
'product_price': str(int(line.price_unit)),
'product_quantity': str(int(line.product_uom_qty)),
'product_reference': line.product_id.default_code and str(line.product_id.default_code),
})
cart_line_list.append({'id_address_delivery': order.partner_id.address_id and str(order.partner_id.address_id),
'id_product_attribute': line.product_id.combination_id and str(line.product_id.combination_id) or '0',
'id_product': line.product_id.product_tmpl_id and line.product_id.product_tmpl_id.presta_id and str(line.product_id.product_tmpl_id.presta_id),
'quantity': line.product_uom_qty and str(line.product_uom_qty),
})
else:
line = order.order_line[0]
lines = {
'product_attribute_id': line.product_id.combination_id and str(line.product_id.combination_id) or '0',
'product_id': line.product_id.product_tmpl_id and line.product_id.product_tmpl_id.presta_id and str(line.product_id.product_tmpl_id.presta_id),
'product_name': line.name and str(line.name),
'product_price': str(int(line.price_unit)),
'product_quantity': str(int(line.product_uom_qty)),
'product_reference': line.product_id.default_code and str(line.product_id.default_code),
}
cart_line_list = {
'id_address_delivery': order.partner_id.address_id and str(order.partner_id.address_id),
'id_product_attribute': line.product_id.combination_id and str(line.product_id.combination_id) or '0',
'id_product': line.product_id.product_tmpl_id and line.product_id.product_tmpl_id.presta_id and str(line.product_id.product_tmpl_id.presta_id),
'quantity': line.product_uom_qty and str(line.product_uom_qty),
}
order_schema.get('order').get('associations').get('order_rows').update({
# 'attrs': {'nodeType': 'order_row',
# 'virtualEntity': 'true'},
'order_row': lines,
})
carts_schema.get('cart').update({
'id_carrier': order.carrier_prestashop and order.carrier_prestashop.presta_id and str(order.carrier_prestashop.presta_id),
'id_address_delivery': order.partner_id.address_id and str(order.partner_id.address_id),
'id_shop': shop.presta_id and str(shop.presta_id),
'id_customer': order.partner_id and order.partner_id.presta_id and str(order.partner_id.presta_id),
'id_lang': shop.get_languange(prestashop, order.partner_id.lang),
                'id_address_invoice': order.partner_id.address_id and str(order.partner_id.address_id),
'id_currency': shop.get_currency(prestashop, shop.pricelist_id.currency_id),
# 'id_shop_group' : '1',
'mobile_theme': '0',
# 'gift': '0',
# 'gift_message': '',
# 'id_guest': '1',
})
carts_schema.get('cart').get('associations').get('cart_rows').update({
# 'attrs': {'node_type': 'cart_row',
# 'virtual_entity': 'true'},
# 'delivery_option': 'a:1:{i:3;s:2:"2,";}',
'cart_row': cart_line_list,
})
sale_gift_ids = sale_order_line_obj.search([('order_id', '=', order.id), ('gift', '=', True)])
if sale_gift_ids:
for gift_id in sale_gift_ids:
gift_msg = gift_id.gift_message
wrapping_cost = gift_id.wrapping_cost or '0.000'
carts_schema.get('cart').update({
'gift': '1',
'gift_message': gift_msg and str(gift_msg),
})
order_schema.get('order').update(
{'gift': '1',
'gift_message': gift_msg and str(gift_msg),
'total_wrapping': wrapping_cost and str(wrapping_cost),
'total_wrapping_tax_excl': wrapping_cost and str(wrapping_cost),
})
presta_cart = prestashop.add('carts', carts_schema)
cart_presta_id = self.get_value_data(presta_cart.get('prestashop').get('cart').get('id'))[0]
order.write({
'to_be_exported':False
})
if cart_presta_id:
order_schema.get('order').update({
'id_cart' : cart_presta_id and str(cart_presta_id),
})
presta_orders = prestashop.add('orders', order_schema)
except Exception as e:
if self.env.context.get('log_id'):
log_id = self.env.context.get('log_id')
self.env['log.error'].create({'log_description': str(e), 'log_id': log_id})
else:
log_id_obj = self.env['prestashop.log'].create(
{'all_operations': 'export_order_status', 'error_lines': [(0, 0, {'log_description': str(e), })]})
log_id = log_id_obj.id
new_context = dict(self.env.context)
new_context.update({'log_id': log_id})
self.env.context = new_context
| UTF-8 | Python | false | false | 144,491 | py | 52 | sale_shop.py | 25 | 0.641184 | 0.636194 | 0 | 3,011 | 46.987047 | 257 |
chscheller/sc2_imitation_learning | 2,078,764,217,310 | 087fe5ffb50f0ed8a4a640a826453c41aaa41f84 | 5c0e9170fc8a05d1e30777fb708757ce0944b1b6 | /sc2_imitation_learning/behaviour_cloning/learner.py | 250cab1853f6d296a92bdd2b50a2987d4a52766d | [
"MIT"
]
| permissive | https://github.com/chscheller/sc2_imitation_learning | 770cad1af0398b2983e4eb0508f04ecd732debc9 | 8dca03e9be92e2d8297a4bc34248939af5c7ec3b | refs/heads/master | 2023-06-11T22:13:23.100108 | 2021-07-02T00:25:22 | 2021-07-02T00:25:22 | 372,315,766 | 10 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | import logging
import math
import os
import time
import timeit
from typing import Optional, Callable, Dict
import numpy as np
import tensorflow as tf
from sonnet.src.types import TensorNest
from sc2_imitation_learning.agents import Agent, build_saved_agent
from sc2_imitation_learning.common import utils
from sc2_imitation_learning.common.progress_logger import ConsoleProgressLogger, TensorboardProgressLogger
from sc2_imitation_learning.common.utils import make_dummy_batch, swap_leading_axes
from sc2_imitation_learning.environment.environment import ObservationSpace, ActionSpace
logger = logging.getLogger(__file__)
def compute_correct_predictions(target_actions, learner_actions, label_mask_value: Optional[int] = -1):
target_actions = tf.cast(target_actions, dtype=tf.int32)
learner_actions = tf.cast(learner_actions, dtype=tf.int32)
correct_predictions = tf.equal(target_actions, learner_actions)
if label_mask_value is not None:
masks = tf.not_equal(target_actions, label_mask_value)
correct_predictions = tf.logical_and(correct_predictions, masks)
num_samples = tf.math.count_nonzero(masks, dtype=tf.int32)
else:
num_samples = tf.size(target_actions, dtype=tf.int32)
num_correct_predictions = tf.math.count_nonzero(correct_predictions, dtype=tf.int32)
return num_correct_predictions, num_samples
def compute_neg_log_probs(labels, logits, label_mask_value: Optional[int] = -1):
""" Computes negative log probabilities of labels given logits, where labels equal to `label_mask_value`
are zero-masked """
if label_mask_value is not None:
# mask labels to prevent invalid (e.g. negative) label values
mask = tf.math.not_equal(labels, label_mask_value)
labels *= tf.cast(mask, dtype=labels.dtype)
# calculate neg log probabilities
neg_log_probs = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits)
if label_mask_value is not None:
# mask neg_log_probs with pre calculated mask
neg_log_probs *= tf.cast(mask, dtype=neg_log_probs.dtype)
return neg_log_probs
def compute_cross_entropy_loss(labels, logits, label_mask_value: Optional[int] = -1):
""" Computes the cross entropy loss, where labels equal to `label_mask_value` are ignored. """
neg_log_probs = tf.nest.map_structure(
lambda x, y: compute_neg_log_probs(x, y, label_mask_value),
labels,
logits)
# sum negative log probabilities and average across time dimension
return tf.reduce_mean(sum(tf.nest.flatten(neg_log_probs)), axis=0)
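# Shape sketch (hypothetical names): with labels {'move': [T, B]} and logits
# {'move': [T, B, num_actions]}, masked entries (label == -1) add zero loss
# and the result is one loss value per batch element, i.e. shape [B].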
def evaluate_gradients(
trajectory_ids: tf.Tensor,
trajectories: TensorNest,
global_batch_size: int,
agent: Agent,
agent_states: utils.Aggregator,
l2_regularization=0.):
trajectories = tf.nest.map_structure(swap_leading_axes, trajectories) # B x T -> T x B
env_outputs = (trajectories['reward'], trajectories['done'], trajectories['observation'])
prev_agent_states = agent_states.read(trajectory_ids)
with tf.GradientTape() as tape:
agent_outputs, curr_agent_states = agent(
prev_actions=trajectories['prev_action'],
env_outputs=env_outputs,
core_state=prev_agent_states,
unroll=True,
teacher_actions=trajectories['action'])
        cross_entropy_loss = tf.nn.compute_average_loss(
            per_example_loss=compute_cross_entropy_loss(trajectories['action'], agent_outputs.logits),
            global_batch_size=global_batch_size)
if l2_regularization > 0.:
l2_loss = tf.nn.scale_regularization_loss(
regularization_loss=sum([tf.nn.l2_loss(v) for v in agent.trainable_variables]))
else:
l2_loss = 0.
        loss = cross_entropy_loss + l2_regularization * l2_loss
# Update current state.
agent_states.replace(trajectory_ids, curr_agent_states)
gradients = tape.gradient(loss, agent.trainable_variables)
grad_norm = tf.linalg.global_norm(gradients) * (1 / tf.distribute.get_strategy().num_replicas_in_sync)
correct_predictions = tf.nest.map_structure(
compute_correct_predictions, trajectories['action'], agent_outputs.actions)
summary = {
'loss': {
'loss': loss,
            'ce': cross_entropy_loss,
'l2': l2_loss,
},
'grad_norm': grad_norm,
'num_correct': {
action_name: num_correct for action_name, (num_correct, _) in correct_predictions.items()
},
'num_samples': {
action_name: num_samples for action_name, (_, num_samples) in correct_predictions.items()
},
}
return gradients, summary
def accumulate_gradients(
accumulated_gradients: tf.Tensor,
trajectory_ids: tf.Tensor,
trajectories: TensorNest,
global_batch_size: int,
agent: Agent,
agent_states: utils.Aggregator,
l2_regularization=0.):
gradients, summary = evaluate_gradients(
trajectory_ids=trajectory_ids, trajectories=trajectories, global_batch_size=global_batch_size, agent=agent,
agent_states=agent_states, l2_regularization=l2_regularization)
for t, g in zip(accumulated_gradients, gradients):
t.assign_add(g)
return summary
def apply_gradients(
accumulated_gradients: tf.Tensor,
agent: Agent,
update_frequency: int,
optimizer: tf.optimizers.Optimizer):
gradients = tuple([g / float(update_frequency) for g in accumulated_gradients])
optimizer.apply_gradients(zip(gradients, agent.trainable_variables))
for v in accumulated_gradients:
v.assign(tf.zeros_like(v))
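# With update_frequency k, accumulate_gradients sums gradients over k
# micro-batches and apply_gradients divides by k before the optimizer step,
# emulating a k-times larger effective batch size.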
def train_step(trajectory_ids: tf.Tensor,
trajectories: TensorNest,
global_batch_size: int,
agent: Agent,
optimizer: tf.optimizers.Optimizer,
agent_states: utils.Aggregator,
l2_regularization=0.):
gradients, summary = evaluate_gradients(
trajectory_ids=trajectory_ids, trajectories=trajectories, global_batch_size=global_batch_size, agent=agent,
agent_states=agent_states, l2_regularization=l2_regularization)
optimizer.apply_gradients(zip(gradients, agent.trainable_variables))
return summary
def learner_loop(log_dir: str,
observation_space: ObservationSpace,
action_space: ActionSpace,
training_strategy: tf.distribute.Strategy,
dataset_fn: Callable[[tf.distribute.InputContext], tf.data.Dataset],
agent_fn: Callable[[], Agent],
optimizer_fn: Callable[[], tf.keras.optimizers.Optimizer],
total_train_samples: int,
batch_size: int,
sequence_size: int,
l2_regularization: float,
update_frequency: int,
num_episodes: int,
eval_fn: Callable[[Agent], Dict],
eval_interval: int,
max_to_keep_checkpoints: int = None,
save_checkpoint_interval: float = 1800., # every 30 minutes
tensorboard_log_interval: float = 10.,
console_log_interval: float = 60.) -> None:
    batch_samples = batch_size * sequence_size
    total_steps = math.ceil(total_train_samples / float(batch_samples))
    eval_interval_steps = math.ceil(eval_interval / float(batch_samples))
global_step = tf.Variable(0, dtype=tf.int64)
last_checkpoint_time = None
with training_strategy.scope():
agent = agent_fn()
optimizer = optimizer_fn()
# initialize agent variables by feeding dummy batch:
initial_agent_state = agent.initial_state(1)
prev_actions, env_outputs = make_dummy_batch(observation_space, action_space)
agent(prev_actions=prev_actions, env_outputs=env_outputs, core_state=initial_agent_state, unroll=True)
# initialize all optimizer variables:
_ = optimizer.iterations
optimizer._create_hypers()
optimizer._create_slots(agent.trainable_variables)
checkpoint = tf.train.Checkpoint(agent=agent, optimizer=optimizer, step=global_step)
checkpoint_manager = tf.train.CheckpointManager(checkpoint, log_dir, max_to_keep=max_to_keep_checkpoints)
if checkpoint_manager.latest_checkpoint:
logging.info(f'Restoring checkpoint: {checkpoint_manager.latest_checkpoint}')
checkpoint.restore(checkpoint_manager.latest_checkpoint).assert_consumed()
# agent states and accumulated gradients should not be shared between replicas:
agent_state_specs = tf.nest.map_structure(lambda t: tf.TensorSpec(t.shape[1:], t.dtype), initial_agent_state)
agent_states = utils.Aggregator(num_episodes, agent_state_specs, 'agent_states')
if update_frequency > 1:
accumulated_gradients = [tf.Variable(tf.zeros_like(v), trainable=False) for v in agent.trainable_variables]
else:
accumulated_gradients = None
dataset = training_strategy.experimental_distribute_datasets_from_function(dataset_fn)
@tf.function
def distributed_train_step(trajectory_ids, sequences):
if update_frequency > 1:
per_replica_summary = training_strategy.run(accumulate_gradients, kwargs={
'accumulated_gradients': accumulated_gradients,
'trajectory_ids': trajectory_ids,
'trajectories': sequences,
'global_batch_size': batch_size,
'agent': agent,
'agent_states': agent_states,
'l2_regularization': l2_regularization,
})
if tf.math.mod(global_step, update_frequency) == 0:
training_strategy.run(apply_gradients, kwargs={
'accumulated_gradients': accumulated_gradients,
'agent': agent,
'update_frequency': update_frequency,
'optimizer': optimizer,
})
else:
per_replica_summary = training_strategy.run(train_step, kwargs={
'trajectory_ids': trajectory_ids,
'trajectories': sequences,
'global_batch_size': batch_size,
'agent': agent,
'optimizer': optimizer,
'agent_states': agent_states,
'l2_regularization': l2_regularization
})
summary = tf.nest.map_structure(lambda t: training_strategy.reduce("SUM", t, axis=None), per_replica_summary)
return summary
def should_evaluate(_step):
return _step % eval_interval_steps == 0
def should_save_checkpoint(_time):
return last_checkpoint_time is None or _time - last_checkpoint_time >= save_checkpoint_interval
def iter_dataset(_dataset):
dataset_iterator = iter(_dataset)
while global_step.numpy() < total_steps:
yield next(dataset_iterator)
console_logger = ConsoleProgressLogger(
final_step=total_steps,
batch_samples=batch_samples,
logging_interval=console_log_interval,
initial_step=global_step.numpy())
console_logger.start()
tensorboard_logger = TensorboardProgressLogger(
summary_writer=tf.summary.create_file_writer(log_dir),
logging_interval=tensorboard_log_interval,
initial_step=global_step.numpy())
tensorboard_logger.start()
last_step_time = timeit.default_timer()
for batch in iter_dataset(dataset):
step = global_step.numpy()
train_summary = distributed_train_step(*batch)
current_time = timeit.default_timer()
step_duration = current_time - last_step_time
last_step_time = current_time
train_summary = tf.nest.map_structure(lambda s: s.numpy(), train_summary)
train_summary['samples'] = (step+1) * batch_samples
train_summary['samples_per_second'] = batch_samples / float(step_duration)
train_summary['learning_rate'] = optimizer._decayed_lr('float32').numpy()
train_summary['accuracy'] = {
action_name: np.true_divide(train_summary['num_correct'][action_name], num_samples)
for action_name, num_samples in train_summary['num_samples'].items() if num_samples > 0
}
console_logger.log_dict(train_summary, step)
tensorboard_logger.log_dict(train_summary, step)
if should_evaluate(step):
checkpoint_manager.save()
saved_agent = build_saved_agent(agent, observation_space, action_space)
tf.saved_model.save(saved_agent, os.path.join(log_dir, 'saved_model'))
eval_summary = eval_fn(os.path.join(log_dir, 'saved_model'))
tensorboard_logger.log_dict(eval_summary, step)
now = time.time()
if should_save_checkpoint(now):
checkpoint_manager.save()
saved_agent = build_saved_agent(agent, observation_space, action_space)
tf.saved_model.save(saved_agent, os.path.join(log_dir, 'saved_model'))
last_checkpoint_time = now
global_step.assign_add(1)
checkpoint_manager.save()
saved_agent = build_saved_agent(agent, observation_space, action_space)
tf.saved_model.save(saved_agent, os.path.join(log_dir, 'saved_model'))
console_logger.shutdown()
tensorboard_logger.shutdown()
| UTF-8 | Python | false | false | 13,444 | py | 40 | learner.py | 38 | 0.649212 | 0.644079 | 0 | 324 | 40.493827 | 117 |
igavriil/two-player-ai | 14,783,277,439,856 | 8014db81176e1eba8335bf7638f616fcd6f8d8f2 | b32a2663be4842f652f0189759b86d1a4be9d93d | /src/two_player_ai/alpha_beta.py | f6fba9945f75386064d5e42da7f296988ec7b5b7 | [
"MIT"
]
| permissive | https://github.com/igavriil/two-player-ai | 8d71449720d13f7a52946af4ca1c270cb15aa57e | f5e2ae9d714370da11650676f8619ea055730ad5 | refs/heads/master | 2018-07-10T05:18:52.902669 | 2018-07-01T13:40:31 | 2018-07-01T13:40:31 | 121,112,128 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
from two_player_ai.utils import benchmark
class AlphaBeta(object):
def __init__(self, game=None, heuristic=None):
self.game = game
self.heuristic = heuristic
@benchmark
def run(self, state, player, maximize, alpha=-np.inf, beta=np.inf, depth=10):
if depth == 0 or self.game.terminal_test(state, player):
return state, self.heuristic(state, player)
actions = self.game.actions(state, player)
best_action = None
if not actions:
return best_action, 0
if maximize:
value = -np.inf
for action in actions:
next_state, next_player = self.game.result(state, player,
action)
_, result = self.run(next_state, next_player, False, alpha,
beta, depth - 1)
if result > value:
value = result
best_action = action
alpha = np.max([alpha, value])
if beta <= alpha:
break
return best_action, value
else:
value = +np.inf
for action in actions:
next_state, next_player = self.game.result(state, player,
action)
_, result = self.run(next_state, next_player, True, alpha,
beta, depth - 1)
if result < value:
value = result
best_action = action
beta = np.min([beta, value])
if beta <= alpha:
break
return best_action, value
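# Minimal usage sketch (hypothetical game and heuristic objects):
#   ab = AlphaBeta(game=my_game, heuristic=my_heuristic)
#   best_action, value = ab.run(initial_state, player, maximize=True, depth=4)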
| UTF-8 | Python | false | false | 1,770 | py | 25 | alpha_beta.py | 22 | 0.461582 | 0.458192 | 0 | 49 | 35.122449 | 81 |
saris20038/programacion01 | 884,763,274,969 | 15b3f03973cf7edcfd666c36cda8d9b39a099e7f | 098a069f5870216caf2983ccaa6a9795fb742ca6 | /TALLERES/TALLER2.py | 6c7e446e03c704ebd04d8be4518191a6b9b9b884 | []
| no_license | https://github.com/saris20038/programacion01 | 6adc4fc3a90001333aa65f25a958cf6868daa08f | 62183a88f6c8cf174a456f761d3ed8489770ea61 | refs/heads/main | 2023-05-07T23:32:34.720216 | 2021-05-27T12:51:52 | 2021-05-27T12:51:52 | 335,283,326 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | ## Usando el mismo codigo del taller 1, haré un mayor y menor.
print("Segundo taller, con el codigo del primero")
print("--Los dos numeros que tendré seran un supuesto de la nota de calculo que tengo y la nota necesaria para aprobar--")
miNota = 2.8
notaMinima = 3
notaMaxima= 5
print("¿con la nota que tengo, gano la materia? ")
isAprobado = miNota>= 3
pruebaV = True
pruebaF= False
print(isAprobado)
print("Entonces, ¿cuanto me falta para ganar?")
isMeFalta = notaMinima - miNota
print(isMeFalta)
print("Si multiplico mis esfuerzos y por lo tanto mi nota dos veces, ¿sobrepasa el 5? ")
isDoble = miNota * 2
isSobrepasado = isDoble > notaMaxima
print(isSobrepasado)
print("Si divido mi nota entre la nota minima, ¿es menor que 1?")
isMenor = miNota / notaMinima
isMenorQueUno = isMenor < 1
print(isMenorQueUno)
print("Elevando mi nota a la nota minima ¿sobrepasare la nota maxima?")
isMayor= miNota ** notaMinima
isVerdadSobrepasa = isMayor > notaMaxima
print(isVerdadSobrepasa)
print("¿Mi nota es diferente a la nota maxima?")
isDiferente = miNota != notaMaxima
print(isDiferente)
## AQUÍ EMPIEZAN LOS CAMBIOS
MENSAJE_DESPEDIDA = "Hasta luego perdedora, te quedo la nota final de calculo en: "
print (MENSAJE_DESPEDIDA, miNota)
print("Ahora calcularemos el promedio de las dos materias, es decir de calculo con Programación")
PREGUNTA_PROGRAMACIÓN = "¿Cual fue tu nota final en programación? : "
notaProgramacion = float(input(PREGUNTA_PROGRAMACIÓN))
##como las dos tienen 3 creditos valen lo mismo
promedioDosMaterias = (notaProgramacion + miNota) / 2
print(f"su promedio es de: {promedioDosMaterias}")
isPromedioAprobado = promedioDosMaterias > notaMinima
print("¿Con este promedio aprueba (esta encima de 3)?" , isPromedioAprobado)
TuNombre = "¿Como te llamas?: "
nombrePerdedor = input(TuNombre)
MENSAJE_BYE = "Hasta luego"
MOTIVACIONES = ", estudia más porfi"
print(MENSAJE_BYE , nombrePerdedor , MOTIVACIONES) | UTF-8 | Python | false | false | 1,938 | py | 37 | TALLER2.py | 35 | 0.765226 | 0.758459 | 0 | 45 | 41.711111 | 122 |
Helsinki-NLP/Opus-MT | 16,587,163,711,102 | cb207cf80a5712ee69bd4ce96733ce969992ebbd | b67e2939225aa8446ea8f367b1023217d8ae4a54 | /telegram_bot/keyboards.py | 04cf9807bb5e485b4b5e86856476ecb9b41d1885 | [
"MIT",
"CC-BY-4.0"
]
| permissive | https://github.com/Helsinki-NLP/Opus-MT | 7d8968f1208e89ea67866ef66e47d14b0f9f4c8d | c1980b571d3a34f963102b1460d5f78ce8bf4d55 | refs/heads/master | 2023-08-17T05:58:04.606847 | 2023-08-15T18:32:03 | 2023-08-15T18:32:03 | 203,664,345 | 387 | 55 | MIT | false | 2023-08-15T18:32:05 | 2019-08-21T21:05:30 | 2023-08-14T07:43:08 | 2023-08-15T18:32:03 | 38,458 | 382 | 54 | 41 | Python | false | false | from aiogram import types
KEYBOARDS = {
'hide': types.ReplyKeyboardRemove(selective=False),
'lang': {
'options': ['English', 'Finnish', 'German', 'Swedish', 'Ukrainian'],
'markup': None,
},
}
def fill_keyboards():
# scales
markup = types.ReplyKeyboardMarkup(resize_keyboard=True)
markup.row(*[types.KeyboardButton(variant) for variant in KEYBOARDS['lang']['options']])
KEYBOARDS['lang']['markup'] = markup
fill_keyboards()
| UTF-8 | Python | false | false | 474 | py | 49 | keyboards.py | 22 | 0.64346 | 0.64346 | 0 | 17 | 26.882353 | 92 |
dgjung0220/deepLearing_tensorflow | 9,440,338,155,024 | bf0a796d85a4759f6046c9807f9c4f9d99dc4483 | f01a0412d19d0434d088a6d902dbe8a2f659f644 | /DL_Source/Day_05_03_tensorboard.py | c3c524514acaff8a43c9183e68be8f5fd8917f16 | []
| no_license | https://github.com/dgjung0220/deepLearing_tensorflow | eccf6cf519ce98ef2c130834d5bcf810bb4de8e0 | 8cd759dfb0384702dc96d8895bde12b9056cc378 | refs/heads/master | 2020-03-18T08:05:04.530830 | 2018-05-27T13:31:26 | 2018-05-27T13:31:26 | 134,488,732 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Day_05_03_tensorboard.py
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('mnist', one_hot=True)
with tf.name_scope('input'):
x = tf.placeholder(tf.float32, name='x')
y = tf.placeholder(tf.float32, name='y')
with tf.name_scope('weight'):
w = tf.Variable(tf.zeros([784, 10]), name='w')
b = tf.Variable(tf.zeros([10]), name='b')
with tf.name_scope('model'):
z = tf.matmul(x, w) + b
hx = tf.nn.softmax(z)
cost_i = tf.nn.softmax_cross_entropy_with_logits(logits=z,
labels=y)
cost = tf.reduce_mean(cost_i)
with tf.name_scope('train'):
optimizer = tf.train.AdamOptimizer(0.001)
train = optimizer.minimize(cost)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
# step 1.
tf.summary.scalar('cost', cost)
# step 2.
merged = tf.summary.merge_all()
# step 3.
writer = tf.summary.FileWriter('board/mnist', sess.graph)
epochs, batch_size = 15, 100
n_iters = mnist.train.num_examples // batch_size
for i in range(epochs):
    total = 0
    for j in range(n_iters):
        xx, yy = mnist.train.next_batch(batch_size)
        feed = {x: xx, y: yy}
        _, loss = sess.run([train, cost], feed)
        total += loss
    print('{:2} : {}'.format(i, total / n_iters))
# step 4.
summary = sess.run(merged, {x: xx, y: yy})
writer.add_summary(summary, i)
# step 5.
# tensorboard --logdir=board/mnist
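# After training, run the command above in a shell and open
# http://localhost:6006 in a browser to inspect the logged cost curve.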
| UTF-8 | Python | false | false | 1,487 | py | 39 | Day_05_03_tensorboard.py | 30 | 0.611298 | 0.590451 | 0 | 60 | 23.683333 | 62 |
humnaawan/3D-galaxies-kavli | 8,546,984,922,359 | e9b6f54a031725bbb497e61f9287c0f47eab51eb | 02422b9e360118021f5eaa63c477b189c2305268 | /runscripts/get_data/get_illustris_data.py | f75b9cbb97187dafbe36c60c8ce300a45bcecd68 | []
| no_license | https://github.com/humnaawan/3D-galaxies-kavli | 4235604ab54d71aa437da58e79f96ce34a6693df | 5c011319fe32924a98f4adca8b0f432128ff6950 | refs/heads/master | 2020-06-20T20:46:51.889748 | 2019-09-19T00:31:28 | 2019-09-19T00:31:28 | 197,243,502 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import datetime, time, socket, os
import numpy as np
from d3g2d import get_data, get_time_passed, readme as readme_obj
# ------------------------------------------------------------------------------
illustris_z0 = False # True for z=0.0 data; False for z=0.4 one
# ------------------------------------------------------------------------------
if illustris_z0:
path = '/Users/humnaawan/repos/3D-galaxies-kavli/data/illustris_mass_shape/mass-all-11p0/'
z = 0.0
snap_num = 135
# get the haloIds
haloIds = []
for file in os.listdir(path):
haloIds.append( int( file.split('subhalo')[1].split('.dat')[0] ) )
haloIds = np.unique( haloIds )
else:
# get z=0.4 cutouts
path = '/Users/humnaawan/repos/3D-galaxies-kavli/data/sum_illustris/'
z = 0.4
snap_num = 108
# get the haloIds
haloIds = []
for file in os.listdir(path):
haloIds.append( int( file.split('_')[4] ) )
haloIds = np.unique( haloIds )
# set up
run_name = 'Illustris-1'
outdir = '/Users/humnaawan/repos/3D-galaxies-kavli/outputs/illustris_z%s/' % z
# ------------------------------------------------------------------------------
# set up the readme
start_time = time.time()
readme_tag = ''
update = '%s\n' % datetime.datetime.now()
update += 'Running on %s\n\n' % socket.gethostname()
update += 'Outdir: %s\n' % outdir
update += 'For z = %s, run_name = %s\n' % (z, run_name)
update += '%s haloIds:\n%s\n' % ( len(haloIds), haloIds)
readme = readme_obj(outdir=outdir, readme_tag=readme_tag, first_update=update)
readme.run()
# save the halo ids
filename = 'haloIds.txt'
np.savetxt('%s/%s' % (outdir, filename), haloIds, fmt='%s')
readme.update(to_write='Saved %s' % filename)
# now get the cutouts etc
get_data(run_name=run_name, z=z, snap_num=snap_num, haloIds=haloIds,
outdir=outdir, print_progress=True, readme=readme)
readme.update(to_write='Done.\n## Time taken: %s\n' % get_time_passed(start_time) )
| UTF-8 | Python | false | false | 1,951 | py | 33 | get_illustris_data.py | 26 | 0.573552 | 0.558175 | 0 | 52 | 36.519231 | 94 |
mozilla/ansible-junos-stdlib | 1,941,325,258,373 | bbc2d256948d4074af6494ee6513fa0b87bd67ff | 7cf86e0b0a68d53c73f7cd144a0195a5ec1e840b | /version.py | 6b78ccab0d3edc21f263c61c56733b64fe0ab0f2 | [
"Apache-2.0"
]
| permissive | https://github.com/mozilla/ansible-junos-stdlib | d59b464dfd787c0631054ca419db8a78461dfb76 | 79e65f3626c38e002ee0bf3e0656a0fba00c5f40 | refs/heads/master | 2023-07-04T02:23:08.237969 | 2017-01-29T23:46:59 | 2017-07-07T10:01:57 | 96,518,054 | 1 | 0 | null | true | 2017-07-07T08:38:42 | 2017-07-07T08:38:42 | 2017-06-27T14:36:45 | 2017-06-28T11:46:12 | 1,201 | 0 | 0 | 0 | null | null | null | VERSION = "2.0.0+dev0"
DATE = "2017-April-24"
| UTF-8 | Python | false | false | 46 | py | 1 | version.py | 1 | 0.630435 | 0.413043 | 0 | 2 | 22 | 22 |
inlpi/anci_nn | 10,797,547,789,564 | 58943c276476d77247bec44bc0f303eb49ddeb11 | 50d25e7059f99d16217da790c198f6c200f684b4 | /extract_embeddings.py | aa235023db2d485abe0c2da1ac3f82303133d00b | [
"MIT"
]
| permissive | https://github.com/inlpi/anci_nn | 6ded0712d3db49a61fe956799ea0e2b8a83a26c0 | 81c23406ef983e403be9c71450dae987f3233da7 | refs/heads/master | 2022-09-27T13:59:34.099100 | 2020-06-03T20:39:30 | 2020-06-03T20:39:30 | 269,162,346 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Python 3.7.6
# -*- coding: utf-8 -*-
# Author: Ines Pisetta
import os
import gensim
import numpy as np
extracted_embeddings_path = 'extracted_embeddings/'
if not os.path.exists(extracted_embeddings_path+'tratz/'):
os.makedirs(extracted_embeddings_path+'tratz')
if not os.path.exists(extracted_embeddings_path+'oseaghdha/'):
os.makedirs(extracted_embeddings_path+'oseaghdha')
constituents_path = 'constituents/'
constituents = []
transformations_path = 'transformations/tratz/'
if not os.path.exists(transformations_path):
os.makedirs(transformations_path)
def extract_embeddings(unknown_emb_file, ds):
extract_embeddings.unknown_emb = np.load(unknown_emb_file)
extract_embeddings.con_vec = {}
if ds == 'oseaghdha':
# no transformations needed
for word in constituents:
get_vector_o(word)
elif ds == 'tratz':
with open('constituents/constituents_tratz_transformations.txt', 'r', encoding = 'utf-8') as t:
extract_embeddings.first_transformation = {line.split(' -> ')[0]:line.split(' -> ')[1] for line in t.read().splitlines()}
extract_embeddings.second_transformation = {}
extract_embeddings.combined_transformation = {}
for word in constituents:
get_vector_t(word)
tmp = 'transformations_' + unknown_emb_file.split('/')[1].rsplit('.', 1)[0].replace('_unknown', '') + '.txt'
with open(transformations_path+tmp, 'w', encoding = 'utf-8') as t:
for k,v in extract_embeddings.combined_transformation.items():
t.write(k + ' -> ' + v + '\n')
else:
        print('Error: unknown dataset ' + ds)
tmp = unknown_emb_file.split('/')[1].rsplit('.', 1)[0].replace('_unknown', '') + '.txt'
output_file = extracted_embeddings_path + ds + '/emb_' + tmp
with open(output_file, 'w', encoding = 'utf-8') as o:
for k,v in extract_embeddings.con_vec.items():
o.write(k + ' ' + str(v) + '\n')
with open(output_file.replace('.txt', '_indices.txt'), 'w', encoding = 'utf-8') as i:
for count, k in enumerate(extract_embeddings.con_vec.keys()):
i.write(k + ' ' + str(count) + '\n')
lookuptable = np.array(list(extract_embeddings.con_vec.values()))
#print('\n' + str(len(constituents)-lookuptable.shape[0]) + ' constituents filtered')
print(lookuptable.shape)
#print('\n\n')
np.save(output_file.replace('.txt', '_vectors.npy'), lookuptable)
def get_vector_o(word):
vector = ''
try:
vector = vectors[word]
except KeyError:
#print(word, ' not in vocabulary')
vector = extract_embeddings.unknown_emb
assert len(vector) == 300
assert isinstance(vector, np.ndarray)
extract_embeddings.con_vec[word] = vector
def get_vector_t(word):
word_t = word
if word in extract_embeddings.first_transformation:
word_t = extract_embeddings.first_transformation[word]
if '_' in word_t:
word_f = 0
words = word_t.split('_')
if word_t in extract_embeddings.second_transformation:
extract_embeddings.combined_transformation[word] = extract_embeddings.second_transformation[word_t]
#print(word)
pass
else:
if '-'.join(words) in vectors:
vector = vectors['-'.join(words)]
word_f = '-'.join(words)
elif ' '.join(words) in vectors:
vector = vectors[' '.join(words)]
word_f = ' '.join(words)
elif word_t in vectors:
vector = vectors[word_t]
word_f = word_t
else:
v = []
for w in words:
                    # vectors of the individual words
try:
v.append(vectors[w])
                    # unless a word is unknown; then use the unknown-word embedding
except KeyError:
v.append(extract_embeddings.unknown_emb)
v = np.array(v)
vector = v.mean(axis=0)
word_f = '#'.join(words)
assert len(vector) == 300
assert isinstance(vector, np.ndarray)
assert isinstance(word_f, str)
extract_embeddings.second_transformation[word_t] = word_f
extract_embeddings.combined_transformation[word] = word_f
extract_embeddings.con_vec[word_f] = vector
# one-word expressions
else:
try:
vector = vectors[word_t]
except KeyError:
#print(word_t, ' not in vocabulary')
vector = extract_embeddings.unknown_emb
assert len(vector) == 300
assert isinstance(vector, np.ndarray)
extract_embeddings.combined_transformation[word] = word_t
extract_embeddings.con_vec[word_t] = vector
def load_glove(emb_file):
vec = {}
with open(emb_file, 'r', encoding = 'utf-8') as e:
for line in e:
line = line.rstrip()
word = line.split(' ', 1)[0]
vector_string = line.split(' ', 1)[1]
vector_list = vector_string.split(' ')
vector = np.array([float(x) for x in vector_list])
vec[word] = vector
return vec
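# Usage sketch: load_glove('embeddings/glove.6B.300d.txt') returns a dict
# mapping each token to a 300-dimensional numpy vector parsed from the file.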
if __name__ == '__main__':
for ds in ['tratz', 'oseaghdha']:
with open(constituents_path + 'constituents_' + ds + '.txt', 'r', encoding = 'utf-8') as c:
constituents = c.read().splitlines()
"""
vectors = load_glove('embeddings/glove.6B.300d.txt')
extract_embeddings('unknown_embeddings/glove.6B_unknown.npy', ds)
vectors = load_glove('embeddings/glove.42B.300d.txt')
extract_embeddings('unknown_embeddings/glove.42B_unknown.npy', ds)
vectors = load_glove('embeddings/glove.840B.300d.txt')
extract_embeddings('unknown_embeddings/glove.840B_unknown.npy', ds)
"""
vectors = gensim.models.KeyedVectors.load_word2vec_format('embeddings/GoogleNews-vectors-negative300.bin', binary=True)
#extract_embeddings('unknown_embeddings/w2v_unknown.npy', ds)
extract_embeddings('unknown_embeddings/w2v_unknown_1000.npy', ds)
| UTF-8 | Python | false | false | 6,660 | py | 24 | extract_embeddings.py | 6 | 0.54783 | 0.538069 | 0 | 188 | 33.37766 | 133 |
jiyudonggithub/WebSpider | 10,402,410,832,729 | ad66136d0afc5183549919c6ca80ce73a933af45 | c55758fe1b61828d4e8e46787e6c1683a5244c9b | /First/multithreading.py | 42fc950739d9d3be42f7b27c60ab257223b5e7fd | []
| no_license | https://github.com/jiyudonggithub/WebSpider | b5625bb2a2c4b448ff4d7c66ebb70c0e16da1e8d | cf171e501ed75efedadeff80abcf33605041cd58 | refs/heads/master | 2023-01-03T11:10:47.516326 | 2020-11-04T03:26:38 | 2020-11-04T03:26:38 | 306,371,802 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
# @Time : 2020/9/24 19:53
# @Author : Jiyudong
# @FileName: multithreading.py
# @Software: PyCharm
import time
import threading
tasks = ['move1', 'move2', 'move3', 'move4', 'move5', 'move6', 'move7', 'move8', 'move9', 'move10']
def download(move):
print(f'start downloading {move}')
time.sleep(2)
print(f'finish download {move}\n')
if __name__ == '__main__':
for task in tasks:
thread = threading.Thread(target=download, args=(task,))
thread.start()
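        # note: these worker threads are non-daemon, so the interpreter keeps
        # running until every simulated download finishes even without join()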
| UTF-8 | Python | false | false | 510 | py | 50 | multithreading.py | 48 | 0.613725 | 0.566667 | 0 | 20 | 24.5 | 99 |
sbaguirr/Selenium-Test | 1,142,461,321,024 | 0b6f0577fecf0bd063a2d0c9b8b2cc87712c3d32 | 4322a33a538be997b54fda4bd6057fb270fd3ca4 | /Selenium/locators.py | fe917ed33820f4d98708296af9812822d7acecbc | []
| no_license | https://github.com/sbaguirr/Selenium-Test | 3c9f07045802dc0cde18d108d56bc3538c6af546 | 583872149dac4c4676bdc8973333e6b7cfa9f7f5 | refs/heads/master | 2021-01-04T15:56:25.898261 | 2020-02-17T03:07:26 | 2020-02-17T03:07:26 | 240,624,183 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """Selenium Test"""
class EspolPageLocators:
"""
Class for Espol page locators.
"""
faculty_list_xpath = "//div[@id='accordion']/div/div/h4/a/strong"
ul_list_xpath = "//div[@class='panel-body']/ul[2]"
li_list_xpath = ".//li"
career_link_xpath = ".//a"
class BonusPageLocators:
"""
Class for Bonus page locators.
"""
elective_course_xpath = "//p[@id='informacion']/a"
select_elements_xpath = "//select[@name='tbl_materias_complementarias_length']"
rows_xpath = "//table[@id='tbl_materias_complementarias']/tbody/tr"
career_xpath = "//h1"
data_xpath = [".//td[1]", ".//td[2]", ".//td[3]"]
| UTF-8 | Python | false | false | 653 | py | 6 | locators.py | 5 | 0.591118 | 0.58193 | 0 | 22 | 28.681818 | 83 |
JaviS1997/ie_python_course | 6,760,278,528,224 | 0b56f51a691701736f9c84872057c10421ac07c0 | b9a94ca39f346d21d5d5e63b7dc8fc83a84f44f2 | /HW.py | 3dd9f569a3457f536e20ad00f679fac23410a4b9 | []
| no_license | https://github.com/JaviS1997/ie_python_course | cb0d39c02faa8706467afd02c5e1a7909a65e2f7 | 9d1aaf7619790b1b48b5b19c8a40e11825ceb295 | refs/heads/master | 2020-12-20T20:05:03.456979 | 2020-02-09T19:33:47 | 2020-02-09T19:33:47 | 236,196,235 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
def askName():
print("What's your name ?")
name = input()
print("Nice to meet you {} !".format(name))
return 1
def division():
print("Input two integer numbers for the division. \nNumerator :")
num = int(input())
print("Denominator :")
denominator = int(input())
# Rounded up to one decimal
print("{} / {} = {}".format(num, denominator, round(num / denominator, 1)))
return 1
def surfaceCircle():
print("Radius of the circle :")
radius = float(input())
result = np.pi * radius ** 2
print("Surface = π * {}^2 = {}".format(radius, round(result, 2)))
return 1
def maze():
moves = 0
answer = ''
solution = 'SSNWES'
print("You are in the magic maze")
while answer != solution:
print("Which way to go now ? (N,S,E,W)")
key = input().upper()
if key in ['S', 'N', 'E', 'W']:
answer += key
if answer == solution[0:moves + 1]:
moves += 1
print("Correct! {} move(s) to finish".format(len(solution) - moves))
else:
print('Wrong way ! You are going back to the beginning')
answer = ''
moves = 0
else:
            print('Please enter a valid direction (N, S, E, W)!')
return print("Congrats! You finished the maze")
def substr():
    print('Input the word whose substring we will use :')
    word = input()
    prior = word[0]
    substring = ''
    longest_substring = ''
    for c in word:
        if c >= prior:
            substring += c
        else:
            if len(substring) >= len(longest_substring):
                longest_substring = substring
            substring = '' + c
        prior = c
    # also consider the run that reaches the end of the word, which the loop
    # above never compares (e.g. for 'abc' the whole word is the answer)
    if len(substring) >= len(longest_substring):
        longest_substring = substring
    print('The longest alphabetical substring is \'{}\''.format(longest_substring))
# askName()
# division()
# surfaceCircle()
# maze()
substr()
| UTF-8 | Python | false | false | 1,891 | py | 2 | HW.py | 1 | 0.538095 | 0.531217 | 0 | 74 | 24.540541 | 84 |
aikram24/S3-Restore | 12,489,764,939,456 | fbb5e2ae6906073af1635039e6332ae3b4e3dae9 | d4ff11e89fddbaeac676a2231bc90e1940b602a2 | /s3_restore.py | d0fc91d5c331bee2f9a30adb927f9e13dc99c8d7 | []
| no_license | https://github.com/aikram24/S3-Restore | 261c62ad932852ff46d5335f6bd96e4b0929dd1c | a12b33af59a23e166b17dbbd2c3f71e48ab4b9de | refs/heads/master | 2020-07-01T06:44:25.754151 | 2017-06-16T17:22:33 | 2017-06-16T17:22:33 | 74,093,047 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python2.7
# Ali Ikram
#
import os, sys, argparse
from s3_functions import s3
parser = argparse.ArgumentParser(description='S3 Restore Script')
parser.add_argument('-b','--bucketname', help='Bucket Name ex. s3-io-test-2292',required=True)
parser.add_argument('-v','--version',help='Version ID', required=False)
parser.add_argument('--getbucketinfo',help='Get bucket info', action='store_true', required=False)
args = parser.parse_args()
BUCKET_NAME = args.bucketname
VERSION_ID = args.version
GET_INFO = args.getbucketinfo
if args.version and args.getbucketinfo:
    # this conflict must be checked first; after the plain --getbucketinfo
    # branch it would be unreachable
    print('You cannot provide version ID with "--getbucketinfo" options')
elif args.getbucketinfo:
    data = s3(BUCKET_NAME)
    data.get_bucket_list()
elif args.bucketname and args.version:
    data = s3(BUCKET_NAME)
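# Example invocations (hypothetical bucket name):
# python s3_restore.py -b s3-io-test-2292 --getbucketinfo
# python s3_restore.py -b s3-io-test-2292 -v <version-id>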
| UTF-8 | Python | false | false | 788 | py | 3 | s3_restore.py | 2 | 0.746193 | 0.730964 | 0 | 23 | 33.217391 | 99 |
wyanlord/MyCode | 2,843,268,352,465 | 871007239788e519ea16ff5f5fe85e36a02d20aa | 1cf340eb13aed56263cfdc82326c78882026d743 | /Linux/Ubuntu/安装.py | 3c5f95c3de537703ea429ce4f37a6d26597dabf3 | []
| no_license | https://github.com/wyanlord/MyCode | b9bb3a36a1f4f18ca49424993618f1640c87f5dd | 225d744101e5ab339ff47770e13f52b4c1176f65 | refs/heads/master | 2017-12-21T17:23:43.946122 | 2017-02-23T09:28:03 | 2017-02-23T09:28:03 | 72,913,669 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #更新apt-get源
apt-get update
#更新现有软件
apt-get upgrade
#安装vim
apt-get install vim
#去掉系统的错误报告
vim /etc/default/apport
enabled=0
reboot
#首先安装ssh
apt-get install ssh
#安装 vm-tools
mkdir /media/mnt && mount /dev/cdrom /media/mnt && cp /media/mnt/VMware* /root && umount /dev/cdrom && cd ~ && tar zxvf VMwareTools* && cd vmware-tools-distrib/ && ./vmware-install.pl
reboot
#一些常用的软件,chrome需要手动下载
apt-get install vim lrzsz gnome-software tree git python gcc unity-tweak-tool docky xchm
#更换侧边栏,命令有时不能用
apt-get install dconf-editor (com->canonical->unity->launcher)
gsettings set com.canonical.unity.launcher launcher-position Bottom
gsettings set com.canonical.unity.launcher launcher-position Left
# install commonly used software
sublime-Text3 WPS sougoupinyin
# set up Chinese input for Sublime Text 3
sudo apt-get install libgtk2.0-dev
# create a new file sublime_imfix.c
cd ~ && sudo touch sublime_imfix.c
# add the following content
#include <gtk/gtkimcontext.h>
void gtk_im_context_set_client_window (GtkIMContext *context, GdkWindow *window)
{
GtkIMContextClass *klass;
g_return_if_fail (GTK_IS_IM_CONTEXT (context));
klass = GTK_IM_CONTEXT_GET_CLASS (context);
if (klass->set_client_window) klass->set_client_window (context, window);
g_object_set_data(G_OBJECT(context),"window",window);
if(!GDK_IS_WINDOW (window)) return;
int width = gdk_window_get_width(window);
int height = gdk_window_get_height(window);
if(width != 0 && height !=0) gtk_im_context_focus_in(context);
}
sudo gcc -shared -o libsublime-imfix.so sublime_imfix.c `pkg-config --libs --cflags gtk+-2.0` -fPIC
sudo mv libsublime-imfix.so /opt/sublime_text/
sudo vim /usr/bin/subl
#=>LD_PRELOAD=/opt/sublime_text/libsublime-imfix.so exec /opt/sublime_text/sublime_text "$@"
sudo vim /usr/share/applications/sublime_text.desktop
#=>Exec=bash -c "LD_PRELOAD=/opt/sublime_text/libsublime-imfix.so exec /opt/sublime_text/sublime_text %F"
#=>Exec=bash -c "LD_PRELOAD=/opt/sublime_text/libsublime-imfix.so exec /opt/sublime_text/sublime_text -n"
#=>Exec=bash -c "LD_PRELOAD=/opt/sublime_text/libsublime-imfix.so exec /opt/sublime_text/sublime_text --command new_file"
# install WPS
download http://download.csdn.net/download/wl1524520/6333049
copy it to /usr/share/fonts/wps_symbol_fonts/
mkfontdir
mkfontscale
fc-cache
# install a theme
sudo add-apt-repository ppa:noobslab/themes
sudo apt-get update
sudo apt-get install flatabulous-theme
# install icons
sudo add-apt-repository ppa:noobslab/icons
sudo apt-get update
sudo apt-get install ultra-flat-icons
# change the terminal shell
sudo apt-get install zsh
wget https://github.com/robbyrussell/oh-my-zsh/raw/master/tools/install.sh -O - | sh
chsh -s /usr/bin/zsh
# chsh -s /bin/bash switches back; also edit root's shell path in /etc/passwd
# install a macOS-style theme
sudo add-apt-repository ppa:noobslab/macbuntu
sudo apt-get update
sudo apt-get install macbuntu-os-icons-lts-v7
sudo apt-get install macbuntu-os-ithemes-lts-v7
# install an SVN manager
sudo add-apt-repository ppa:rabbitvcs/ppa
sudo apt-get install python-nautilus python-configobj python-gtk2 python-glade2 python-svn python-dbus python-dulwich subversion meld
sudo apt-get install rabbitvcs-cli rabbitvcs-gedit rabbitvcs-core rabbitvcs-nautilus
nautilus -q
nautilus
reboot
# install IDEs
phpstorm clion IDEA
mv ** /opt/*
sudo ln -s /opt/clion/bin/clion.sh /usr/local/bin/clion
sudo ln -s /opt/idea/bin/idea.sh /usr/local/bin/idea
sudo ln -s /opt/phpstorm/bin/phpstorm.sh /usr/local/bin/phpstorm
# install Qt5
sudo chmod +x ***.run
sudo ./***.run
sudo apt-get install fcitx-libs-qt fcitx-libs-qt5
sudo cp /usr/lib/x86_64-linux-gnu/qt5/plugins/platforminputcontexts/libfcitxplatforminputcontextplugin.so \
/opt/Qt5.7.1/Tools/QtCreator/lib/Qt/plugins/platforminputcontexts/
# disable the third-party sources in the settings (do not delete them), then run update again
sudo apt-get update
# rename the desktop folders to English; remember to reboot after choosing
export LANG=en_US(export LANG=zh_CN.UTF-8)
xdg-user-dirs-gtk-update
# alternatively edit ~/.config/user.dir* and rename the desktop folders to match
# install download tools; Firefox needs the FlashGot add-on
sudo add-apt-repository ppa:plushuang-tw/uget-stable
sudo add-apt-repository ppa:t-tujikawa/ppa
sudo apt-get update
sudo apt-get install uget aria2
# set up a PHP development environment
1. install nginx
wget http://nginx.org/keys/nginx_signing.key
sudo apt-key add nginx_signing.key
echo "deb http://nginx.org/packages/ubuntu/ trusty nginx" >> /etc/apt/sources.list
echo "deb-src http://nginx.org/packages/ubuntu/ trusty nginx" >> /etc/apt/sources.list
sudo apt-get update
sudo apt-get install nginx
sudo vim /etc/nginx/conf.d/default.conf
sudo vim /etc/nginx/nginx.conf
user www-data;
/usr/sbin/nginx -v
service nginx restart
2. install PHP 7
sudo apt-get install python-software-properties software-properties-common
sudo add-apt-repository ppa:ondrej/php
sudo apt-get update
sudo apt-get -y install autoconf g++ make openssl libssl-dev libcurl4-openssl-dev
sudo apt-get install php7.0-fpm php7.0-mysql php7.0-common php7.0-curl php7.0-cli php7.0-mcrypt php7.0-mbstring php7.0-xml php7.0-dev
sudo vim /etc/php/7.0/fpm/php.ini
sudo vim /etc/php/7.0/cli/php.ini
service php7.0-fpm restart
# install PEAR
sudo apt-get install php-pear
pecl install redis
3. install MySQL
sudo apt-get install mysql-server-5.7 mysql-client-5.7
mysql -uroot -p
| UTF-8 | Python | false | false | 5,506 | py | 76 | 安装.py | 65 | 0.752557 | 0.738395 | 0 | 140 | 34.314286 | 183 |
andyglez/external_access | 4,269,197,522,733 | 0c46a0a78662e8dbd7cfbb94a21c3a6d6b19da0b | d42f4965db75d36464ddc013538c911fa00074cc | /controllers/search_ctr.py | 3a3ffa6d75b6f471975cd631f5488c1c08f61d0d | []
| no_license | https://github.com/andyglez/external_access | 13fe21f1cc167c3aa8c4f54a989297390a16e0e5 | 2ca68d6efdea87741c6070237efe1b6fa6694b06 | refs/heads/master | 2020-04-28T13:08:52.266332 | 2019-09-06T19:52:30 | 2019-09-06T19:52:30 | 175,299,247 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from settings import database as db
from utils.cookies import Cookies
def dont_have_permissions(roles):
return roles['is_default']
def search(cookies, query, category):
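    # deans and admins only see users whose Area matches their own
    # (cookies 'info'[2]); all other roles search across every user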
data = ([(u, n, a, e, p) for u, n, a, e, p in get_users(category) if by_category(query, category, u, n, a, e, p) and a == cookies.get('info')[2]]
if cookies.get('roles')['is_dean'] or cookies.get('roles')['is_admin']
else [(u, n, a, e, p) for u, n, a, e, p in get_users(category) if by_category(query, category, u, n, a, e, p)])
cookies.set('query_value', query)
return data
def by_category(query, category, user, name, area, email, phone):
if category == 'name':
return query in name.lower()
elif category == 'username':
return query in user.lower()
elif category == 'email':
return query in email.lower()
elif category == 'phone':
return query in phone
return query in area
def get_users(category):
return db.query('select UserName, Name, Area, email, phone from Users order by {0}'.format(category)) | UTF-8 | Python | false | false | 1,072 | py | 37 | search_ctr.py | 23 | 0.633396 | 0.63153 | 0 | 26 | 40.269231 | 150 |
tijugeorge/Python-code-practice | 9,878,424,784,980 | 7ddd8e1cae8af70abcb1c7ae3100afc74c8d38d7 | b57152633d14bac5889aad76c0b233f6e3b9c720 | /sum-of-array.py | 55bb39d3d7eb53cb0dfeebe5443bdac7a34258ef | []
| no_license | https://github.com/tijugeorge/Python-code-practice | 2f01fbaf88238a0e72d18cae4adb23fab3d6625d | 8967a03a560160fa04e4801b3d7f0046687daf03 | refs/heads/master | 2022-10-21T02:10:28.784617 | 2022-10-09T22:56:26 | 2022-10-09T22:56:26 | 74,500,629 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import sys
import os
def sum_list(numbers):
    ret = 0
    for i in numbers:
        ret += i
    return ret
print sum_list([1, 2, 3, 4, 5])
#f= open(os.environ['OUTPUT_PATH'],'w')
| UTF-8 | Python | false | false | 177 | py | 70 | sum-of-array.py | 68 | 0.553672 | 0.519774 | 0 | 14 | 11.571429 | 39 |
ducoquelicot/python_files | 3,848,290,745,430 | 97e9cdcb10a3d73e8d4c4f4e956670d07385a21b | da7b4d8d632278a7e793590095c2e3940fdaff0d | /fuckery.py | 0cd78b424cf89a15898dfc9733327515a3f0593a | []
| no_license | https://github.com/ducoquelicot/python_files | 6a0519c637714e19fa3e3f85f2a4d88b33f747c5 | aa3a070df16d04594f5e8645a05dc0a6853be73b | refs/heads/master | 2022-12-31T19:17:57.068541 | 2019-11-05T12:40:37 | 2019-11-05T12:40:37 | 168,590,154 | 0 | 0 | null | false | 2022-12-08T05:56:52 | 2019-01-31T20:17:42 | 2019-11-05T12:41:00 | 2022-12-08T05:56:51 | 552,942 | 0 | 0 | 2 | HTML | false | false | from bs4 import BeautifulSoup
import csv
import os
import requests
pa_agenda = {2019: 'https://www.cityofpaloalto.org/gov/depts/cou/council_agendas.asp', 2018:'https://www.cityofpaloalto.org/gov/agendas/council/2018.asp'}
for year in pa_agenda.keys():
    print(pa_agenda[year])
for i in range(2002,2019):
print(i)
from bs4 import BeautifulSoup
import os, urllib.request
import fitz
# cmd = 'pdftohtml -c -s /home/fabienne/Desktop/Python/Files/pdfs_2019_37.pdf /home/fabienne/Desktop/Python/html/pdfs_2019_37'
# os.system(cmd)
# response = urllib.request.urlopen('file:///home/fabienne/Desktop/Python/html/pdfs_2019_37-html.html', timeout=1)
# html = response.read()
# soup = BeautifulSoup(html, 'html.parser')
# links = soup.select('a')
# for row in links:
# if '31-18' in row.getText() or '07-19' in row.getText():
# print(row)
doc = fitz.open('/home/fabienne/Desktop/Python/Files/pdfs_2019_37.pdf')
page = doc[2]
links = page.getLinks()
print(len(doc))
lastnum = len(doc) - 1
print(lastnum)
for row in links:
print(row['uri'])
# '/home/fabienne/Desktop/Python/PDF/pdfs_2019_37.pdf'
# for recall in recalls:
# filename = os.path.basename(recall)[:-4]
# soup = BeautifulSoup(open(recall), 'html.parser')
# with open(os.path.expanduser('~/Desktop/Python/Files/' +filename +'_soup.html'), 'w') as file:
# file.write(str(soup)) | UTF-8 | Python | false | false | 1,366 | py | 140 | fuckery.py | 19 | 0.693997 | 0.644217 | 0 | 47 | 28.085106 | 154 |
cthamilton/BootCamp2017 | 566,935,726,753 | 49ba6fbc8a8cd45d3190a560d1f9da77a3a183cf | 90a979361002fd832ad0b1cee03adc8467dc1726 | /ProbSets/Comp/Week4/CondStab/1.py | 26c38d879f5c33a4ef39dd4a902fddc5dc735a76 | []
| no_license | https://github.com/cthamilton/BootCamp2017 | 50a2c4fb96209983a08647a5933a4d456ae1c9ea | e50927a79f5031318b9d12c6a30a475d7e5f637e | refs/heads/master | 2017-07-30T10:48:46.106675 | 2017-07-30T09:11:51 | 2017-07-30T09:11:51 | 94,893,563 | 0 | 0 | null | true | 2017-06-20T13:21:20 | 2017-06-20T13:21:19 | 2017-06-20T11:18:16 | 2017-06-20T13:15:29 | 11,430 | 0 | 0 | 0 | null | null | null | import numpy as np
import scipy.linalg as la
def condcalc(A):
x,y,z = la.svd(A)
ma = np.max(y)
mi = np.min(y)
return ma / mi
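# Example: for np.diag([1., 1e-6]) the singular values are 1 and 1e-6, so
# condcalc returns the 2-norm condition number 1e6.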
| UTF-8 | Python | false | false | 147 | py | 164 | 1.py | 139 | 0.564626 | 0.564626 | 0 | 8 | 16.75 | 25 |
f981113587/Python | 2,542,620,656,451 | 233fea320f14b1f57f79afe790369e8e2a574fa5 | eee2e2c19867b8500a0176442084e35361a51475 | /Aula 08/Desafios/018.py | 46f2a764f0d25aa2cd4feef4c7f718875834ecb6 | []
| no_license | https://github.com/f981113587/Python | 71a30d1ddd9317571f20f7abb735e94997604a08 | ef76f72c9caec4b4ba15315d1366f5fb1bbb42f0 | refs/heads/main | 2023-07-16T16:25:36.037368 | 2021-08-28T14:17:15 | 2021-08-28T14:17:15 | 400,808,061 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Faça um programa que leia um ângulo
# qualquer e mostre na tela o valor
# do seno, cosseno e tangete desse ângulo.
from math import cos, sin, tan, radians
angulo = float(input('Informe o valor do ângulo: '))
print('Cos({}) = {}'.format(angulo, cos(radians(angulo))))
print('Sin({}) = {}'.format(angulo, sin(radians(angulo))))
print('Tan({}) = {}'.format(angulo, tan(radians(angulo))))
| UTF-8 | Python | false | false | 393 | py | 83 | 018.py | 83 | 0.676093 | 0.676093 | 0 | 10 | 37.9 | 58 |
neviim/_commandos_ | 4,887,672,813,412 | f371359e2093d1ce79f2ff4d1d0a28d64c7cd6ad | 8c95394615cd1c4d973c43baa6fdb74aa4834993 | /fcn/doc-script/[mongodb_install_config.py | b1d7147c8bf3965d8599a57917b20055f55f14e6 | []
| no_license | https://github.com/neviim/_commandos_ | fa250dc8198f178a36664a60c37cbcbd8b01e29c | 6fb9c5dbe4b646f816d12e54f15529fe53859546 | refs/heads/master | 2021-05-14T12:32:18.116512 | 2020-02-28T18:35:50 | 2020-02-28T18:35:50 | 116,411,489 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # instalando mongodb centos 7
::: Colocar repositorio MongoDB
$ nano /etc/yum.repos.d/mongodb-org.repo
[mongodb-org-4.0]
name=MongoDB Repository
baseurl=https://repo.mongodb.org/yum/redhat/7/mongodb-org/4.0/x86_64/
gpgcheck=1
enabled=1
gpgkey=https://www.mongodb.org/static/pgp/server-4.0.asc
$ yum repolist
$ yum install mongodb-org
$ systemctl start mongod.service
$ systemctl status mongod.service
::: Master and Slave
| UTF-8 | Python | false | false | 460 | py | 283 | [mongodb_install_config.py | 144 | 0.704348 | 0.673913 | 0 | 21 | 20.809524 | 72 |
gitshaozhong/VideoSearchEngine | 8,856,222,608,881 | c6575a4a7df31f3de669cb9ee5691538c119dc95 | 5d0fe6e6210bfd3dcc149da08dd6f347d8cfb0a1 | /VideoSearchEngine/NoisyFrameFilter.py | cc9e84160c4b3940c9748676d7b0df23a4a92bef | [
"MIT"
]
| permissive | https://github.com/gitshaozhong/VideoSearchEngine | 1d36aef87a62ab04f0cddeff9eab0c1fb9eba4b9 | 48281688fc1abcad7e81121f38a87ab88c91dc90 | refs/heads/master | 2020-05-30T02:57:26.588291 | 2019-05-31T01:25:14 | 2019-05-31T01:25:14 | 189,505,669 | 0 | 0 | null | true | 2019-05-31T01:22:22 | 2019-05-31T01:22:21 | 2019-05-30T13:17:38 | 2018-09-26T23:26:06 | 182,992 | 0 | 0 | 0 | null | false | false | # Main file for given a frame, filter the noisy frames
# Main API is here, more files may be used for the implementation
def get_frame_filter():
'''
return the version specified in the configuration to use
e.g. if there is a basic one and a complex one, the configuration should be able
to decide which one to use
'''
return None
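# A possible minimal filter, sketched here as a comment (assumed interface:
# return True when a frame should be dropped as noisy):
#   def variance_filter(frame, threshold=10.0):
#       return frame.var() < threshold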
'''
Describe API supported here
''' | UTF-8 | Python | false | false | 392 | py | 50 | NoisyFrameFilter.py | 41 | 0.701531 | 0.701531 | 0 | 15 | 25.2 | 84 |
blakecheng/stylegan_reimplementation | 16,947,940,963,400 | c187ca3955dd34dfad7d0a1395e0bf4b2c060bca | e42bd2a672c335cfd6192962be49677e80259703 | /train.py | 5f2436214e69bdc2cb24f1f842ca23405ebac8f2 | [
"Apache-2.0"
]
| permissive | https://github.com/blakecheng/stylegan_reimplementation | c57d2464946767c097c976037a6eddcab3da6583 | f365a6fab9dd08c15480f76fee78da26ac942c9d | refs/heads/master | 2022-11-11T19:57:41.394343 | 2020-06-24T02:08:57 | 2020-06-24T02:08:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
import ast
from collections import namedtuple
#from tensorflow.python import debug as tf_debug
from shutil import copy, copytree
from tqdm import trange
import csv
import random
import tensorflow as tf
import numpy as np
from data import get_dataset
from models import Generator, Discriminator, MappingNetwork
from ops import name_scope, upsample, downsample, downsample_nv
from utils import filter_vars_with_checkpoint, build_label_list_from_file
TrainHps = namedtuple("TrainingHyperparams",
["res_h", "res_w", "current_res_w", "psi_w", "batch_size", "epochs_per_res",
"optimizer", "loss_fn", "profile", "ngpus",
"learning_rate", "adam_beta1", "adam_beta2", "use_beholder",
"model_dir", "gp_fn", "lambda_gp", "ncritic", "cond_uniform_fake",
"do_pixel_norm", "start_res_h", "start_res_w", "map_cond",
"tboard_debug", "cli_debug", "cond_weight", "cond_layers",
"eager", "no_train", "lambda_drift", "conditional_type",
"do_equalized_lr", "do_minibatch_stddev", "label_file",
"steps_per_save", "save_paths", "do_traditional_input",
"do_mapping_network", "do_add_noise", "resize_method"])
TrainHps.__new__.__defaults__ = (None,) * len(TrainHps._fields)
SavePaths = namedtuple("SavePaths",
["gen_model", "dis_model", "mapping_network", "sampling_model",
"gen_optim", "dis_optim", "mn_optim", "alpha", "step"])
SavePaths.__new__.__defaults__ = (None,) * len(SavePaths._fields)
@name_scope("non_saturating_loss")
def non_saturating_loss(real_logit, fake_logit):
"""
:param real_logit: logit(s) for real images (if None just return generator loss)
:param fake_logit: logit(s) for fake images
:return: loss for discriminator and generator (unless real_logit is None)
"""
loss_generator = .5 * tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
labels=tf.ones_like(fake_logit),
logits=fake_logit))
if real_logit is None:
return loss_generator
loss_discriminator_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
labels=tf.ones_like(real_logit),
logits=real_logit))
loss_discriminator_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(
labels=tf.zeros_like(fake_logit),
logits=fake_logit))
loss_discriminator = .5 * loss_discriminator_real + .5 * loss_discriminator_fake
return loss_discriminator, loss_generator
@name_scope("wasserstein_loss")
def wasserstein_loss(real_logit, fake_logit):
"""
:param real_logit: logit(s) for real images (if None just return generator loss)
:param fake_logit: logit(s) for fake images
:return: loss for discriminator and generator (unless real_logit is None)
"""
loss_generator = - fake_logit
if real_logit is None:
return loss_generator
loss_discriminator_real = - real_logit
loss_discriminator_fake = fake_logit
# this actually negates the need for a bias in the FC layer, it's cancelled out
loss_discriminator = loss_discriminator_real + loss_discriminator_fake
return loss_discriminator, loss_generator
@name_scope("drift_penalty")
def drift_penalty(real_logit):
return tf.square(real_logit)
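# small penalty discouraging the real-image logits from drifting far from
# zero (PGGAN's epsilon-drift term); presumably scaled by hps.lambda_drift
# where the losses are combined.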
@name_scope("interpolates")
def get_interpolates(real_data, fake_data, alpha_interpolates=None):
if alpha_interpolates is None:
alpha_interpolates = tf.random_uniform([real_data.get_shape().as_list()[0], 1, 1, 1], 0., 1.)
return alpha_interpolates*fake_data + (1-alpha_interpolates)*real_data
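# Each interpolate lies on the straight line between a real and a fake
# sample (one random mixing coefficient per batch element), as in WGAN-GP.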
@name_scope("R1_gp")
def r1_gp(fake_image, real_image, dis_model, alpha, label_dict=None, conditional_type=None, **kwargs):
output_sum = 0
if conditional_type == "acgan":
output, class_logits = dis_model(real_image, alpha=alpha, y=None)
if class_logits is not None:
for label in label_dict.keys():
output_sum = output_sum + tf.reduce_sum(class_logits[label])
elif conditional_type == "proj":
output, _ = dis_model(real_image,
alpha=alpha,
y=tf.concat([label_dict[l] for l in label_dict.keys()], axis=-1))
else:
output, _ = dis_model(real_image,
alpha=alpha,
y=None)
    # sum of outputs for each image in batch. The derivative of an output for an image from a different
# batch should be 0, so this won't scale with batch size
# todo: is the sum even necessary?
output_sum = output_sum + tf.reduce_sum(output)
grads = tf.gradients(output_sum, [real_image])[0]
# all but first axis (usually [1,2,3]) or the first axis if only that is available
    axis = np.arange(1, grads.get_shape().ndims) if grads.get_shape().ndims != 1 else None
l2_squared_grads = tf.reduce_sum(tf.square(grads), axis=axis)
penalty = l2_squared_grads * 0.5
return penalty
@name_scope("l2_gp")
def l2_gp(input, output):
if output.get_shape().ndims not in [0, 1, 2]:
raise ValueError("output should be ranks 0 to 2 (list of losses or single loss)")
grads = tf.gradients(output, [input])[0]
# all but first axis (usually [1,2,3]) or the first axis if only that is available
axis = np.arange(1, grads.get_shape().ndims) if grads.get_shape().ndims is not 1 else None
l2_grads = tf.sqrt(tf.reduce_sum(tf.square(grads), axis=axis))
penalty = tf.square(l2_grads-1.)
return penalty
@name_scope("wgan_gp")
def wgan_gp(fake_image, real_image, dis_model, alpha, alpha_interpolates=None):
interps = get_interpolates(real_image, fake_image, alpha_interpolates)
output = tf.reduce_sum(dis_model(interps, alpha=alpha))
return l2_gp(interps, output)
@name_scope("wgan_gp_eager")
def wgan_gp_eager(fake_image, real_image, dis_model, alpha, alpha_interpolates=None):
interps = get_interpolates(real_image, fake_image, alpha_interpolates)
with tf.GradientTape() as tape:
tape.watch(interps) # interps is not trainable so not auto-watched
output = dis_model(interps, alpha=alpha)
if output.get_shape().ndims not in [0, 1, 2]:
raise ValueError("output should be ranks 0 to 2 (list of losses or single loss)")
grads = tape.gradient(output, interps)
# all but first axis (usually [1,2,3]) or the first axis if only that is available
axis = np.arange(1, grads.get_shape().ndims) if grads.get_shape().ndims is not 1 else None
l2_grads = tf.sqrt(tf.reduce_sum(tf.square(grads), axis=axis))
penalty = tf.square(l2_grads-1.)
return penalty
def build_models(hps, current_res_w, use_ema_sampling=False, num_classes=None, label_list=None): # todo: fix num_classes
mapping_network = MappingNetwork() if hps.do_mapping_network else None
gen_model = Generator(current_res_w, hps.res_w, use_pixel_norm=hps.do_pixel_norm,
start_shape=(hps.start_res_h, hps.start_res_w),
equalized_lr=hps.do_equalized_lr,
traditional_input=hps.do_traditional_input,
add_noise=hps.do_add_noise,
resize_method=hps.resize_method,
use_mapping_network=hps.do_mapping_network,
cond_layers=hps.cond_layers,
map_cond=hps.map_cond)
dis_model = Discriminator(current_res_w, equalized_lr=hps.do_equalized_lr,
do_minibatch_stddev=hps.do_minibatch_stddev,
end_shape=(hps.start_res_h, hps.start_res_w),
resize_method=hps.resize_method, cgan_nclasses=num_classes,
label_list=label_list)
if use_ema_sampling:
sampling_model = Generator(current_res_w, hps.res_w, use_pixel_norm=hps.do_pixel_norm,
start_shape=(hps.start_res_h, hps.start_res_w),
equalized_lr=hps.do_equalized_lr,
traditional_input=hps.do_traditional_input,
add_noise=hps.do_add_noise,
resize_method=hps.resize_method,
use_mapping_network=hps.do_mapping_network,
cond_layers=hps.cond_layers,
map_cond=hps.map_cond)
return gen_model, mapping_network, dis_model, sampling_model
else:
return gen_model, mapping_network, dis_model
def build_optimizers(hps):
optimizer_g = build_optimizer_from_hps(hps)
optimizer_d = build_optimizer_from_hps(hps)
optimizer_m = build_optimizer_from_hps(hps, lr_multiplier=1.)
return optimizer_g, optimizer_d, optimizer_m
def build_data_iterator(hps, files, current_res_h, current_res_w, batch_size=None, label_list=None,
num_shards=None, shard_index=None):
random.shuffle(files)
dataset = get_dataset(files, current_res_h, current_res_w, hps.epochs_per_res, batch_size,
label_list=label_list, num_shards=None, shard_index=None)
it = dataset.make_one_shot_iterator()
return it
@name_scope("optimizer")
def build_optimizer_from_hps(hps, lr_multiplier=1.):
if hps.optimizer == "adam":
return tf.train.AdamOptimizer(learning_rate=hps.learning_rate*lr_multiplier,
beta1=hps.adam_beta1,
beta2=hps.adam_beta2)
elif hps.optimizer == "gradient_descent":
return tf.train.GradientDescentOptimizer(learning_rate=hps.learning_rate*lr_multiplier)
@name_scope("generate_summary")
def generate_image_summary(images, name, step=None):
"""
:param images: images to display (batch_size, h, w, c)
:param name: name for summary
:param batch_size: if batch size in get_shape() is ambiguous, use this
:param step: step to specify for summary
:return: summary for grid of images
"""
#if images.get_shape()[0] % 4 != 0:
# raise ValueError("batch must be divisible by 4")
images = tf.pad(images, [[0, (4-images.get_shape()[0] % 4)], [0, 0], [0, 0], [0, 0]])
images = tf.clip_by_value(images, -1., 1.) # essential due to how tf.summary.image scales values
grid = tf.contrib.gan.eval.image_grid(
images,
grid_shape=[images.get_shape()[0]//4, 4],
image_shape=images.get_shape().as_list()[1:3])
if tf.executing_eagerly():
return tf.contrib.summary.image(name, grid, step=step)
else:
return tf.summary.image(name, grid)
def backup_model_for_this_phase(save_paths, writer_path):
copy(save_paths.gen_model, writer_path)
copy(save_paths.dis_model, writer_path)
copy(save_paths.sampling_model, writer_path)
if os.path.exists(save_paths.mapping_network):
copy(save_paths.mapping_network, writer_path)
copy(save_paths.alpha, os.path.join(writer_path, "alpha.txt"))
copy(save_paths.step, os.path.join(writer_path, "step.txt"))
copytree(os.path.dirname(save_paths.gen_optim),
os.path.join(writer_path, os.path.basename(os.path.dirname(save_paths.gen_optim))))
copytree(os.path.dirname(save_paths.dis_optim),
os.path.join(writer_path, os.path.basename(os.path.dirname(save_paths.dis_optim))))
if os.path.exists(save_paths.mn_optim):
copytree(os.path.dirname(save_paths.mn_optim),
os.path.join(writer_path, os.path.basename(os.path.dirname(save_paths.mn_optim))))
def save_alpha_and_step(alpha, step, save_paths):
with open(save_paths.alpha, "w") as f:
f.write(str(alpha))
with open(save_paths.step, "w") as f:
f.write(str(step))
def save_models_and_optimizers(sess, gen_model, dis_model, mapping_network, sampling_model,
optimizer_g, optimizer_d, optimizer_m, save_paths):
"""
:param sess: session if in graph mode, otherwise unused
:param alpha: float value for alpha at time of saving
:param gen_model: generator with defined variables
:param dis_model: discriminator with defined variables
:param optimizer_g: generator's optimizer
:param optimizer_d: discriminator's optimizer
:param save_paths: paths containing models, optimizers, and alpha on disk
"""
gen_model.save_weights(save_paths.gen_model, save_format='h5')
dis_model.save_weights(save_paths.dis_model, save_format='h5')
sampling_model.save_weights(save_paths.sampling_model, save_format='h5')
if mapping_network is not None:
mapping_network.save_weights(save_paths.mapping_network, save_format='h5')
if tf.executing_eagerly():
saver_d = tf.contrib.eager.Saver(var_list=optimizer_d.variables())
saver_d.save(file_prefix=save_paths.dis_optim)
saver_g = tf.contrib.eager.Saver(var_list=optimizer_g.variables())
saver_g.save(file_prefix=save_paths.gen_optim)
saver_g = tf.contrib.eager.Saver(var_list=optimizer_m.variables())
saver_g.save(file_prefix=save_paths.mn_optim)
else:
saver_d = tf.train.Saver(var_list=optimizer_d.variables())
saver_d.save(sess=sess, save_path=save_paths.dis_optim)
saver_g = tf.train.Saver(var_list=optimizer_g.variables())
saver_g.save(sess=sess, save_path=save_paths.gen_optim)
if len(optimizer_m.variables()) > 0:
saver_g = tf.train.Saver(var_list=optimizer_m.variables())
saver_g.save(sess=sess, save_path=save_paths.mn_optim)
def restore_models_and_optimizers(sess, gen_model, dis_model, mapping_network, sampling_model,
optimizer_g, optimizer_d, optimizer_m, save_paths):
"""
:param sess: session if in graph mode, otherwise unused
:param gen_model: generator with defined variables
:param dis_model: discriminator with defined variables
:param optimizer_g: generator's optimizer
:param optimizer_d: discriminator's optimizer
:param save_paths: paths containing models, optimizers, and alpha on disk
:return: read alpha value
"""
if gen_model is not None:
gen_model.load_weights(save_paths.gen_model, by_name=True)
if dis_model is not None:
dis_model.load_weights(save_paths.dis_model, by_name=True)
if mapping_network is not None:
mapping_network.load_weights(save_paths.mapping_network, by_name=True)
if sampling_model is not None:
sampling_model.load_weights(save_paths.sampling_model, by_name=True)
if optimizer_g is not None:
vars_g = filter_vars_with_checkpoint(chkpt_path=save_paths.gen_optim,
var_list=optimizer_g.variables())
if optimizer_d is not None:
vars_d = filter_vars_with_checkpoint(chkpt_path=save_paths.dis_optim,
var_list=optimizer_d.variables())
if optimizer_m is not None and \
mapping_network is not None and \
os.path.exists(os.path.dirname(save_paths.mn_optim)):
vars_mn = filter_vars_with_checkpoint(chkpt_path=save_paths.mn_optim,
var_list=optimizer_m.variables())
if tf.executing_eagerly():
if optimizer_d is not None:
saver_d = tf.contrib.eager.Saver(var_list=vars_d)
saver_d.restore(file_prefix=tf.train.latest_checkpoint(os.path.dirname(save_paths.dis_optim)))
if optimizer_g is not None:
saver_g = tf.contrib.eager.Saver(var_list=vars_g)
saver_g.restore(file_prefix=tf.train.latest_checkpoint(os.path.dirname(save_paths.gen_optim)))
if optimizer_m is not None and os.path.exists(os.path.dirname(save_paths.mn_optim)):
saver_g = tf.contrib.eager.Saver(var_list=vars_mn)
saver_g.restore(file_prefix=tf.train.latest_checkpoint(os.path.dirname(save_paths.mn_optim)))
else:
if optimizer_d is not None:
saver_d = tf.train.Saver(var_list=vars_d)
saver_d.restore(sess=sess,
save_path=tf.train.latest_checkpoint(os.path.dirname(save_paths.dis_optim)))
if optimizer_g is not None:
saver_g = tf.train.Saver(var_list=vars_g)
saver_g.restore(sess=sess,
save_path=tf.train.latest_checkpoint(os.path.dirname(save_paths.gen_optim)))
if optimizer_m is not None and \
mapping_network is not None and \
os.path.exists(os.path.dirname(save_paths.mn_optim)):
saver_g = tf.train.Saver(var_list=vars_mn)
saver_g.restore(sess=sess,
save_path=tf.train.latest_checkpoint(os.path.dirname(save_paths.mn_optim)))
def restore_alpha_and_step(save_paths):
step = None
alpha = None
if save_paths.step is not None:
with open(save_paths.step, "r") as f:
step = int(f.read())
if save_paths.alpha is not None:
with open(save_paths.alpha, "r") as f:
alpha = float(f.read())
return alpha, step
def weight_following_ema_ops(average_model, reference_model, decay=.99):
return [tf.assign(average_weight, average_weight*decay + updated_weight*(1-decay)
if updated_weight.trainable else updated_weight)
for average_weight, updated_weight in zip(average_model.weights, reference_model.weights)]
def train(hps, files):
ngpus = hps.ngpus
config = tf.ConfigProto()
if ngpus > 1:
try:
import horovod.tensorflow as hvd
config = tf.ConfigProto()
config.gpu_options.visible_device_list = str(hvd.local_rank())
except ImportError:
hvd = None
print("horovod not available, can only use 1 gpu")
ngpus = 1
# todo: organize
current_res_w = hps.current_res_w
res_multiplier = current_res_w // hps.start_res_w
current_res_h = hps.start_res_h * res_multiplier
tfrecord_input = any('.tfrecords' in fname for fname in files)
# if using tfrecord, assume dataset is duplicated across multiple resolutions
if tfrecord_input:
num_files = 0
for fname in [fname for fname in files if "res%d" % current_res_w in fname]:
for record in tf.compat.v1.python_io.tf_record_iterator(fname):
num_files += 1
else:
num_files = len(files)
label_list = []
total_classes = 0
if hps.label_file:
do_cgan = True
label_list, total_classes = build_label_list_from_file(hps.label_file)
else:
do_cgan = False
print("dataset has %d files" % num_files)
try:
batch_size = int(hps.batch_size)
try_schedule = False
except ValueError:
try_schedule = True
if try_schedule:
batch_schedule = ast.literal_eval(hps.batch_size)
else:
batch_schedule = None
# always generate 32 sample images (should be feasible at high resolutions due to no training)
# will probably need to edit for > 128x128
sample_batch = 32
sample_latent_numpy = np.random.normal(0., 1., [sample_batch, 512])
if do_cgan:
examples_per_class = sample_batch // total_classes
remainder = sample_batch % total_classes
sample_cgan_latent_numpy = None
for i in range(0, total_classes):
class_vector = [0.] * total_classes
class_vector[i] = 1.
if sample_cgan_latent_numpy is None:
sample_cgan_latent_numpy = [class_vector] * (examples_per_class + remainder)
else:
sample_cgan_latent_numpy += [class_vector] * examples_per_class
sample_cgan_latent_numpy = np.array(sample_cgan_latent_numpy)
use_beholder = hps.use_beholder
if use_beholder:
try:
from tensorboard.plugins.beholder import Beholder
except ImportError:
print("Could not import beholder")
use_beholder = False
while current_res_w <= hps.res_w:
if ngpus > 1:
hvd.init()
print("building graph")
if batch_schedule is not None:
batch_size = batch_schedule[current_res_w]
print("res %d batch size is now %d" % (current_res_w, batch_size))
gen_model, mapping_network, dis_model, sampling_model = \
build_models(hps,
current_res_w,
use_ema_sampling=True,
num_classes=total_classes,
label_list=label_list if hps.conditional_type == "acgan" else None)
with tf.name_scope("optimizers"):
optimizer_d, optimizer_g, optimizer_m = build_optimizers(hps)
if ngpus > 1:
optimizer_d = hvd.DistributedOptimizer(optimizer_d)
optimizer_g = hvd.DistributedOptimizer(optimizer_g)
optimizer_m = hvd.DistributedOptimizer(optimizer_m)
with tf.name_scope("data"):
num_shards = None if ngpus == 1 else ngpus
shard_index = None if ngpus == 1 else hvd.rank()
it = build_data_iterator(hps, files, current_res_h, current_res_w, batch_size, label_list=label_list,
num_shards=num_shards, shard_index=shard_index)
next_batch = it.get_next()
real_image = next_batch['data']
fake_latent1 = tf.random_normal([batch_size, 512], 0., 1., name="fake_latent")
fake_latent2 = tf.random_normal([batch_size, 512], 0., 1., name="fake_latent")
fake_label_dict = None
real_label_dict = None
if do_cgan:
fake_label_dict = {}
real_label_dict = {}
for label in label_list:
if hps.cond_uniform_fake:
distribution = np.ones_like([label.probabilities])
else:
distribution = np.log([label.probabilities])
fake_labels = tf.random.categorical(distribution, batch_size)
if label.multi_dim is False:
normalized_labels = (fake_labels - tf.reduce_min(fake_labels)) / \
(tf.reduce_max(fake_labels) - tf.reduce_min(fake_labels))
fake_labels = tf.reshape(normalized_labels, [batch_size, 1])
else:
fake_labels = tf.reshape(tf.one_hot(fake_labels, label.num_classes),
[batch_size, label.num_classes])
fake_label_dict[label.name] = fake_labels
real_label_dict[label.name] = next_batch[label.name]
#fake_label_list.append(fake_labels)
# ideally would handle one dimensional labels differently, theory isn't well supported
# for that though (example: categorical values of short, medium, tall are on one dimension)
# real_labels = tf.reshape(tf.one_hot(tf.cast(next_batch[label.name], tf.int32), num_classes),
# [batch_size, num_classes])
#real_label_list.append(real_labels)
fake_label_tensor = tf.concat([fake_label_dict[l] for l in fake_label_dict.keys()], axis=-1)
real_label_tensor = tf.concat([real_label_dict[l] for l in real_label_dict.keys()], axis=-1)
sample_latent = tf.constant(sample_latent_numpy, dtype=tf.float32, name="sample_latent")
if do_cgan:
sample_cgan_w = tf.constant(sample_cgan_latent_numpy, dtype=tf.float32, name="sample_cgan_latent")
alpha_ph = tf.placeholder(shape=(), dtype=tf.float32, name="alpha")
# From Fig 2: "During a resolution transition,
# we interpolate between two resolutions of the real images"
real_image = real_image*alpha_ph + \
(1-alpha_ph)*upsample(downsample_nv(real_image),
method="nearest_neighbor")
real_image = upsample(real_image, method='nearest_neighbor', factor=hps.res_w//current_res_w)
if do_cgan:
with tf.name_scope("gen_synthesis"):
fake_image = gen_model(alpha_ph, zs=[fake_latent1, fake_latent2], mapping_network=mapping_network,
cgan_w=fake_label_tensor, random_crossover=True)
real_logit, real_class_logits = dis_model(real_image, alpha_ph,
real_label_tensor if hps.conditional_type == "proj" else
None)
fake_logit, fake_class_logits = dis_model(fake_image, alpha_ph,
fake_label_tensor if hps.conditional_type == "proj" else
None)
else:
with tf.name_scope("gen_synthesis"):
fake_image = gen_model(alpha_ph, zs=[fake_latent1, fake_latent2], mapping_network=mapping_network,
random_crossover=True)
real_logit, real_class_logits = dis_model(real_image, alpha_ph) # todo: make work with other labels
fake_logit, fake_class_logits = dis_model(fake_image, alpha_ph)
with tf.name_scope("gen_sampling"):
average_latent = tf.constant(np.random.normal(0., 1., [10000, 512]), dtype=tf.float32)
low_psi = 0.20
if hps.map_cond:
class_vector = [0.] * total_classes
class_vector[0] = 1. # one hot encoding
average_w = tf.reduce_mean(mapping_network(tf.concat([average_latent,
[class_vector]*10000], axis=-1)), axis=0)
sample_latent_lowpsi = average_w + low_psi * \
(mapping_network(tf.concat([sample_latent,
[class_vector]*sample_batch], axis=-1)) - average_w)
else:
average_w = tf.reduce_mean(mapping_network(average_latent), axis=0)
sample_latent_lowpsi = average_w + low_psi * (mapping_network(sample_latent) - average_w)
average_w_batch = tf.tile(tf.reshape(average_w, [1, 512]), [sample_batch, 1])
if do_cgan:
sample_img_lowpsi = sampling_model(alpha_ph, intermediate_ws=sample_latent_lowpsi,
cgan_w=sample_cgan_w)
sample_img_base = sampling_model(alpha_ph, zs=sample_latent, mapping_network=mapping_network,
cgan_w=sample_cgan_w)
sample_img_mode = sampling_model(alpha_ph, intermediate_ws=average_w_batch,
cgan_w=sample_cgan_w)
sample_img_mode = tf.concat([sample_img_mode[0:2] + sample_img_mode[-3:-1]], axis=0)
else:
sample_img_lowpsi = sampling_model(alpha_ph, intermediate_ws=sample_latent_lowpsi)
sample_img_base = sampling_model(alpha_ph, zs=sample_latent, mapping_network=mapping_network)
sample_img_mode = sampling_model(alpha_ph, intermediate_ws=average_w_batch)[0:4]
sample_images = tf.concat([sample_img_lowpsi, sample_img_mode, sample_img_base], axis=0)
sampling_model_init_ops = weight_following_ema_ops(average_model=sampling_model,
reference_model=gen_model)
#sample_img_base = gen_model(sample_latent, alpha_ph, mapping_network)
with tf.name_scope("loss"):
loss_discriminator, loss_generator = hps.loss_fn(real_logit, fake_logit)
if real_class_logits is not None:
for label in label_list:
label_loss = tf.nn.softmax_cross_entropy_with_logits(labels=next_batch[label.name],
logits=real_class_logits[label.name])
loss_discriminator += label_loss * hps.cond_weight * 1./(len(label_list))
tf.summary.scalar("label_loss_real", tf.reduce_mean(label_loss))
if fake_class_logits is not None:
for label in label_list:
label_loss = tf.nn.softmax_cross_entropy_with_logits(labels=fake_label_dict[label.name],
logits=fake_class_logits[label.name])
loss_discriminator += label_loss * hps.cond_weight * 1./(len(label_list))
tf.summary.scalar("label_loss_fake", tf.reduce_mean(label_loss))
loss_generator += label_loss * hps.cond_weight * 1./(len(label_list))
if hps.gp_fn:
gp = hps.gp_fn(fake_image, real_image, dis_model, alpha_ph, real_label_dict,
conditional_type=hps.conditional_type)
tf.summary.scalar("gradient_penalty", tf.reduce_mean(gp))
loss_discriminator += hps.lambda_gp*gp
dp = drift_penalty(real_logit)
tf.summary.scalar("drift_penalty", tf.reduce_mean(dp))
if hps.lambda_drift != 0.:
loss_discriminator = tf.expand_dims(loss_discriminator, -1) + hps.lambda_drift * dp
loss_discriminator_avg = tf.reduce_mean(loss_discriminator)
loss_generator_avg = tf.reduce_mean(loss_generator)
with tf.name_scope("train"):
train_step_d = optimizer_d.minimize(loss_discriminator_avg, var_list=dis_model.trainable_variables)
# todo: test this
with tf.control_dependencies(weight_following_ema_ops(average_model=sampling_model,
reference_model=gen_model)):
train_step_g = [optimizer_g.minimize(loss_generator_avg, var_list=gen_model.trainable_variables)]
if hps.do_mapping_network:
train_step_g.append(
optimizer_m.minimize(loss_generator_avg, var_list=mapping_network.trainable_variables))
with tf.name_scope("summary"):
tf.summary.histogram("real_scores", real_logit)
tf.summary.scalar("loss_discriminator", loss_discriminator_avg)
tf.summary.scalar("loss_generator", loss_generator_avg)
tf.summary.scalar("real_logit", tf.reduce_mean(real_logit))
tf.summary.scalar("fake_logit", tf.reduce_mean(fake_logit))
tf.summary.histogram("real_logit", real_logit)
tf.summary.histogram("fake_logit", fake_logit)
tf.summary.scalar("alpha", alpha_ph)
merged = tf.summary.merge_all()
image_summary_real = generate_image_summary(real_image, "real")
image_summary_fake_avg = generate_image_summary(sample_images, "fake_avg")
#image_summary_fake = generate_image_summary(sample_img_base, "fake")
global_step = tf.train.get_or_create_global_step()
if hps.profile:
builder = tf.profiler.ProfileOptionBuilder
opts = builder(builder.time_and_memory()).order_by('micros').build()
with tf.contrib.tfprof.ProfileContext(hps.model_dir,
trace_steps=[],
dump_steps=[]) as pctx:
with tf.Session(config=config) as sess:
#if hps.tboard_debug:
# sess = tf_debug.TensorBoardDebugWrapperSession(sess, "localhost:6064")
#elif hps.cli_debug:
# sess = tf_debug.LocalCLIDebugWrapperSession(sess)
sess.run(tf.global_variables_initializer())
sess.run(sampling_model_init_ops)
alpha = 1.
step = 0
if os.path.exists(hps.save_paths.gen_model) and os.path.exists(hps.save_paths.dis_model):
if ngpus == 1 or hvd.rank() == 0:
print("restoring")
restore_models_and_optimizers(sess, gen_model, dis_model, mapping_network,
sampling_model,
optimizer_g, optimizer_d, optimizer_m, hps.save_paths)
if os.path.exists(hps.save_paths.alpha) and os.path.exists(hps.save_paths.step):
alpha, step = restore_alpha_and_step(hps.save_paths)
print("alpha")
print(alpha)
if alpha != 1.:
alpha_inc = 1. / (hps.epochs_per_res * (num_files / batch_size))
else:
alpha_inc = 0.
writer_path = \
os.path.join(hps.model_dir, "summary_%d" % current_res_w, "alpha_start_%d" % alpha)
if use_beholder:
beholder = Beholder(writer_path)
writer = tf.summary.FileWriter(writer_path, sess.graph)
writer.add_summary(image_summary_real.eval(feed_dict={alpha_ph: alpha}), step)
print("Starting res %d training" % current_res_w)
t = trange(hps.epochs_per_res * num_files // batch_size, desc='Training')
if ngpus > 1:
sess.run(hvd.broadcast_global_variables(0))
for phase_step in t:
try:
for i in range(0, hps.ncritic):
if hps.profile:
pctx.trace_next_step()
pctx.dump_next_step()
if step % 5 == 0:
summary, ld, _ = sess.run([merged,
loss_discriminator_avg,
train_step_d if not hps.no_train else tf.no_op()],
feed_dict={alpha_ph: alpha})
writer.add_summary(summary, step)
else:
ld, _ = sess.run([loss_discriminator_avg,
train_step_d if not hps.no_train else tf.no_op()],
feed_dict={alpha_ph: alpha})
if hps.profile:
pctx.profiler.profile_operations(options=opts)
if hps.profile:
pctx.trace_next_step()
pctx.dump_next_step()
lg, _ = sess.run([loss_generator_avg,
train_step_g if not hps.no_train else tf.no_op()],
feed_dict={alpha_ph: alpha})
if hps.profile:
pctx.profiler.profile_operations(options=opts)
alpha = min(alpha+alpha_inc, 1.)
#print("step: %d" % step)
#print("loss_d: %f" % ld)
#print("loss_g: %f\n" % lg)
t.set_description('Overall step %d, loss d %f, loss g %f' % (step+1, ld, lg))
if use_beholder:
try:
beholder.update(session=sess)
except Exception as e:
print("Beholder failed: " + str(e))
use_beholder = False
if phase_step < 5 or (phase_step < 500 and phase_step % 10 == 0) or (step % 1000 == 0):
writer.add_summary(image_summary_fake_avg.eval(
feed_dict={alpha_ph: alpha}), step)
#writer.add_summary(image_summary_fake.eval(
# feed_dict={alpha_ph: alpha}), step)
if hps.steps_per_save is not None and step % hps.steps_per_save == 0 and (ngpus == 1 or hvd.rank() == 0):
save_models_and_optimizers(sess,
gen_model, dis_model, mapping_network,
sampling_model,
optimizer_g, optimizer_d, optimizer_m,
hps.save_paths)
save_alpha_and_step(1. if alpha_inc != 0. else 0., step, hps.save_paths)
step += 1
except tf.errors.OutOfRangeError:
break
assert (abs(alpha - 1.) < .1), "Alpha should be close to 1., not %f" % alpha # alpha close to 1. (dataset divisible by batch_size for small sets)
if ngpus == 1 or hvd.rank() == 0:
print(1. if alpha_inc != 0. else 0.)
save_models_and_optimizers(sess,
gen_model, dis_model, mapping_network, sampling_model,
optimizer_g, optimizer_d, optimizer_m,
hps.save_paths)
backup_model_for_this_phase(hps.save_paths, writer_path)
save_alpha_and_step(1. if alpha_inc != 0. else 0., step, hps.save_paths)
# Will generate Out of range errors, see if it's easy to save a tensor so get_next() doesn't need
# a new value
#writer.add_summary(image_summary_real.eval(feed_dict={alpha_ph: 1.}), step)
#writer.add_summary(image_summary_fake.eval(feed_dict={alpha_ph: 1.}), step)
tf.reset_default_graph()
if alpha_inc == 0:
current_res_h *= 2
current_res_w *= 2
| UTF-8 | Python | false | false | 38,380 | py | 14 | train.py | 13 | 0.564982 | 0.558494 | 0 | 722 | 52.157895 | 162 |
protimaru/roma | 8,778,913,188,721 | 831de6484c64a9eab3388c3956f04625b7e293cb | d99f9f3d5ee79ec5283320d9ce6a79570f5eea8a | /posts/views.py | df421a00b9ce72487fc4dab8f7974bf0cdaf416a | []
| no_license | https://github.com/protimaru/roma | 72c648ffd3bee81dd2584d8056397034d9775c23 | 6531ba1d6a697c5182ba1c00240dc7cccdbaad04 | refs/heads/master | 2017-11-20T01:15:07.504443 | 2017-06-19T18:20:40 | 2017-06-19T18:20:40 | 94,805,944 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.db.models import Q
from django.http import HttpResponse
from django.shortcuts import render, render_to_response, get_object_or_404
from posts.models import Post, Comment, Category, SubCategory
from posts.forms import CommentForm
def home(request):
posts = Post.objects.all()
category_ = Category.objects.all()
popular_posts = Post.objects.order_by('-views')[:3]
query = request.GET.get('search')
if query:
posts = posts.filter(
Q(title__icontains=query) |
Q(content__icontains=query)
).distinct()
context = {
'popular_posts': popular_posts,
'posts': posts,
'category': category_
}
return render(request, ['posts/index.html', 'sidebar.html', 'base.html'], context)
def detail(request, slug):
get_post = get_object_or_404(Post, slug=slug)
category_ = Category.objects.all()
comments = Comment.objects.filter(post=get_post)
get_post.views += 1
get_post.save()
form = CommentForm()
if request.POST:
form = CommentForm(request.POST or None)
if form.is_valid():
sub = form.save(commit=False)
sub.save()
print(request.POST['post'])
popular_posts = Post.objects.order_by('-views')[:3]
context = {
'form': form,
'comments': comments,
'popular_posts': popular_posts,
'post': get_post,
'category': category_
}
return render(request, ['posts/detail.html', 'sidebar.html', 'base.html'], context)
def get_tag(tag):
web = ['python', 'html', 'css', 'jquery']
mobile = ['android', 'ios']
if tag == 'web':
return web
elif tag == 'mobile':
return mobile
def category(request, tag):
get = get_object_or_404(Category, category=tag)
posts = Post.objects.filter(category__exact=get)
category_ = Category.objects.all()
query = request.GET.get('search')
if query:
posts = posts.filter(
Q(title__icontains=query) |
Q(content__icontains=query)
).distinct()
popular_posts = Post.objects.order_by('-views')[:3]
context = {
'tag': get_tag(tag),
'popular_posts': popular_posts,
'get': get,
'posts': posts,
'category': category_
}
return render(request, ['posts/index.html', 'sidebar.html', 'base.html'], context)
def sub_category(request, sub_cat):
popular_posts = Post.objects.order_by('-views')[:3]
category_ = Category.objects.all()
# s = SubCategory.objects.filter()
context = {
'popular_posts': popular_posts,
# 'tag': SubCategory.objects.filter(sub_category__exact=sub_cat),
'tag': SubCategory.objects.filter(category__subcategory__sub_category__exact=sub_cat),
'posts': Post.objects.filter(sub_category__sub_category__exact=sub_cat),
'category': category_
}
return render(request, ['posts/index.html', 'sidebar.html', 'base.html'], context)
def about(request):
category_ = Category.objects.all()
popular_posts = Post.objects.order_by('-views')[:3]
context = {
'popular_posts': popular_posts,
'category': category_
}
return render(request, 'about.html', context)
| UTF-8 | Python | false | false | 3,243 | py | 15 | views.py | 6 | 0.610854 | 0.606229 | 0 | 102 | 30.794118 | 94 |
leggitta/Analysis | 8,083,128,498,088 | e21553d6623270f85166a2a12bfc39b05bf7fb23 | 2c5cb98b22b6e4a0aa3e5c6df3f3c421a45a2d46 | /merge_epochs.py | 776094a9a29b348782d23599ce29f8397273810a | []
| no_license | https://github.com/leggitta/Analysis | 8e26a691a621e220f8c7f952c4d9aa73e6b0e563 | 9f4b6b86dd47a4d5ed0b3c8d4b3242cb809855af | refs/heads/master | 2016-09-03T06:39:39.072373 | 2015-11-30T23:14:23 | 2015-11-30T23:14:23 | 41,761,735 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import mne
import os
import numpy as np
# suppress text output
mne.set_log_level(False)
# define the data directory
data_dir = "../Data"
# define the participants
pids = range(1001, 1042)
# additional parameters
nt = 1638 # number of times
nch = 69 # number of channels
npa = len(pids) # number of participants
# define the colors
c = ['c', 'm', 'b', 'r']
# loop through the participants
for p, pid in enumerate(pids):
# create a list to store epochs
epos = []
# loop through the blocks
for b in ['PV0', 'PV1', 'WM0', 'WM1']:
# get the data file
epo_fname = "%s/POSTICA/%d_%s_postica-epo.fif" % (data_dir, pid, b)
assert os.path.exists(epo_fname)
# read the epoch file
epo = mne.read_epochs(epo_fname, proj=False, add_eeg_ref=False)
# re-code the event types
if 'PV' in b:
epo.events[:, 2] /= 4
epo.event_id = {'PV_NEU': 4096, 'PV_NEG': 8192}
elif 'WM' in b:
epo.event_id = {'WM_NEU': 16384, 'WM_NEG': 32768}
# update the list
epos.append(epo)
# concatenate all epochs
epo = mne.epochs.concatenate_epochs(epos)
assert epo._data.shape == (160, nch, nt)
# save the data
epo.save('%s/EPODATA/%d-epo.fif' % (data_dir, pid))
| UTF-8 | Python | false | false | 1,287 | py | 81 | merge_epochs.py | 49 | 0.591298 | 0.559441 | 0 | 51 | 24.235294 | 75 |
cpaszul/advent-of-code-2018 | 19,473,381,724,776 | 05f0f3f972669617456544c4392c94667ebbc84e | 75bddb587f00d67084f06d12810c1f2a797800c7 | /day10.py | 0fa32bc5fe4c542ae9a08b7153fe6e20c8df3508 | []
| no_license | https://github.com/cpaszul/advent-of-code-2018 | 742cd95f870c93ac0d31d42ff3bcf163c5bb0193 | 0444d93cb9e40713e20fbe8dc4ea1ed06e221a23 | refs/heads/master | 2021-06-23T01:07:55.875462 | 2020-12-08T19:35:53 | 2020-12-08T19:35:53 | 160,435,926 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import re
DEFAULT_INPUT = 'day10.txt'
class Point:
def __init__(self, x, y, vx, vy):
self.x = x
self.y = y
self.time = 0
self.vx = vx
self.vy = vy
def advance(self, n=1):
for _ in range(n):
self.x += self.vx
self.y += self.vy
self.time += 1
def reverse(self, n=1):
for _ in range(n):
self.x -= self.vx
self.y -= self.vy
self.time -= 1
def set_to_time(self, n):
if n > self.time:
self.advance(n - self.time)
elif n < self.time:
self.reverse(self.time - n)
def day_10(loc=DEFAULT_INPUT):
r = re.compile(r'position=< ?(-?\d+), +(-?\d+)> velocity=< ?(-?\d+), +(-?\d+)>')
points = []
with open(loc) as f:
for line in f.readlines():
line = line.rstrip()
m = r.match(line)
x = int(m.group(1))
y = int(m.group(2))
vx = int(m.group(3))
vy = int(m.group(4))
points.append(Point(x, y, vx, vy))
area = total_area(points)
t = 0
while True:
for point in points:
point.advance()
new_area = total_area(points)
if new_area > area:
closest_at = t
break
else:
area = new_area
t += 1
draw(points, t)
return t
def total_area(points):
min_x = min(points, key=lambda p:p.x).x
min_y = min(points, key=lambda p:p.y).y
max_x = max(points, key=lambda p:p.x).x
max_y = max(points, key=lambda p:p.y).y
return (max_x - min_x + 1) * (max_y - min_y + 1)
def draw(points, t=None):
if t:
for point in points:
point.set_to_time(t)
min_x = min(points, key=lambda p:p.x).x
min_y = min(points, key=lambda p:p.y).y
max_x = max(points, key=lambda p:p.x).x
max_y = max(points, key=lambda p:p.y).y
grid = []
width = max_x - min_x + 1
height = max_y - min_y + 1
for _ in range(height):
row = ['.' for _ in range(width)]
grid.append(row)
for point in points:
mod_x = point.x - min_x
mod_y = point.y - min_y
grid[mod_y][mod_x] = '#'
print('\n'.join(''.join(row) for row in grid))
if __name__ == '__main__':
print('Solution for Part Two:', day_10())
| UTF-8 | Python | false | false | 2,345 | py | 25 | day10.py | 25 | 0.479318 | 0.470362 | 0 | 86 | 26.267442 | 84 |
diemol/programming_foundations_with_python | 3,427,383,907,649 | 76df96e44625011f951ecb61e6542965eb64a960 | 338fc107b70382bfd425c9b5663a70472fd0b061 | /movies/entertainment_center.py | 9b1032b37ff5f1756555c30072393e8e657aaf7e | []
| no_license | https://github.com/diemol/programming_foundations_with_python | 6ca978b4a503a96b67196257f306687a067c2394 | 6663b76b1f2054acc1c823c6aed06901e90a07a9 | refs/heads/master | 2016-08-11T11:11:23.953304 | 2016-02-09T19:46:54 | 2016-02-09T19:46:54 | 51,385,486 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import media
import fresh_tomatoes
toy_story = media.Movie("Toy Story",
"A story of a boy and his toys that come to life",
"https://upload.wikimedia.org/wikipedia/en/1/13/Toy_Story.jpg",
"https://www.youtube.com/watch?v=4KPTXpQehio")
avatar = media.Movie("Avatar",
"A marine on an alien planet",
"https://upload.wikimedia.org/wikipedia/en/b/b0/Avatar-Teaser-Poster.jpg",
"https://www.youtube.com/watch?v=5PSNL1qE6VY")
movies = [toy_story, avatar, toy_story, avatar, toy_story, avatar]
# fresh_tomatoes.open_movies_page(movies)
| UTF-8 | Python | false | false | 669 | py | 8 | entertainment_center.py | 6 | 0.587444 | 0.575486 | 0 | 16 | 40.625 | 95 |
nanome-ai/plugin-matryx | 17,592,186,062,244 | 1c935cb7b3fec2e2245ace6011f3834fb87b6b4b | 69875004057402c50f428f3ada7d9a00559633e7 | /nanome_matryx/menus/select_winners/UpdateRoundMenu.py | 2d1359d59d211d7e291b0c72878b3611f9cf0ae9 | [
"MIT"
]
| permissive | https://github.com/nanome-ai/plugin-matryx | 20ad971da14860672dab4c0952d20c570fe58421 | 4030afd81348610e16c0787a74fca29aea601f33 | refs/heads/master | 2020-06-24T20:57:01.046999 | 2020-01-14T18:48:16 | 2020-01-14T18:48:16 | 199,087,399 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import requests
from functools import partial
from datetime import datetime, timedelta
import calendar
import math
from components.Calendar import Calendar
import nanome
import utils
from nanome.util import Logs
class UpdateRoundMenu:
def __init__(self, _plugin, select_winners_menu, on_close):
self._plugin = _plugin
menu = nanome.ui.Menu.io.from_json('menus/json/select_winners/update_round.json')
menu.register_closed_callback(on_close)
self._menu = menu
self._menu_select_winners = select_winners_menu
self._button_create = menu.root.find_node('Confirm').get_content()
self._button_create.register_pressed_callback(self.update_new_round)
self._button_cancel = menu.root.find_node('Cancel').get_content()
self._button_cancel.register_pressed_callback(on_close)
self._input_bounty = menu.root.find_node('Bounty Input').get_content()
left_container = menu.root.find_node('Start Cal Container')
self._calendar_start = Calendar(_plugin, left_container)
right_container = menu.root.find_node('End Cal Container')
self._calendar_end = Calendar(_plugin, right_container)
now = datetime.now()
self._start_datetime = now
self._end_datetime = now + timedelta(days=30)
self._calendar_start.set_datetime(self._start_datetime)
self._calendar_start.set_readonly(True)
self._calendar_end.set_datetime(self._end_datetime)
self._calendar_end.set_min_datetime(now + timedelta(hours=1))
self._calendar_end.set_max_datetime(now + timedelta(days=365))
self._calendar_end.register_changed_callback(self.update_round_end)
def show_menu(self, button=None):
self._plugin.open_menu(self._menu)
def update_new_round(self, button):
if not self._input_bounty.input_text:
self._plugin._modal.show_error('please enter a round bounty')
return
round_info = (
utils.date_to_timestamp(self._calendar_start._datetime), # start
utils.diff_seconds(self._start_datetime, self._end_datetime), # duration
60 * 60 * 24 * 7, # review
self._plugin._web3.to_wei(self._input_bounty.input_text) # bounty
)
self._menu_select_winners.select_winners(1, round_info)
def update_round_end(self, dt):
self._end_datetime = dt
self._plugin.refresh_menu() | UTF-8 | Python | false | false | 2,449 | py | 40 | UpdateRoundMenu.py | 23 | 0.658636 | 0.652511 | 0 | 66 | 36.121212 | 89 |
dyh1998/djangoweb | 8,864,812,519,070 | d2302a399de7ed3a493776258da8b49c7b4af04f | c3b96f6f23370c5d99667a7bf2e5b30679e990fc | /user/models.py | b8bd7a9428f7b898d0c640a9a0a16703fcfade0f | []
| no_license | https://github.com/dyh1998/djangoweb | 5c25012da101571db906ef63d3dd50387658ac0f | 5156651b197acc788daaf22d2e58329294c3acf8 | refs/heads/main | 2023-03-13T02:22:18.475184 | 2021-03-08T14:47:59 | 2021-03-08T14:47:59 | 341,481,731 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
# class Profile(models.Model):
# GENDER_CHOICES = (('M', 'Male'), ('F', 'Female'))
# user = models.OneToOneField(User, on_delete=models.CASCADE, verbose_name='用户')
# gender = models.CharField(max_length=1, choices=GENDER_CHOICES, verbose_name='性别') # 性别
# birth_date = models.DateField(null=True, blank=True, verbose_name='出生日期') # 出生日期
# tel_number = models.CharField(max_length=11)
| UTF-8 | Python | false | false | 547 | py | 33 | models.py | 21 | 0.676301 | 0.67052 | 0 | 11 | 45.181818 | 94 |
chea-young/Financial-Statistics-Practice-Using-Python | 9,620,726,772,177 | d7132925d9402e9f75ab3e1acb289e0c9e8ab49d | ed5d4fcb6566030a491911dcb89a85c823cf0f7f | /13주차/45.py | 8347bab89ea585c92725eb74be960e336f7fd75d | []
| no_license | https://github.com/chea-young/Financial-Statistics-Practice-Using-Python | 8d02fa66e33e4ccdf1761467d586f4b49237e4b8 | eac6a51b8798750117c1bd2d497e8ccf6e9caf3b | refs/heads/master | 2023-07-01T00:16:01.662422 | 2021-08-11T10:27:42 | 2021-08-11T10:27:42 | 356,812,926 | 0 | 0 | null | false | 2021-07-10T13:39:48 | 2021-04-11T08:36:54 | 2021-07-04T05:32:39 | 2021-07-10T13:39:47 | 6,814 | 0 | 0 | 0 | Python | false | false | """ ----------------------- 파이썬 금융통계 실습 ---------------------------"""
# 지난 시간에 간단히 계산했던 평균과 기대값을 조금 더 살펴봅시다.
#%% 1. 평균과 기대값
# 평균: 어떤 데이터를 하나로 요약할 수 있는 대표값으로서 일반적으로 산술평균을 의미
# average=1/n*sum(x_i)=sum(1/n*x_i)
# 여기서 x_i는 i번째 데이터, # n은 자료의 갯수, 즉 x_i들의 갯수
#
# 기대값: 데이터의 값에 데이터가 발생할 확률을 곱해서 구한 평균
# E(x)=sum(p_i*x_i)
# 여기서, p_i는 데이터 x_i가 발생할 확률, 즉 P(x_i)=p_i
#
# 평균과 기대값의 차이?
# - 평균: 사후적인 개념 ==> 평균을 계산하는 경우 동일한 가중치를 갖는 과거 데이터로부터 계산
# - 기대값: 사전적인 개념 ==> 평균을 계산하기 위해 미래 데이터가 발생가능한 확률을 부여한후
# 확률과 데이터의 곱을 합하여 계산
# 주사위 예제를 통해서 평균과 기대값의 차이를 살펴보도록 합시다.
# 우선, 1~6까지 발생할 가능성이 동일한 주사위를 던진 결과를 생성하여 봅시다.
import numpy as np
n=10 # 주사위 던진 횟수
dice=np.random.randint(1,7,size=n) # 주사위 던진 결과 저장
# 평균
average=sum(dice)/n
# 기대값: 주사위의 6개 수가 발생할 확률이 1/6으로 동일하다고 가정
case=np.array([1,2,3,4,5,6])
prob=np.array([1/6,1/6,1/6,1/6,1/6,1/6])
Expectation=sum(case*prob) # NOTE sum(p_i*x_i) 이 공식 사용
print(Expectation)
#%% 위의 평균과 기대값을 비교해 보니 조금 다릅니다. # 왜 이런 차이가 발생할까요?
# 기대값은 사전적으로 계산한 이론적인 평균인 반면 평균은 데이터로부터 경험적으로 계산한 평균
# 따라서, 과거 데이터의 요약에서는 평균(사후적)을 많이 사용하고,
# 미래 데이터의 요약에서는 기대값(사전적)을 많이 사용
# 그렇다면 평균과 기대값은 항상 다른가요?
# 우리가 미래를 예측한다고 할 때
# 과거의 충분한 양의 데이터를 가지고 있고,
# 과거에 발생한 개별 사건에 대한 정확한 확률만 부여할 수 있다면,
# 평균과 기대값은 일치하게 됩니다.
# 다만, 가정: 미래의 발생할 사건은 과거의 발생할 사건의 무작위적인 반복
# 이를 확인하기 위해 앞의 주사위 예제에서 주사위 던진 횟수를 크게 증가시켜 봅시다.
average=np.zeros(shape=(10000,2))
for n in range(1,10001):
dice=np.random.randint(1,7,size=n)
average[n-1,0]=n
average[n-1,1]=sum(dice)/n
import matplotlib.pyplot as plt
fig=plt.figure()
ax=fig.add_subplot(1,1,1)
ax.plot(average[:,0], average[:,1])
#%% 위의 그림에서 데이터의 갯수가 커질수록 평균은 이론적인 평균값(기대값)인 3.5로
# 수렴해 가는 것을 확인할 수 있음(물론 1~6까지 각각 발생할 확률이 1/6이 맞다는 가정하에)
# 앞의 주사위 예제에서 기대값은 주사위의 1~6까지 각각 발생할 확률을 안다고 가정하였다.
# 하지만, 일반적으로 금융데이터의 경우 각각의 금융데이터가 발생할 확률을 미리 아는 경우는 없다.
# 이런 경우 우리는 각각의 금융데이터가 미래에 발생할 확률을 과거의 자료로 부터 계산(추정)해야 한다.
# 주사위 예제로 부터 1~6까지 각각의 경우가 발생할 확률을 구해서 평균과 비교해보자.
n=100
# 주사위 던진 횟수
dice=np.random.randint(1,7,size=n) # 주사위 던진 결과 저장
average=dice.mean() # 평균계산
import pandas as pd
dice_df=pd.DataFrame(dice, columns=["dice"]) # array --> dataframe
print(dice_df)
dice_df_prob=dice_df.groupby('dice').dice.count()/n # 각각의 경험적 확률 계산
sum(dice_df_prob) # 합이 1인지 확인
case=np.array([1,2,3,4,5,6]) # 주사위 발생 가능 수
prob=dice_df_prob.to_numpy() # NOTE ndarray-like이지만 ndarray가 아니므로 실제 ndarray가 필요한 상황에 사용
Expectation=sum(case*prob) # 기대값 계산
print(Expectation)
#%% 위의 예제에서 평균(average)과 기대값(Expectation)은 정확하게 일치!
# 즉, 미래 발생할 사건의 확률에 대하여 알 수 없는 경우 과거의 충분한 자료로부터 확률을 구하면
# 사후적 평균과 사전적 기대값은 일치하게 된다.
# 여기서 중요한 것은 미래사건의 확률을 구하기 위하여 미래사건의 발생을 잘 반영하는
# 과거자료를 사용하여야 한다.
# 주가 예제: 2020년 1월 1일부터 2020년 12월 31일까지 KOSPI 일별주가수익률에 대하여
# 일별주가수익률이 양인 경우를 1, 음인 경우를 0으로 두고 다음을 계산하여 봅시다.
# i) 데이터를 바탕으로 일별주가수익률이 오를 확률과 내릴 확률
# ii) 과거 1년치 자료를 보았을 때 2021년 1월 1일 주가가 오를 확률은 얼마입니까?
import yfinance as yf
kospi = yf.download('^KS11', start="2020-01-01", end="2020-12-31")
kospi = yf.download('^KS11', start="2020-01-01", end="2020-12-31")['Adj Close']
kospi_rtn = np.log(kospi / kospi.shift(1))
kospi_rtn.plot()
prob_up=sum(kospi_rtn>0)/len(kospi_rtn) # 과거 1년 동안 코스피가 오른 경우에 대한 경험적 확률
print(prob_up)
prob_down=1-prob_up # 과거 1년 동안 코스피가 내린 경우에 대한 경험적 확률
print(prob_down)
# 위의 예시에서 우리는 내일의 주가가 오를지 내를지를 확률적으로 판단하기 위하여
# 얼마정도의 과거 데이터를 보아야 할까요? 다음 강의에서 다루어 봅시다.
| UTF-8 | Python | false | false | 5,684 | py | 90 | 45.py | 78 | 0.666667 | 0.623686 | 0 | 89 | 35.325843 | 88 |
rlbyrne/rlb_MWA | 5,884,105,236,531 | 5d6a0d328f472a4a35169b2f2ad4cf8a2590f4b1 | dec55c8820480cc4bb227fab7130f90104113de9 | /sky_imaging/load_healpix_map_basic.py | a8fea3ff4e7ea808a415d4003c29212b2b50795a | []
| no_license | https://github.com/rlbyrne/rlb_MWA | 0b59d540dd93f01c98c3c031c5088503636c4b7f | aa80e1c461493bf4182f366551edf7affdef3e67 | refs/heads/master | 2022-08-22T12:24:18.054575 | 2022-08-08T21:18:45 | 2022-08-08T21:18:45 | 41,505,677 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
from astropy.io import fits
import numpy as np
import healpy as hp
contents = fits.open('/Users/ruby/Astro/diffuse_map.healfits')
nside = contents[0].header['nside']
ordering = contents[0].header['ordering']
signal_data = contents[0].data
freq = contents[0].header['crval2'] # Frequency in MHz
pixel_vals = contents[1].data['hpx_inds']
contents.close()
stokes_I = np.squeeze(signal_data[:, 0, 0])
stokes_Q = np.squeeze(signal_data[:, 0, 1])
stokes_U = np.squeeze(signal_data[:, 0, 2])
stokes_V = np.squeeze(signal_data[:, 0, 3])
coords = 'C' # Map uses equitorial coordinates, you'll need this param for some healpy functions
if ordering.lower() == 'ring':
nest = False
elif ordering.lower() == 'nested':
nest = True
# Example of using healpy to calculate the pixel RA/Dec values
ra_arr, dec_arr = hp.pixelfunc.pix2ang(
nside, pixel_vals, nest=nest, lonlat=True
)
# Example of converting Stokes I from explicit to implicit pixel ordering
signal_I_implicit = np.full(12*nside**2, hp.pixelfunc.UNSEEN)
signal_I_implicit[pixel_vals] = stokes_I
| UTF-8 | Python | false | false | 1,081 | py | 150 | load_healpix_map_basic.py | 50 | 0.711378 | 0.694727 | 0 | 35 | 29.885714 | 97 |
RCOSDP/RDM-osf.io | 14,396,730,403,294 | f84eced411b3567205b2715c9c978004b5f27ddb | a394b1053f018ff8be63221c61682df03af4937b | /osf_tests/users/test_last_login_date.py | 7115b2dbd1dc6ac66bbb3599dac39af53366a852 | [
"Apache-2.0",
"LGPL-2.0-or-later",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"MIT",
"AGPL-3.0-only",
"LicenseRef-scancode-unknown-license-reference",
"MPL-1.1",
"CPAL-1.0",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-warranty-disclaimer",
"BSD-2-Clause"
]
| permissive | https://github.com/RCOSDP/RDM-osf.io | 81b11d9511f6248ec9bccb6c586b54a58429e1e7 | 5d632eb6d4566d7d31cd8d6b40d1bc93c60ddf5e | refs/heads/develop | 2023-09-01T09:10:17.297444 | 2023-08-28T04:59:04 | 2023-08-28T04:59:04 | 123,298,542 | 12 | 24 | Apache-2.0 | true | 2023-09-12T08:58:28 | 2018-02-28T14:46:05 | 2022-06-01T04:27:04 | 2023-09-12T08:58:28 | 220,791 | 10 | 20 | 18 | Python | false | false | import mock
import pytz
import pytest
import itsdangerous
from datetime import datetime, timedelta
from django.utils import timezone
from website import settings
from osf_tests.factories import (
AuthUserFactory,
SessionFactory
)
from tests.base import OsfTestCase
@pytest.mark.django_db
@pytest.mark.enable_enqueue_task
class TestUserLastLoginDate(OsfTestCase):
def setUp(self):
super(TestUserLastLoginDate, self).setUp()
self.user = AuthUserFactory()
self.session = SessionFactory(
data={
'auth_user_id': self.user._id,
'auth_user_username': self.user.username
}
)
self.cookie = itsdangerous.Signer(settings.SECRET_KEY).sign(self.session._id).decode()
@mock.patch.object(timezone, 'now')
def test_date_last_login_updated_from_none(self, mock_time):
now = datetime(2018, 2, 4, tzinfo=pytz.utc)
mock_time.return_value = now
assert self.user.date_last_login is None
self.app.set_cookie(settings.COOKIE_NAME, self.cookie)
self.app.get(f'{settings.DOMAIN}{self.user._id}') # user page will fail because not emberized
self.user.refresh_from_db()
assert self.user.date_last_login == now
@mock.patch.object(timezone, 'now')
def test_date_last_login_updated_below_threshold(self, mock_time):
now = datetime(2018, 2, 4, tzinfo=pytz.utc)
mock_time.return_value = now
self.user.date_last_login = now
self.user.save()
# Time is mocked one second below the last login date threshold, so it should not change.
mock_time.return_value = now + (settings.DATE_LAST_LOGIN_THROTTLE_DELTA - timedelta(seconds=1))
self.app.set_cookie(settings.COOKIE_NAME, self.cookie)
self.app.get(f'{settings.DOMAIN}{self.user._id}') # user page will fail because not emberized
self.user.refresh_from_db()
# date_last_login is unchanged
assert self.user.date_last_login == now
@mock.patch.object(timezone, 'now')
def test_date_last_login_updated_above_threshold(self, mock_time):
now = datetime(2018, 2, 4, tzinfo=pytz.utc)
mock_time.return_value = now
self.user.date_last_login = now
self.user.save()
# Time is mocked one second below the last login date threshold, so it should not change.
new_time = now + (settings.DATE_LAST_LOGIN_THROTTLE_DELTA + timedelta(seconds=1))
mock_time.return_value = new_time
self.app.set_cookie(settings.COOKIE_NAME, self.cookie)
self.app.get(f'{settings.DOMAIN}{self.user._id}') # user page will fail because not emberized
self.user.refresh_from_db()
# date_last_login is changed!
assert self.user.date_last_login == new_time
| UTF-8 | Python | false | false | 2,815 | py | 1,129 | test_last_login_date.py | 916 | 0.661812 | 0.654707 | 0 | 77 | 35.558442 | 103 |
ekazyam/Study | 4,466,766,023,440 | 1b01688fcd559c04dd69c0c1635e816f4d8e94c5 | fe647f264d49f240fe9e1d03270025900ec474ef | /パーフェクトPython/pp_057_グローバル変数.py | 326084e9bb1dbae4cbe74271b1ceb1aaa19bc3c2 | [
"MIT"
]
| permissive | https://github.com/ekazyam/Study | 0c61d8e5d5cfc3e51a80d0dccf9a71bb6cb16fdc | bd4d6bae8624c7b6e166881c898afa0afd3b0c70 | refs/heads/master | 2021-01-15T16:14:34.889391 | 2016-03-31T08:19:11 | 2016-03-31T08:19:11 | 34,328,191 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # グローバル変数という概念はpythonにはない
# 最大スコープはモジュール単位である。
# globalを指定すると、モジュール無いの変数に直接アクセスできるようになる。
def globaltest():
global hogehoge
hogehoge = 'hogehoge'
return True
globaltest()
print(hogehoge)
| UTF-8 | Python | false | false | 337 | py | 103 | pp_057_グローバル変数.py | 82 | 0.761194 | 0.761194 | 0 | 12 | 15.75 | 41 |
xutian2/hello-world | 17,892,833,775,806 | e47bcc2f63f6090bbfe090aa6381edab3f97b64f | 0dbdb4917628967c77219310e0002cbdc6d83291 | /py1.py | 83dc1d1118649c5f9a79d13f8a615318687f84d4 | []
| no_license | https://github.com/xutian2/hello-world | 940b304aac0fe150964a97baf54f03c058de27ff | 5dc23bb767b640d50aacb3c46a46de4384c0b96c | refs/heads/master | 2020-03-28T16:39:36.238018 | 2018-09-14T02:34:21 | 2018-09-14T02:34:21 | 148,716,484 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def fibs():
a=0
b=1
iwhile a<100:
a,b=b,a+b
return a
f=fibs()
print(fibs)
| UTF-8 | Python | false | false | 103 | py | 1 | py1.py | 1 | 0.456311 | 0.407767 | 0 | 10 | 9.3 | 15 |
ebrahimsalehi1/frontend_projects | 5,291,399,729,558 | 607dc78f3b695194e37a5a018c71d373d01e8415 | 026f69fb57cda18def5720f3f2ded3b13b2a131a | /Python/ShekarAbadSky.py | a80594108c00d566bcfa1876c72c5b73aef3ffe4 | []
| no_license | https://github.com/ebrahimsalehi1/frontend_projects | 54962b0a2447c23f09e40fc50f906f8ed6226886 | 20fb76a6f8411499332c5d8487fc1d45770e598c | refs/heads/master | 2023-04-18T04:51:22.516129 | 2023-04-08T20:48:03 | 2023-04-08T20:48:03 | 245,576,365 | 3 | 0 | null | false | 2023-03-06T23:12:12 | 2020-03-07T05:49:25 | 2021-11-23T20:03:18 | 2023-03-06T23:12:11 | 38,151 | 2 | 0 | 24 | JavaScript | false | false |
line1 = input('').strip().split(' ');
m=int(line1[0])
n=int(line1[0])
countAll = 0
for i in range(m):
line=input('');
countAll = countAll+line.count('*')
print(countAll)
| UTF-8 | Python | false | false | 189 | py | 243 | ShekarAbadSky.py | 208 | 0.571429 | 0.539683 | 0 | 10 | 17.4 | 39 |
Prateek478/ds_algo_problem_solving_python- | 14,766,097,607,056 | e490a024fe238d855655ae084b10b1b4626b035f | 353c64425f87980101aa733088218da6187b8c75 | /37_group_anagrams.py | 4a2239110190d6859e0fe892dc067854b83df4e2 | []
| no_license | https://github.com/Prateek478/ds_algo_problem_solving_python- | 1187a92e336fd092de74fd1052d8d1c544ed1445 | 55e638f284297a7b64ba7a9c6b1240afe894704a | refs/heads/master | 2020-09-26T19:12:09.510421 | 2020-03-04T18:35:52 | 2020-03-04T18:35:52 | 226,322,080 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
Group Anagrams from given list
Anagrams are the words that are formed by similar elements but the orders in which these characters occur differ
Example:
The original list : ['lump', 'eat', 'me', 'tea', 'em', 'plum']
The grouped Anagrams : [['me', 'em'], ['lump', 'plum'], ['eat', 'tea']]
"""
def group_anagrams(lst):
occurances = dict()
for i in lst:
sort_input = "".join(sorted(i))
if sort_input in occurances.keys():
occurances[sort_input].append(i)
else:
occurances[sort_input] = list()
occurances[sort_input].append(i)
return occurances.values()
def group_anagrams_2(lst):
from itertools import groupby
temp = lambda test_list : sorted(test_list)
result = []
for key, val in groupby(sorted(lst, key=temp), temp):
result.append(list(val))
return result
if __name__ == "__main__":
print (group_anagrams(['lump', 'eat', 'me', 'tea', 'em', 'plum']))
print (group_anagrams_2(['lump', 'eat', 'me', 'tea', 'em', 'plum']))
| UTF-8 | Python | false | false | 1,034 | py | 28 | 37_group_anagrams.py | 27 | 0.601547 | 0.599613 | 0 | 30 | 33.466667 | 112 |
olsonm16/python | 8,117,488,229,619 | be983d5e3a74e0959fc863e1e9ebc4c91cca6009 | 314f438766488135c67af70607bf3817a8206260 | /CS112/Lab 5/lab5_1.py | 6886d8fde2a40b50db42b70fe567ec87b1fab7e6 | []
| no_license | https://github.com/olsonm16/python | b649dc871e5c863e47eecd9ba66fe648ff41e76e | bf83d6313ad2824cb7c4a52f57ba65f4c24cab25 | refs/heads/master | 2016-08-04T21:49:12.645932 | 2014-01-12T18:59:54 | 2014-01-12T18:59:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from array_set import ArraySet
from edit_distance import *
from hash_set import HashSet
import string
File = open("wordsEn.txt", "r")
A = HashSet(9973)
for line in File:
line = line.strip('\n')
A.add(hash(line))
File.close()
def find_closest(word):
File = open("wordsEn.txt", "r")
#word = str(word)
currentdistance = 10000
match = "No match"
for line in File:
line = line.strip('\n')
newdistance = edit_distance(line, word)
if newdistance < currentdistance:
currentdistance = newdistance
match = line
return match
File.close()
input_file = open("input.txt", "r")
for line in input_file:
line = line.strip('\n')
list_of_strings = line.split(" ")
for word in list_of_strings:
word = word.lower()
if not A.contains(hash(word)):
print("You said: " + word + ", did you mean: "+str(find_closest(word)) + "?")
| UTF-8 | Python | false | false | 844 | py | 47 | lab5_1.py | 44 | 0.664692 | 0.654028 | 0 | 38 | 21.157895 | 80 |
binh234/eBookstore | 11,768,210,442,432 | 8765d1b3b4179b96e3d2748e68b8c91549690039 | ad3426da477c57f78934ca57f4aa61a035bf7293 | /customer/customer_app/filters.py | 273edc03ef2faca618f2ec1cc18b46fafaa330aa | []
| no_license | https://github.com/binh234/eBookstore | fbac09a09e8b71766cd95c60175cbbcfe5ecc743 | a1fc9daf1aaa347b54a3f227bef2727697627551 | refs/heads/main | 2023-04-03T19:06:37.961356 | 2023-03-31T08:50:18 | 2023-03-31T08:50:18 | 314,125,130 | 0 | 0 | null | false | 2023-03-25T06:48:08 | 2020-11-19T03:24:16 | 2023-02-28T03:43:57 | 2023-03-25T06:47:59 | 41,713 | 0 | 0 | 1 | CSS | false | false | from django_filters import *
from django_filters.widgets import *
from django_filters.fields import *
from django import forms
from django.db import models
from django.db.models import Q
from .models import *
class MultiValueCharFilter(filters.BaseCSVFilter, filters.CharFilter):
def filter(self, qs, value):
values = value or []
if not values:
return qs
lookup = self.field_name + "__" + self.lookup_expr
if self.distinct:
qs = qs.distinct()
query = Q()
for value in values:
if value in EMPTY_VALUES:
return qs
# if not queryset:
# queryset = self.get_method(qs)(**{lookup: value})
# else:
# queryset = queryset | self.get_method(qs)(**{lookup: value})
query |= Q(**{lookup: value})
return self.get_method(qs)(query)
class BooleanForeignFilter(filters.BooleanFilter):
def filter(self, qs, value):
if value is not None:
lookup = self.field_name + "__" + self.lookup_expr
value = not value
print(lookup, value)
return self.get_method(qs)(**{lookup: value})
else:
return qs
class OrderByFilter(filters.ChoiceFilter):
def filter(self, qs, value):
if value:
return qs.order_by(value)
else:
return qs
class BookFilter(FilterSet):
BOOK_ORDER_CHOICE = (
("name", "Tên sách"),
("price", "Giá bán"),
("-avg_rating", "Đánh giá"),
)
order = OrderByFilter(choices=BOOK_ORDER_CHOICE, label="Sắp xếp theo")
ISBN = CharFilter(field_name="ISBN", lookup_expr="icontains", label="ISBN")
name = CharFilter(lookup_expr="icontains", label="Tên sách")
year = NumberFilter(label="Năm xuất bản")
price = RangeFilter(
field_name="price", widget=RangeWidget(attrs={"class": "col"}), label="Giá"
)
topic = MultiValueCharFilter(
field_name="topic__name",
lookup_expr="icontains",
label="Thể loại",
widget=CSVWidget(),
distinct=True,
help_text=None,
)
keyword = MultiValueCharFilter(
field_name="keyword__keyword",
lookup_expr="icontains",
label="Từ khóa",
widget=CSVWidget(),
distinct=True,
help_text=None,
)
publisher = ModelChoiceFilter(
queryset=Publisher.objects.all(), label="Nhà xuất bản"
)
traditional = BooleanForeignFilter(
field_name="traditional", lookup_expr="isnull", label="Sách truyền thống"
)
electronic = BooleanForeignFilter(
field_name="electronic", lookup_expr="isnull", label="Sách điện tử"
)
authors = ModelMultipleChoiceFilter(queryset=Author.objects.all(), label="Tác giả")
# topics = ModelMultipleChoiceFilter(field_name='topic__name', queryset=Topic.objects.values("name").distinct(), label='Thể loại')
class Meta:
model = Book
fields = [
"order",
"ISBN",
"name",
"year",
"price",
"topic",
"keyword",
"publisher",
"traditional",
"electronic",
"authors",
]
filter_overrides = {
models.CharField: {
"filter_class": CharFilter,
"extra": lambda f: {"lookup_expr": "icontains"},
},
}
def kFilter(self, queryset, name, value):
return queryset.filter(**{name: value})
class AuthorFilter(FilterSet):
ORDER_AUTHOR_CHOICE = (
("name", "Tên"),
("book_count", "Số lượng sách"),
)
order = OrderByFilter(choices=ORDER_AUTHOR_CHOICE, label="Sắp xếp theo")
topic = MultiValueCharFilter(
field_name="book__topic__name",
lookup_expr="icontains",
label="Thể loại",
widget=CSVWidget(attrs={"class": "col ml-2"}),
distinct=True,
help_text=None,
)
# topic = ModelChoiceFilter(queryset=Topic.objects.all().distinct(), label="Thể loại", method="topicFilter")
keyword = MultiValueCharFilter(
field_name="book__keyword__keyword",
lookup_expr="icontains",
label="Từ khóa",
widget=CSVWidget(attrs={"class": "col ml-2"}),
distinct=True,
help_text=None,
)
class Meta:
model = Author
fields = ["topic", "keyword", "order"]
def topicFilter(self, queryset, name, value):
field_name = "book__topic"
return queryset.filter(**{field_name: value}).distinct()
class OrderItemFilter(FilterSet):
order_date = DateFromToRangeFilter(
field_name="order__orderTime",
label="Ngày mua",
widget=RangeWidget(attrs={"type": "date"}),
)
class Meta:
model = OrderItem
fields = ["order_date"]
class OrderFilter(FilterSet):
BOOK_CHOICE = (("both", "Cả sách điện tử và truyền thống"),)
ORDER_BY_CHOICE = (
("-orderTime", "Thời gian đặt hàng"),
("-book_count", "Số lượng sách"),
)
order_date = DateFromToRangeFilter(
field_name="orderTime",
label="Ngày đặt hàng",
widget=RangeWidget(attrs={"type": "date"}),
)
status = MultipleChoiceFilter(
field_name="status",
choices=Order.ORDER_STATUS,
label="Trạng thái",
widget=forms.CheckboxSelectMultiple(),
)
book_type = ChoiceFilter(
field_name="orderitem__option",
choices=BOOK_CHOICE,
label="Loại sách",
method="bookFilter",
)
    order = OrderByFilter(choices=ORDER_BY_CHOICE, label="Sort by")
class Meta:
model = Order
fields = ["order_date", "book_type", "order", "status"]
    def bookFilter(self, queryset, name, value):
        if value is None:
            return queryset
        elif value == "both":
            # Orders that also contain at least one item whose option is
            # not "buy" (i.e. the other book type).
            subquery = OrderItem.objects.filter(~Q(option="buy")).values_list(
                "order_id", flat=True
            )
            # Keep orders that have a "buy" item AND appear in the subquery.
            return queryset.filter(**{name: "buy"}, id__in=subquery).distinct()
        # Any other value: fall through without filtering.
        return queryset
| UTF-8 | Python | false | false | 6,225 | py | 70 | filters.py | 20 | 0.57255 | 0.572223 | 0 | 203 | 29.1133 | 134 |
serj162218/schoolprogram | 17,944,373,385,847 | c0954be0d069ec8fe489b1608398aedc6b5e72af | b768363d4eff367959fd4489564641651227e7e2 | /programming language/Married.py | 0025c0e5a6cee0cf35365153c501b9bfc10de448 | []
| no_license | https://github.com/serj162218/schoolprogram | 6496e0693e9af5b799f7aefb653ef603577bea76 | b8871b16a686ade3a2479037e5f771f40704b1e4 | refs/heads/master | 2022-03-18T23:02:21.423864 | 2019-11-25T05:18:54 | 2019-11-25T05:18:54 | 223,866,314 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
name = input("Enter name: ")
age = int(input("Enter age: "))
salary = int(input("Enter Salary: "))
if (18 <= age <= 26 and salary >= 10000) or (age > 85 and salary > 1000000):
print("Welcome {0}!".format(name))
else:
print("Sorry {0}!".format(name))
os.system("pause")
| UTF-8 | Python | false | false | 294 | py | 34 | Married.py | 10 | 0.602041 | 0.534014 | 0 | 9 | 30.666667 | 74 |
tritechpaxos/PaxosSolutions | 5,592,047,459,056 | 703e7d59b02eef055511a9236d5d2647219ec94e | fea795de450992d8421f7adbd3c36253ca21571e | /PaxosProducts/CellConfig/src/cellcfg/cellconfig/ui/web/ras/views.py | ee6bbb3a57b91f290eaf1933ebd9a8283bd22eaa | []
| no_license | https://github.com/tritechpaxos/PaxosSolutions | c4de5c538f33e091c3007ef364b24fa4ef2a4f51 | dc481e143e3695499bce6a3da8e147dd57added0 | refs/heads/master | 2023-07-09T23:59:17.271576 | 2019-11-21T03:02:20 | 2019-11-21T03:02:20 | 63,924,770 | 0 | 1 | null | false | 2023-07-04T03:38:43 | 2016-07-22T04:59:06 | 2019-11-21T03:02:24 | 2023-07-04T03:38:38 | 6,853 | 0 | 1 | 0 | C | false | false | # -*- coding: utf-8 -*-
###########################################################################
# Cellcfg Copyright (C) 2016 triTech Inc. All Rights Reserved.
#
# This file is part of Cellcfg.
#
# Cellcfg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Cellcfg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cellcfg. If not, see <http://www.gnu.org/licenses/>.
###########################################################################
import logging
from flask import request, redirect, url_for, abort, render_template
from .. import app
from cellconfig.model import Cell
from cellconfig.ras.model import RasGroup, RasServer, RasTarget, RasAction
from .forms import *
from . import api, event
logger = logging.getLogger(__name__)
@app.template_global()
def status2ctxclass(status):
s = status['status']
if s == 'running':
return 'success'
elif s == 'stopped':
return 'active'
elif s == 'warning':
return 'warning'
elif s == 'error':
return 'danger'
else:
return ''
@app.template_global()
def actiontype2label(atype):
return RasActionForm.LABEL[atype]
@app.template_global()
def status2text(status):
s = status['status']
if s == 'running':
return u'正常'
elif s == 'stopped':
return u'停止'
elif s == 'warning':
return u'一部停止'
elif s == 'error':
return u'異常'
else:
return u'不明'
@app.route('/rascell', methods=['GET', 'POST'])
def rascell_list():
if request.method == 'GET':
cells = Cell.select().where(Cell.type == 'RAS')
return render_template('ras/cell_list.html', cells=cells)
elif request.method == 'POST':
form = RasCellForm(request.form)
if not form.validate():
return render_template('ras/cell_create.html', form=form)
form.create()
return redirect(url_for('rasgrp_list'))
@app.route('/rascell/create')
def rascell_create():
form = RasCellForm()
return render_template('ras/cell_create.html', form=form)
@app.route('/rasclstr', methods=['GET', 'POST'])
def rasgrp_list():
if request.method == 'GET':
if Cell.select().where(Cell.type == 'RAS').count() == 0:
return redirect(url_for('rascell_list'))
grps = RasGroup.select()
return render_template('ras/ras_index.html', groups=grps)
elif request.method == 'POST':
form = RasGroupForm(request.form)
if not form.validate():
return render_template('ras/group_create.html', form=form)
logger.debug('URL: {}'.format(request.url_root))
grp = form.create(request.url_root)
logger.debug('GROUP: {}'.format(str(grp)))
return redirect(url_for('rasgrp_list'))
@app.route('/rasclstr/create')
def rasgrp_create():
form = RasGroupForm()
return render_template('ras/group_create.html', form=form)
@app.route('/rasclstr/<int:id>', methods=['GET', 'POST'])
def rasgrp_detail(id):
if request.method == 'GET':
group = RasGroup.get(RasGroup.id == id)
return render_template('ras/group_show.html', group=group)
elif request.method == 'POST':
if request.form['operation'] == 'Delete':
RasGroupForm.delete(id)
return redirect(url_for('rasgrp_list'))
@app.route('/rasclstr/<int:id>/update')
def rasgrp_update(id):
pass
@app.route('/rasclstr/<int:id>/delete')
def rasgrp_delete(id):
grp = RasGroup.get(RasGroup.id == id)
form = RasGroupForm(data=RasGroupForm.obj2dict(grp))
return render_template('ras/group_delete.html', form=form, id=id)
@app.route('/rasclstr/<int:grpid>/rassrv/create')
def rassrv_create(grpid):
form = RasServerForm(group=grpid)
return render_template('ras/server_create.html', form=form, grpid=grpid)
@app.route('/rasclstr/<int:grpid>/rassrv/<int:srvid>/delete')
def rassrv_delete(grpid, srvid):
srv = RasServer.get(RasServer.id == srvid)
form = RasServerForm(obj=srv)
return render_template(
'ras/server_delete.html',
form=form, grpid=grpid, srvid=srvid, group_name=srv.group.name)
@app.route('/rasclstr/<int:grpid>/rassrv/<int:srvid>', methods=['POST'])
def rassrv_detail(grpid, srvid):
if request.form['operation'] == 'Delete':
RasServerForm.delete(srvid)
else:
abort(400)
return redirect(url_for('rasgrp_detail', id=grpid))
@app.route('/rasclstr/<int:grpid>/rassrv', methods=['POST'])
def rassrv_list(grpid):
if request.method == 'POST':
form = RasServerForm(request.form, group=grpid)
if not form.validate():
return render_template('ras/server_create.html',
form=form, grpid=grpid)
form.create(grpid)
return redirect(url_for('rasgrp_detail', id=grpid))
_TARGET_FORMS = {
'cell': RasTargetCellForm,
'ras': RasTargetRasForm,
'app': RasTargetAppForm,
}
@app.route('/rasclstr/<int:grpid>/rastgt/create/<typ>')
def rastgt_create(grpid, typ):
form = _TARGET_FORMS[typ]()
return render_template(
'ras/target_create.html', form=form, grpid=grpid, typ=typ)
@app.route('/rasclstr/<int:grpid>/rastgt', methods=['POST'])
def rastgt_list(grpid):
typ = request.form['type']
form = _TARGET_FORMS[typ](request.form)
if not form.validate():
return render_template(
'ras/target_create.html', form=form, grpid=grpid, typ=typ)
form.create(grpid)
return redirect(url_for('rasgrp_detail', id=grpid))
@app.route('/rasclstr/<int:grpid>/rastgt/delete/<typ>/<int:tgtid>')
def rastgt_delete(grpid, typ, tgtid):
tgt = RasTarget.get(RasTarget.id == tgtid)
fclass = _TARGET_FORMS[typ]
form = fclass(data=fclass.obj2dict(tgt))
return render_template(
'ras/target_delete.html',
form=form, grpid=grpid, typ=typ, tgtid=tgtid,
group_name=tgt.group.name)
@app.route('/rasclstr/<int:grpid>/rastgt/<int:tgtid>')
def rastgt_detail(grpid, tgtid):
tgt = RasTarget.get(RasTarget.id == tgtid)
return render_template('ras/target_show.html', target=tgt)
@app.route('/rasclstr/<int:grpid>/rastgt/<typ>/<int:tgtid>', methods=['POST'])
def rastgt_detail_op(grpid, typ, tgtid):
if request.form['operation'] == 'Delete':
_TARGET_FORMS[typ].delete(tgtid)
else:
abort(400)
return redirect(url_for('rasgrp_detail', id=grpid))
_ACTION_FORMS = {
'smtp': RasActionMailForm,
'syslog': RasActionSyslogForm,
'http': RasActionHttpForm,
'restart': RasActionRestartForm,
'script': RasActionScriptForm,
}
@app.route('/rasclstr/<int:grpid>/rastgt/<int:tgtid>/rasact/create/<atype>')
def rasact_create(grpid, tgtid, atype):
form = _ACTION_FORMS[atype]()
tgt = RasTarget.get(RasTarget.id == tgtid)
return render_template(
'ras/action_create.html',
form=form, grpid=grpid, target=tgt, atype=atype)
@app.route('/rasclstr/<int:grpid>/rastgt<int:tgtid>/rasact', methods=['POST'])
def rasact_list(grpid, tgtid):
atype = request.form['type']
form = _ACTION_FORMS[atype](request.form)
if not form.validate():
tgt = RasTarget.get(RasTarget.id == tgtid)
return render_template(
'ras/action_create.html',
form=form, grpid=grpid, target=tgt, atype=atype)
script = request.files['script'] if 'script' in request.files else None
form.create(tgtid, script)
return redirect(url_for(
'rastgt_detail', grpid=grpid, tgtid=tgtid))
@app.route(
'/rasclstr/<int:grpid>/rastgt/<int:tgtid>/rasact/delete/<typ>/<int:actid>')
def rasact_delete(grpid, tgtid, typ, actid):
act = RasAction.get(RasAction.id == actid)
logger.debug('ACTION: {} type={}'.format(str(act), typ))
fclass = _ACTION_FORMS[typ]
form = fclass(data=fclass.obj2dict(act))
return render_template(
'ras/action_delete.html',
form=form, grpid=grpid, tgtid=tgtid, typ=typ, actid=actid,
group_name=act.target.group.name, target_name=act.target.name)
@app.route(
'/rasclstr/<int:grpid>/rastgt/<int:tgtid>/rasact/update/<typ>/<int:actid>')
def rasact_update(grpid, tgtid, typ, actid):
act = RasAction.get(RasAction.id == actid)
fclass = _ACTION_FORMS[typ]
form = fclass(data=fclass.obj2dict(act))
return render_template(
'ras/action_update.html',
form=form, grpid=grpid, tgtid=tgtid, typ=typ, actid=actid)
@app.route(
'/rasclstr/<int:grpid>/rastgt/<int:tgtid>/rasact/<typ>/<int:actid>',
methods=['POST'])
def rasact_detail(grpid, tgtid, typ, actid):
if request.form['operation'] == 'Delete':
_ACTION_FORMS[typ].delete(actid)
elif request.form['operation'] == 'Update':
fclass = _ACTION_FORMS[typ]
form = fclass(request.form)
act = RasAction.get(RasAction.id == actid)
if not form.validate():
return render_template(
'ras/action_update.html',
form=form, grpid=grpid, tgtid=tgtid, typ=typ, actid=actid)
script = (request.files['script']
if 'script' in request.files else None)
form.update(actid, tgtid, script)
else:
abort(400)
return redirect(url_for(
'rastgt_detail', grpid=grpid, tgtid=tgtid))
| UTF-8 | Python | false | false | 9,722 | py | 503 | views.py | 182 | 0.633223 | 0.630852 | 0 | 296 | 31.763514 | 79 |
cihanyatbaz/Trajectories | 11,862,699,704,647 | 19b424378fb264b6271bcbe8480f633592e2b586 | 074ca044b0fa4f269fa499250eab187ea5b06d82 | /Util.py | e51703cff3ac56e01a4f9faec78c7d14cf172676 | []
| no_license | https://github.com/cihanyatbaz/Trajectories | 11b5925b16bb9dcc5c4e5688ec589722fecb8f31 | e94c7d5640057aa428e0985937a18cceab474556 | refs/heads/master | 2023-01-14T08:30:48.518214 | 2020-11-22T16:06:29 | 2020-11-22T16:06:29 | 221,267,830 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import math
import pandas as pd
def euclideanDistance(pos1, pos2):
return math.sqrt(pow((pos1[0]-pos2[0]),2) + pow((pos1[1]-pos2[1]),2))
def dotProduct(a,b):
result = 0
if len(a) == len(b):
for i in range(len(a)):
result += a[i] * b[i]
return result
else:
return -1
def vectLen(a):
val = dotProduct(a,a)
return math.sqrt(val)
def getSimilarity(a,b):
return dotProduct(a,b) / (vectLen(a) * vectLen(b))
def plotTrajectory(T):
# plotting trajectory T
import matplotlib
matplotlib.use("TKAgg")
from matplotlib import pyplot as plt
X = [int(elem) for elem in T["X (pixel)"].tolist()]
Y = [int(elem) for elem in T["Y (pixel)"].tolist()]
plt.scatter(X, Y, cmap="Paired")
plt.title(str(T["Trajectory"].values[0]) + "and" + str(T["Trajectory"].values[-1]))
plt.show() | UTF-8 | Python | false | false | 865 | py | 11 | Util.py | 4 | 0.595376 | 0.576879 | 0 | 34 | 24.470588 | 87 |
vagaganova/homeworks | 13,048,110,678,947 | 496fa426bdf3593015949c9f057d4e5b728965ec | ee7f2ae9f58c6724c4774ab541661277f5019907 | /HW08092020/HW2.py | b86f242aa104d6642f410b1124699aa4da0867f0 | []
| no_license | https://github.com/vagaganova/homeworks | db55d74a16dd9f7c80442e8a0b8ed613139e6eb5 | f41a0737fc0189d7040f1f1c2914a35c2ab10afc | refs/heads/master | 2023-01-20T06:35:06.984628 | 2020-11-28T19:18:02 | 2020-11-28T19:18:02 | 290,870,266 | 0 | 0 | null | false | 2020-09-20T18:44:35 | 2020-08-27T20:03:57 | 2020-09-20T18:44:24 | 2020-09-20T18:44:21 | 25 | 0 | 0 | 1 | Python | false | false | with open('my_file.txt') as my_file:
lines = my_file.readlines()
print ('Количество строк: ', len(lines))
for i in range(len(lines)):
if len(lines[i].replace(' ', '').replace('\n','')) > 0:
print('В строке ', i, ':', len(lines[i].split(' ')))
else:
print('В строке ', i, ':','0')
| UTF-8 | Python | false | false | 361 | py | 41 | HW2.py | 39 | 0.493976 | 0.487952 | 0 | 8 | 40.5 | 64 |
zhangwei725/PythonBase | 6,897,717,519,616 | ad9c412c82ccafe7874cf5c9f288fd4435a6023c | 147bc95b8b8a36014ec001bb1f79100095fa90a4 | /day16/函数_装饰器_带参数.py | 22bf57d40c5d8195c2bd1871b8f57978ab63352f | []
| no_license | https://github.com/zhangwei725/PythonBase | fd20293b7f7ebee9f11a5df8f4761cad7f1ff4c7 | 6c9165caed48418eb55cf7622359105c9124e580 | refs/heads/master | 2020-04-30T00:32:40.360722 | 2019-04-03T01:41:51 | 2019-04-03T01:41:51 | 176,505,618 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import time
"""
1.装饰带参数
当修饰的函数有参数的时候,需要将参数定义在装饰器内部函数上
2> 如果修饰的函数有返回值
1 > 在调用核心函数的地方接受返回值
2 > 将返回的结果当做内部函数的返回值
"""
# Takes arguments, no return value
def outer(func):
def inner(x, y):
start = time.time()
func(x, y)
end = time.time() - start
print(end)
return inner
# Takes arguments and returns a value
def outer1(func):
def inner(x, y):
start = time.time()
result = func(x, y)
end = time.time() - start
print(end)
return result
return inner
# The decorated function's parameters are not fixed, so forward them
# generically with *args / **kwargs.
def outer2(func):
    def inner(*args, **kwargs):
        start = time.time()
        result = func(*args, **kwargs)
        end = time.time() - start
        print(end)
        return result
    return inner
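# Because inner() forwards *args/**kwargs, functions of any arity can now be
# timed, e.g. (sketch):
#   @outer2
#   def mul(x, y, z=1):
#       return x * y * z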
@outer1
def add(x, y):
return x + y
if __name__ == '__main__':
# add = outer(func=add)
# add(1,3)
print(add(1, 2))
| UTF-8 | Python | false | false | 1,038 | py | 136 | 函数_装饰器_带参数.py | 125 | 0.525 | 0.511905 | 0 | 59 | 13.237288 | 33 |
pradionova/python67 | 9,354,438,810,164 | 636c207964992a39b0839b7f57c42c6c1f0ec5ac | 8cb9325b3f13fcd8596fbf54638ee1a04e0dfef5 | /chapter 1/test_1_12_7.py | eb9fcbb370f981c80816cc4f321f8a8cba3d5d64 | []
| no_license | https://github.com/pradionova/python67 | 781ac8531bbcd00a67997ddd033dc0cd92b09251 | 67c8e536b7a3fd6da02ab1c6f4df6d6ced030e89 | refs/heads/main | 2023-04-25T22:37:07.700104 | 2021-05-29T13:31:36 | 2021-05-29T13:31:36 | 332,923,333 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import math
a = input()
pi = 3.14
if a == 'rectangle':
b = int(input())
c = int(input())
print(b * c)
elif a == 'triangle':
a = int(input())
b = int(input())
c = int(input())
p = (a + b + c) / 2
s = p * (p - a) * (p - b) * (p - c)
print(math.sqrt(s))
elif a == 'circle':
r = int(input())
print(pi * r * r) | UTF-8 | Python | false | false | 384 | py | 37 | test_1_12_7.py | 36 | 0.446629 | 0.435393 | 0 | 19 | 17.789474 | 39 |
PebbleDev/libpebble | 16,140,487,119,338 | d3ffa52efff7890e8915ac96ae073ffb4b8c1fdb | e5c93a0989572cb7a503acf6cd587cc69f4cc219 | /pebble.py | 40e9ed7a000538e58984b29b6981b679d126bf01 | []
| no_license | https://github.com/PebbleDev/libpebble | a67e97de246b43459688dde0b3e362e334482ea5 | 42bac698328ff947d03a428892291e705e99a729 | refs/heads/master | 2021-01-21T00:56:28.883087 | 2013-02-07T02:32:27 | 2013-02-07T02:32:27 | 8,069,845 | 9 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
import serial, codecs, sys, binascii, time, threading, stm32_crc, zipfile
from pprint import pprint
from struct import *
class EndpointSync():
timeout = 10
def __init__(self, pebble, endpoint):
pebble.register_endpoint(endpoint, self.callback)
self.marker = threading.Event()
def callback(self, *args):
self.data = args
self.marker.set()
def get_data(self):
try:
self.marker.wait(timeout=self.timeout)
return self.data[1]
except:
return False
class Pebble(object):
endpoints = {
"TIME": 11,
"VERSION": 16,
"PHONE_VERSION": 17,
"SYSTEM_MESSAGE": 18,
"MUSIC_CONTROL": 32,
"PHONE_CONTROL": 33,
"LOGS": 2000,
"PING": 2001,
"LOG_DUMP": 2002,
"RESET": 2003,
"APP": 2004,
"NOTIFICATION": 3000,
"RESOURCE": 4000,
"APP_MANAGER": 6000,
"PUTBYTES": 48879
}
def __init__(self, id):
self._alive = True
self._endpoint_handlers = {}
self._internal_endpoint_handlers = {
self.endpoints["TIME"]: self._get_time_response,
self.endpoints["VERSION"]: self._version_response,
self.endpoints["PING"]: self._ping_response,
self.endpoints["APP_MANAGER"]: self._appbank_status_response
}
try:
self._ser = serial.Serial("/dev/tty.Pebble"+id+"-SerialPortSe", 19200, timeout=1)
# we get a null response when we connect, discard it
self._ser.read(5)
self._read_thread = threading.Thread(target=self._reader)
self._read_thread.setDaemon(True)
self._read_thread.start()
except:
raise Exception("Failed to connect to Pebble")
def __del__(self):
try:
self._ser.close()
except:
pass
def _reader(self):
while self._alive:
endpoint, resp = self._recv_message()
if resp == None:
continue
if endpoint in self._internal_endpoint_handlers:
resp = self._internal_endpoint_handlers[endpoint](endpoint, resp)
if endpoint in self._endpoint_handlers:
self._endpoint_handlers[endpoint](endpoint, resp)
def _build_message(self, endpoint, data):
return pack("!HH", len(data), endpoint)+data
def _send_message(self, endpoint, data, callback = None):
if endpoint not in self.endpoints:
raise Exception("Invalid endpoint specified")
msg = self._build_message(self.endpoints[endpoint], data)
self._ser.write(msg)
def _recv_message(self):
data = self._ser.read(4)
if len(data) == 0:
return (None, None)
elif len(data) < 4:
raise Exception("Malformed response with length "+str(len(data)))
size, endpoint = unpack("!HH", data)
resp = self._ser.read(size)
return (endpoint, resp)
def register_endpoint(self, endpoint_name, func):
if endpoint_name not in self.endpoints:
raise Exception("Invalid endpoint specified")
endpoint = self.endpoints[endpoint_name]
self._endpoint_handlers[endpoint] = func
def notification_sms(self, sender, body):
ts = str(int(time.time())*1000)
parts = [sender, body, ts]
data = "\x01"
for part in parts:
data += pack("!b", len(part))+part
self._send_message("NOTIFICATION", data)
def notification_email(self, sender, subject, body):
ts = str(int(time.time())*1000)
parts = [sender, subject, ts, body]
data = "\x00"
for part in parts:
data += pack("!b", len(part))+part
self._send_message("NOTIFICATION", data)
def set_nowplaying_metadata(self, track, album, artist):
ts = str(int(time.time())*1000)
parts = [artist, album, track]
data = pack("!b", 16)
for part in parts:
data += pack("!b", len(part))+part
self._send_message("MUSIC_CONTROL", data)
def get_versions(self, async = False):
self._send_message("VERSION", "\x00")
if not async:
return EndpointSync(self, "VERSION").get_data()
def get_appbank_status(self, async = False):
self._send_message("APP_MANAGER", "\x01")
if not async:
return EndpointSync(self, "APP_MANAGER").get_data()
def remove_app(self, appid, index):
data = pack("!bII", 2, appid, index)
self._send_message("APP_MANAGER", data)
def get_time(self, async = False):
self._send_message("TIME", "\x00")
if not async:
return EndpointSync(self, "TIME").get_data()
def set_time(self, timestamp):
data = pack("!bL", 2, timestamp)
self._send_message("TIME", data)
def install_app(self, pbz_path):
with zipfile.ZipFile(pbz_path) as pbz:
binary = pbz.read("pebble-app.bin")
resources = pbz.read("app_resources.pbpack")
apps = self.get_appbank_status()
first_free = 1
for app in apps["apps"]:
if app["index"] == first_free:
first_free += 1
if first_free == apps["banks"]:
raise Exception("No available app banks left")
client = PutBytesClient(self, first_free, "BINARY", binary)
self.register_endpoint("PUTBYTES", client.handle_message)
client.init()
while not client._done:
pass
client = PutBytesClient(self, first_free, "RESOURCES", resources)
self.register_endpoint("PUTBYTES", client.handle_message)
client.init()
while not client._done:
pass
self._add_app(first_free)
"""
Valid commands:
FIRMWARE_AVAILABLE = 0
FIRMWARE_START = 1
FIRMWARE_COMPLETE = 2
FIRMWARE_FAIL = 3
FIRMWARE_UP_TO_DATE = 4
FIRMWARE_OUT_OF_DATE = 5
"""
def system_message(self, command):
data = pack("!bb", 0, command)
self._send_message("SYSTEM_MESSAGE", data)
def ping(self, cookie = 0, async = False):
data = pack("!bL", 0, cookie)
self._send_message("PING", data)
if not async:
return EndpointSync(self, "PING").get_data()
def reset(self):
self._send_message("RESET", "\x00")
def disconnect(self):
self._alive = False
self._ser.close()
def _add_app(self, index):
data = pack("!bI", 3, index)
self._send_message("APP_MANAGER", data)
def _ping_response(self, endpoint, data):
restype, retcookie = unpack("!bL", data)
return retcookie
def _get_time_response(self, endpoint, data):
restype, timestamp = unpack("!bL", data)
return timestamp
def _appbank_status_response(self, endpoint, data):
apps = {}
restype, = unpack("!b", data[0])
if restype == 1:
apps["banks"], apps_installed = unpack("!II", data[1:9])
apps["apps"] = []
appinfo_size = 78
offset = 9
for i in xrange(apps_installed):
app = {}
app["id"], app["index"], app["name"], app["company"], app["flags"], app["version"] = \
unpack("!II32s32sIH", data[offset:offset+appinfo_size])
app["name"] = app["name"].replace("\x00", "")
app["company"] = app["company"].replace("\x00", "")
apps["apps"] += [app]
offset += appinfo_size
return apps
def _version_response(self, endpoint, data):
fw_names = {
0: "normal_fw",
1: "recovery_fw"
}
resp = {}
for i in xrange(2):
fwver_size = 47
offset = i*fwver_size+1
fw = {}
fw["timestamp"],fw["version"],fw["commit"],fw["is_recovery"], \
fw["hardware_platform"],fw["metadata_ver"] = \
unpack("!i32s8s?bb", data[offset:offset+fwver_size])
fw["version"] = fw["version"].replace("\x00", "")
fw["commit"] = fw["commit"].replace("\x00", "")
fw_name = fw_names[i]
resp[fw_name] = fw
resp["bootloader_timestamp"],resp["hw_version"],resp["serial"] = \
unpack("!L9s12s", data[95:120])
resp["hw_version"] = resp["hw_version"].replace("\x00","")
btmac_hex = binascii.hexlify(data[120:126])
resp["btmac"] = ":".join([btmac_hex[i:i+2].upper() for i in reversed(xrange(0, 12, 2))])
return resp
class PutBytesClient(object):
states = {
"NOT_STARTED": 0,
"WAIT_FOR_TOKEN": 1,
"IN_PROGRESS": 2,
"COMMIT": 3,
"COMPLETE": 4,
"FAILED": 5
}
transfer_types = {
"RESOURCES": 4,
"BINARY": 5
}
def __init__(self, pebble, index, transfer_type, buffer):
self._pebble = pebble
self._state = self.states["NOT_STARTED"]
self._transfer_type = self.transfer_types[transfer_type]
self._buffer = buffer
self._index = index
self._done = False
def init(self):
data = pack("!bIbb", 1, len(self._buffer), self._transfer_type, self._index)
self._pebble._send_message("PUTBYTES", data)
self._state = self.states["WAIT_FOR_TOKEN"]
def wait_for_token(self, resp):
res, = unpack("!b", resp[0])
if res != 1:
self.abort()
return
self._token, = unpack("!I", resp[1:])
self._left = len(self._buffer)
self._state = self.states["IN_PROGRESS"]
self.send()
def in_progress(self, resp):
res, = unpack("!b", resp[0])
if res != 1:
self.abort()
return
if self._left > 0:
self.send()
else:
self._state = self.states["COMMIT"]
self.commit()
def commit(self):
data = pack("!bII", 3, self._token & 0xFFFFFFFF, stm32_crc.crc32(self._buffer))
self._pebble._send_message("PUTBYTES", data)
def handle_commit(self, resp):
res, = unpack("!b", resp[0])
if res != 1:
self.abort()
return
self._state = self.states["COMPLETE"]
self.complete()
def complete(self):
data = pack("!bI", 5, self._token & 0xFFFFFFFF)
self._pebble._send_message("PUTBYTES", data)
def handle_complete(self, resp):
res, = unpack("!b", resp[0])
if res != 1:
self.abort()
return
self._done = True
def abort(self):
# error handling? what error handling!
pass
def send(self):
datalen = min(self._left, 2000)
rg = len(self._buffer)-self._left
msgdata = pack("!bII", 2, self._token & 0xFFFFFFFF, datalen)
msgdata += self._buffer[rg:rg+datalen]
self._pebble._send_message("PUTBYTES", msgdata)
self._left -= datalen
def handle_message(self, endpoint, resp):
if self._state == self.states["WAIT_FOR_TOKEN"]:
self.wait_for_token(resp)
elif self._state == self.states["IN_PROGRESS"]:
self.in_progress(resp)
elif self._state == self.states["COMMIT"]:
self.handle_commit(resp)
elif self._state == self.states["COMPLETE"]:
self.handle_complete(resp)
if __name__ == '__main__':
pebble_id = sys.argv[1] if len(sys.argv) > 1 else "402F"
pebble = Pebble(pebble_id)
pebble.notification_sms("libpebble", "Hello, Pebble!")
# install app.pbz
#print "Installing app.pbz"
#pebble.install_app("app.pbz")
# delete all apps
#for app in pebble.get_appbank_status()["apps"]:
# pebble.remove_app(app["id"], app["index"])
versions = pebble.get_versions()
curtime = pebble.get_time()
apps = pebble.get_appbank_status()
print "Pebble "+pebble_id
print "Firmware "+versions["normal_fw"]["version"]
print "Recovery "+versions["recovery_fw"]["version"]
print "Timestamp: "+str(curtime)
print "Installed apps:"
for app in apps["apps"]:
print " - "+app["name"]
| UTF-8 | Python | false | false | 10,431 | py | 1 | pebble.py | 1 | 0.641453 | 0.623143 | 0.001055 | 397 | 25.274559 | 90 |
tchico/budget | 1,949,915,156,331 | 5c3c571c09a0c17c48089a68eb4746b3d3d91249 | 9840040a49c030dbd85223c0e76d8e4921f29b19 | /Budget/website/model/budget.py | a8f5d605b59054cffdf57d7c9cdd3d44b059c952 | []
| no_license | https://github.com/tchico/budget | 71c1fb2531c7e4bb3f0eb8b9489a3c3b8046cf31 | 1f980632f5e360df51150068f2ca1b0df64a59f6 | refs/heads/master | 2015-07-21T17:14:33 | 2015-07-07T22:16:40 | 2015-07-07T22:16:40 | 34,820,742 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | '''
Created on 18 Jun 2015
@author: zk9vist
'''
from django.db import models
from website.model.category_budget import CategoryBudget
class Budget(models.Model):
'''
Budget class
'''
year = models.PositiveSmallIntegerField()
month = models.PositiveSmallIntegerField()
budget_categories = models.ForeignKey(CategoryBudget)
def add_category_budget(self, category, amount):
self.budget_categories[category.__name__] = {'amount': amount}
def get_category_budget(self, category):
return self.budget_categories.get(category)
| UTF-8 | Python | false | false | 600 | py | 16 | budget.py | 12 | 0.676667 | 0.665 | 0 | 23 | 24.086957 | 70 |
Kwangsoo-kim/HW-Big | 12,395,275,618,403 | 40551275d26ea0dd9773daea5836a5fb4f729ed2 | 2098d18347dbcc241859a95ceba8949abf7f02bf | /src/b_11_Django/ch04_haksa/students/urls.py | 0f924775901d87251606ea133e4479fdaf1b138d | []
| no_license | https://github.com/Kwangsoo-kim/HW-Big | 42775d813ecdc1d097ff84985a0b52a6790b44f2 | 9ea939894aa622950b949beee30191cf1cfd7040 | refs/heads/main | 2023-06-24T15:27:30.323867 | 2021-07-06T08:42:14 | 2021-07-06T08:42:14 | 317,099,102 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.urls import path
from . import views
app_name='students'
urlpatterns=[
path('listAll/',views.listAll,name='listAll')
] | UTF-8 | Python | false | false | 138 | py | 750 | urls.py | 423 | 0.717391 | 0.717391 | 0 | 10 | 12.9 | 49 |
pell13/2021-Fall_Python-Algorithm- | 12,257,836,669,240 | 0929a3d78b5a1347621d6a676c2f3149abb43e6a | 279c21d9945390d85a5da6887b2028c41bcb010c | /BOJ_solution/2504.괄호의값.py | 5bbf709e7e11965615bc0396eec70feb4d2cb009 | []
| no_license | https://github.com/pell13/2021-Fall_Python-Algorithm- | 2051b9f13a9158682855eb71f440b4d75895e61e | 309dfab37fe13749dbaeea06d12d23e6ff53d785 | refs/heads/main | 2023-08-21T17:54:36.940977 | 2023-02-25T12:40:46 | 2023-02-25T12:40:46 | 415,779,121 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # 문제 유형 : 구현
#시작시간: 1:15 -> 2시간 넘게 품.. ㅠㅠ
#() -> 2
#[] -> 3
#오답노트 : 처음에는 괄호에서 시작하는대로 하려고 별의별 뻘짓을 다하다가, 재귀로도 해보다가... 어떻게 재는지 몰라서 계속 하다가 결국 while문으로 노가다하는 방식을 선택했다.
#하지만 더하는 상황, 곱하는 상황을 분간하기가 너무 어려웠다...
#굉장히 잘 한 답을 보니까 곱하는 걸 나중에 하는게 아니라 시작하면서 곱한다는 점이 좋아 보였고, 그리고 내 안에 브라켓이 있는지 체크할때 stack이 아니라 원본 어레이를 활용했다.
#여는 걸 넣을때 값을 곱한다. 닫힌 괄호를 만나면 tmp를 전체 결과인 res에 넣고 괄호를 나눠준다.
#아, 그러면 이게 어떻게 보면 분배법칙이 성립하는 셈이다
#(()[[]])
# 2 * ( 2 + 3 * 3 )
# 인데 이거를 2 * 2 + 2 * 3 * 3 이라고 풀어서 넣은 거다!
# 여는 괄호를 할때 ( ( 로 4가 곱해진거고, ) 가 닫혔으니 4를 더해주고 ()만큼은 덧셈에 효력이 없으니 2로 나눠주는 감성이다.
# 내 생각에는 오늘 밤새 이걸 풀었다고 해도 이 방법은 절대 생각 못했을 것 같다. while 문으로 어떻게든 꾸역꾸역 구현했지만 답을 보고 앞으로 쓸 수 있는 하나의 테크닉을 배웠다.
def sol(arr):
stack = [arr[0]]
score = []
result = []
depth = []
flag = 0
for i in range(1, len(arr)):
top = ''
if len(stack) != 0: top = stack[-1]
if (top == '[' and arr[i] == ']') or (top == '(' and arr[i] == ')'):
depth.append(flag)
flag = flag - 1
stack.pop()
result.append(arr[i])
if top == '[': n = 3
if top == '(': n = 2
score.append(n)
else:
flag = flag + 1
stack.append(arr[i])
if len(stack) != 0:
return 0
max_value = max(depth)
while max_value > 0 :
new_d = []
new_s = []
for i in range(len(depth) -1):
if depth[i] == max_value and depth[i] == depth[i + 1]:
score[i+1] += score[i]
elif depth[i] == max_value and (depth[i+1] + 1 == (depth[i])):
score[i+1] *= score[i]
elif depth[i] != max_value:
new_s.append(score[i])
new_d.append(depth[i])
new_d.append(depth[-1])
new_s.append(score[-1])
depth = new_d
score = new_s
max_value = max(depth)
return sum(score)
bracket_arr = input()
print(sol(bracket_arr)) | UTF-8 | Python | false | false | 2,673 | py | 62 | 2504.괄호의값.py | 61 | 0.487868 | 0.46825 | 0 | 77 | 24.168831 | 104 |
yannickulrich/IRIS | 11,055,245,848,607 | f4877bd8c7912ea2691be1caab096c7fe8795b26 | c1bc4c77d54d70f167a8063e917b286b2e27c655 | /libs/tts/utils.py | 085d4c5228cd8b75c097687f664040a7ce861c67 | []
| no_license | https://github.com/yannickulrich/IRIS | 09c0292506a1f4a112d29f7d9af8008a791762e2 | fb13496308fff73640ae306b1bbd88a5bd71d59f | refs/heads/master | 2018-01-12T08:56:28.559987 | 2016-03-02T16:57:05 | 2016-03-02T16:57:05 | 51,516,141 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def get_profile(profile, key="", default=""):
tts_profile = {}
if profile:
if 'tts' in profile:
tts_profile = profile['tts']
if key:
if key in tts_profile:
return tts_profile[key]
elif default:
return default
else:
raise KeyError("Key not found and default not set")
else:
return tts_profile
| UTF-8 | Python | false | false | 397 | py | 36 | utils.py | 34 | 0.534005 | 0.534005 | 0 | 15 | 25.466667 | 63 |
atinghosh/Deep-learning-for-Glaucoma | 9,466,107,924,434 | 739909b92677484647158a0d88d41c92d70571b6 | 2366e392f09fd50368acf69ff04cf32a3187d08a | /Huy/data.py | 28492737d55a6706d79354a699ff09c6e3394dab | []
| no_license | https://github.com/atinghosh/Deep-learning-for-Glaucoma | b41de26604999424de6aaeaccaac33bb0d9b74e2 | 4993b5937cb87dcc9a660f30caa37b4346767a3d | refs/heads/master | 2020-03-18T18:58:24.347534 | 2018-05-29T21:42:37 | 2018-05-29T21:42:37 | 135,125,903 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os
from os import path
import glob
import math
import shutil
import random
import pandas as pd
import numpy as np
from scipy import signal
from PIL import Image
import torch
import torch.utils.data as data
import torchvision
from torchvision import transforms as T
import Huy.transforms as imgT
#------------------------------------------------------------------------------
#Initial Dataset
#------------------------------------------------------------------------------
def is_file(filename, exts):
return any(filename.endswith(extension) for extension in exts)
def convert_label(trainYn, nb_classes):
a,b,c,_= np.shape(trainYn)
trainY = np.zeros((a,b,c,nb_classes))
for i in range(0,a):
trainY[i,:,:,0:4]= trainYn[i,:,:,0:4]
trainY[i,:,:,4] = (trainYn[i,:,:,4] + trainYn[i,:,:,5] + trainYn[i,:,:,6] + trainYn[i,:,:,7])
return trainY
def load_data(filelist):
nb_classes = 5
inputData = np.load(filelist[0])
targetData = np.load(filelist[1])
targetData = convert_label(targetData, nb_classes)
return inputData, targetData
class DatasetFromFile(data.Dataset):
"""Maniplulate dataset before loading
"""
def __init__(self, filelist=['input','target'], transform=None, islabel=True):
super(DatasetFromFile, self).__init__()
for filepath in filelist:
if not path.isfile(filepath):
raise ValueError(filepath, ' is not a file')
self.filelist = filelist
self.transform = transform
self.islabel = islabel #often use for prediction
self.inputData, self.targetData = load_data(filelist)
self.len = self.inputData.shape[0]
def __getitem__(self, index):
input = self.inputData[index]
input = input/255
if self.islabel:
target = self.targetData[index]
if self.transform is not None:
input, target = self.transform([input, target])
else:
target = -1
input = self.transform(input)
return input, target, index
def __len__(self):
return self.len
#------------------------------------------------------------------------------
#Do transforms
#------------------------------------------------------------------------------
def create_dark_mask():
"""create dark mask for adding black box
"""
black_dx = 60
black_dy = 20
dark_mask = np.zeros((black_dx, black_dy))
for k in range(black_dy):
dark_mask[:,k] = (np.abs(k-black_dy//2) / (black_dy/2.))**2
return dark_mask
def create_elastic_indices():
"""create indices for elastic deformation
used once at the start epoch
"""
#initial values
alpha, alpha2, sigma = 10, 15, 50
shape = (480, 352) #same as shape of input images
x_mesh, y_mesh = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]))
#below is used once per epoch for the elastic deformation
g_1d = signal.gaussian(300, sigma)
kernel_deform = np.outer(g_1d, g_1d)
dx = signal.fftconvolve(np.random.rand(*shape) * 2 - 1, kernel_deform, mode='same')
dy = signal.fftconvolve(np.random.rand(*shape) * 2 - 1, kernel_deform, mode='same')
dx = alpha * (dx - np.mean(dx)) / np.std(dx)
dy = alpha2 * (dy - np.mean(dy))/ np.std(dy)
indices_x, indices_y = x_mesh+dx, y_mesh+dy
indices_x_clipped = np.clip(indices_x, a_min=0, a_max=shape[1]-1)
indices_y_clipped = np.clip(indices_y, a_min=0, a_max=shape[0]-1)
return indices_x_clipped, indices_y_clipped
def train_transform(dark_mask, indices_x_clipped, indices_y_clipped):
return imgT.EnhancedCompose([
#random flip
T.Lambda(imgT.randomlyFlip),
#intensity nonlinear
[T.Lambda(imgT.intensityNonliearShift), None],
#add blackbox
[imgT.AddBlackBox(dark_mask), None],
#imgT.RandomRotate(),
#elastic deformation
imgT.ElasticDeformation(indices_x_clipped, indices_y_clipped),
#multiplicative gauss
[imgT.AddGaussian(ismulti=True), None],
#additive gauss
[imgT.AddGaussian(ismulti=False), None],
# for non-pytorch usage, remove to_tensor conversion
[T.Lambda(imgT.to_tensor), T.Lambda(imgT.to_tensor_target)]
])
def val_transform():
return imgT.EnhancedCompose([
# for non-pytorch usage, remove to_tensor conversion
[T.Lambda(imgT.to_tensor), T.Lambda(imgT.to_tensor_target)]
])
#------------------------------------------------------------------------------
#Get method (used for DataLoader)
#------------------------------------------------------------------------------
def get_trainSet(train_filelist):
dark_mask = create_dark_mask()
indices_x_clipped, indices_y_clipped = create_elastic_indices()
return DatasetFromFile(train_filelist, transform=train_transform(dark_mask, indices_x_clipped, indices_y_clipped))
def get_valSet(val_filelist):
dark_mask = create_dark_mask()
indices_x_clipped, indices_y_clipped = create_elastic_indices()
return DatasetFromFile(val_filelist, transform= val_transform())#train_transform(dark_mask, indices_x_clipped, indices_y_clipped))
| UTF-8 | Python | false | false | 5,236 | py | 11 | data.py | 10 | 0.582315 | 0.571429 | 0 | 139 | 36.661871 | 134 |
belatrix/BelatrixEventsBackend | 17,695,265,280,752 | f06677e1feb8a7c0108674b7219a8d4200b5c4cb | 25766f56887a1c6975258d8b45c87389092c07d2 | /events/admin.py | abb9ac8e813e040db26613970e4f3ab1d314a8d7 | [
"MIT"
]
| permissive | https://github.com/belatrix/BelatrixEventsBackend | 275f757d754d1c42d70a27598274d883cf76671b | eb38574bba0ca0269b17d0be938cc46787c21895 | refs/heads/master | 2020-05-20T17:58:30.484229 | 2018-06-04T13:57:07 | 2018-06-04T13:57:07 | 84,498,341 | 0 | 0 | MIT | false | 2018-06-04T13:57:08 | 2017-03-09T23:28:38 | 2018-05-03T20:32:06 | 2018-06-04T13:57:08 | 172 | 0 | 0 | 4 | Python | false | null | from django.contrib import admin
from .models import City, Event, Interaction, Location, EventParticipant
from .models import Meeting, Attendance
class CityAdmin(admin.ModelAdmin):
list_display = ('name', )
class EventAdmin(admin.ModelAdmin):
list_display = ('title', 'datetime', 'image', 'details', 'is_upcoming', 'is_featured', 'get_cities', 'is_active')
class EventParticipantAdmin(admin.ModelAdmin):
list_display = ('event', 'participant')
class InteractionAdmin(admin.ModelAdmin):
list_display = ('text', 'event', 'votes')
class LocationAdmin(admin.ModelAdmin):
list_display = ('name', 'latitude', 'longitude')
class MeetingAdmin(admin.ModelAdmin):
list_display = ('name', 'start_date', 'end_date', 'event', 'is_over', 'is_active')
class AttendanceAdmin(admin.ModelAdmin):
list_display = ('datetime', 'meeting', 'participant')
search_fields = ['participant__email']
admin.site.register(City, CityAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(EventParticipant, EventParticipantAdmin)
admin.site.register(Interaction, InteractionAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(Meeting, MeetingAdmin)
admin.site.register(Attendance, AttendanceAdmin)
| UTF-8 | Python | false | false | 1,244 | py | 67 | admin.py | 42 | 0.73955 | 0.73955 | 0 | 41 | 29.341463 | 117 |
seanmavley/Menpha | 15,006,615,748,011 | 9b907ae5edf594a46ac5444682aa27332b5f13e9 | c37a9705b51fb466fbe73c3ccfa10c274ed51063 | /main/tests.py | 74028a7f858ec62fea43f891c5c38e5820885900 | []
| no_license | https://github.com/seanmavley/Menpha | 812aa69f9aaa54f4c05d177df456ddae9bd1e832 | f1164dda2349d0c2a273dc417c98c425b91c9b68 | refs/heads/master | 2016-04-03T03:36:30.962295 | 2015-02-15T08:17:52 | 2015-02-15T08:17:52 | 27,644,277 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.test import TestCase
# from django.core.urlresolvers import resolve
from .models import Item
from django.contrib.auth.models import User
class start():
def create_user(self):
self.user_in = User.objects.create_user(
'khophi',
'email@email.com',
'password'
)
# create user khophi
self.user_in.save()
def create_item(self):
self.username = User.objects.get(username='khophi')
self.item = Item(
device='Google Nexus 6',
slug='0000000000',
type_of_item='md',
description='An awesome phone I bought from Google',
stolen='s',
created_by=self.username
)
self.item.save()
# Check save and retrieve from DB
class SaveToDBDirect(TestCase):
def setUp(self):
begin = start()
begin.create_user()
begin.create_item()
def test_check_user_account(self):
self.user = User.objects.all()[0]
self.assertEqual(str(self.user), '[<User: khophi>]')
def test_check_new_item(self):
from_db = Item.objects.count()
self.assertEqual(from_db, 1)
# Check request, save and retrieve from DB works via views
# Non REST
class TestView(TestCase):
def test_check_login(self):
request = self.client.post('/admin/', {'username': 'khophi', 'password': 'password'})
self.assertEqual(request.status_code, 200)
def test_check_details(self):
request = self.client.get('/detail/0000000000')
self.assertEqual(request.status_code, 200)
# Check request, save and retrieve from DB works via views
# REST way
# Check post and get works via browser
# Mr. Selenium comes in
# Searched empty, response "Not searched for anything"
# if not stolen, don't show in results
# Mylist count
# Account login, logout
# get_absolute_urls on models
| UTF-8 | Python | false | false | 1,923 | py | 47 | tests.py | 8 | 0.621425 | 0.606344 | 0 | 75 | 24.64 | 93 |
areski/Facio | 8,358,006,368,416 | 27b461310273f4af7d80aaabc95a32c1d390cfc0 | 9e9c192fc5b72d61b2913f9b1e79bc2c8dd52757 | /src/facio/config.py | cf83d8be0976f8e2be69190816965fee0fd28194 | [
"BSD-2-Clause"
]
| permissive | https://github.com/areski/Facio | 98bc1cc00e7bcac93ee361c2a0209152a7dfc94c | 73a5793c8fa7821173c400aa3192e3ba9c8cd4de | refs/heads/master | 2021-01-18T08:56:00.493525 | 2013-05-19T10:21:03 | 2013-05-19T10:21:03 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
facio.config
------------
Sets up variables and configuration for Facio from command line and / or
a configuration file.
"""
import ConfigParser
import os
import sys
try:
from clint.textui import puts, indent
from clint.textui.colored import blue
except ImportError:
pass
from random import choice
from .cli import CLIOptions
class ConfigFile(object):
templates = {}
sections = {
'misc': ['install', ],
'template': [],
'virtualenv': ['venv_create', 'venv_path', 'venv_use_site_packages'],
}
path = os.path.join(os.path.expanduser('~'), '.facio.cfg')
def __init__(self):
if os.path.isfile(self.path):
self._parse_config()
else:
self.cfg_loaded = False
def _parse_config(self):
self.parser = ConfigParser.ConfigParser()
try:
self.parser.read(self.path)
except ConfigParser.MissingSectionHeaderError:
self.cfg_loaded = False
# TODO: print warning to user
except ConfigParser.ParsingError:
# TODO: print warning to user
self.cfg_loaded = False
else:
self.cfg_loaded = True
with indent(4, quote=' >'):
puts(blue('Loaded ~/.facio.cfg'))
for section in self.sections:
try:
items = self.parser.items(section)
except ConfigParser.NoSectionError:
pass
else:
if section == 'template':
self._add_templates(items)
else:
self._set_attributes(section, items)
def _add_templates(self, items):
for item in items:
name, value = item
self.templates[name] = value
def _set_attributes(self, section, items):
opts = self.sections[section]
for opt in opts:
try:
opt, val = [(x, y) for x, y in items if x == opt][0]
except IndexError:
pass
else:
if val == '0' or val == '1':
val = False if val == '0' else True
setattr(self, opt, val)
class Config(object):
default_template = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'default_template')
def __init__(self):
self.cli_args = CLIOptions()
self.file_args = ConfigFile()
self.django_secret_key
def _error(self, msg):
self.cli_args._parser.error(msg)
#
# Project Properties
#
@property
def project_name(self):
return self.cli_args.project_name
#
# Template Properties
#
def _validate_template_options(self):
if (not self._tpl.startswith('git+') and
not os.path.isdir(self._tpl)):
self._error('The path to your template does not exist.')
def _template_choice_prompt(self):
templates = self.file_args.templates
max_tries = 5
template_list = list(templates)
i = 0
sys.stdout.write("Please choose a template:\n\n")
for name in templates:
template = templates[name]
sys.stdout.write("{0}) {1}: {2}\n".format((i + 1), name, template))
i += 1
i = 1
while True:
if i > max_tries:
self._error('You failed to enter a valid template number.')
try:
num = int(raw_input(
'\nEnter the number for the template '
'({0} of {1} tries): '.format(i, max_tries)))
if num == 0:
raise ValueError
template = templates[template_list[num - 1]]
except (ValueError, IndexError):
sys.stdout.write('\nPlease choose a number between 1 and '
'{0}\n'.format(len(template_list)))
i += 1
else:
return template
@property
def _cli_template(self):
try:
return self.cli_args.template
except AttributeError:
return False
@property
def _cli_choose_template(self):
try:
return self.cli_args.choose_template
except AttributeError:
return False
@property
def template(self):
if not getattr(self, '_tpl', None):
if self._cli_template:
self._tpl = self._cli_template
elif self._cli_choose_template:
self._tpl = self._template_choice_prompt()
else:
try:
self._tpl = self.file_args.templates['default']
except KeyError:
self._tpl = self.default_template
self._validate_template_options()
return self._tpl
@property
def template_settings_dir(self):
try:
return self.cli_args.template_settings_dir
except AssertionError:
return False
@property
def variables(self):
try:
return self.cli_args.variables
except AssertionError:
return False
#
# Python Properties (Experimental)
#
@property
def _file_args_install(self):
try:
return self.file_args.install
except AttributeError:
return False
@property
def _cli_args_install(self):
try:
return self.cli_args.install
except AttributeError:
return False
@property
def install(self):
if self._cli_args_install or self._file_args_install:
return True
return False
#
# Virtual Environment Properties (Experimental)
#
def _validate_virtualenv_options(self):
if not self.venv_path:
self._error('You need to provide a virtualenv path where the '
'venv will be created')
@property
def _file_args_venv_create(self):
try:
return self.file_args.venv_create
except AttributeError:
return False
@property
def _cli_args_venv_create(self):
try:
return self.cli_args.venv_create
except AttributeError:
return False
@property
def _file_args_venv_path(self):
try:
return self.file_args.venv_path
except AttributeError:
return False
@property
def _cli_args_venv_path(self):
try:
return self.cli_args.venv_path
except AttributeError:
return False
@property
def _file_args_venv_use_site_packages(self):
try:
return self.file_args.venv_use_site_packages
except AttributeError:
return False
@property
def _cli_args_venv_use_site_packages(self):
try:
return self.cli_args.venv_use_site_packages
except AttributeError:
return False
@property
def venv_create(self):
if self._cli_args_venv_create or self._file_args_venv_create:
self._validate_virtualenv_options()
return True
return False
@property
def venv_path(self):
if self._file_args_venv_path and not self._cli_args_venv_path:
return self._file_args_venv_path
elif self._cli_args_venv_path:
return self.cli_args.venv_path
return False
@property
def venv_use_site_packages(self):
if (self._cli_args_venv_use_site_packages
or self._file_args_venv_use_site_packages):
return True
return False
@property
def venv_prefix(self):
try:
return self.cli_args.venv_prefix
except AttributeError:
return False
#
# Django Secret Key Generation
#
@property
def django_secret_key(self):
'''Generate a secret key for Django Projects.'''
if hasattr(self, 'generated_django_secret_key'):
return self.generated_django_secret_key
else:
choice_str = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
key = ''.join([choice(choice_str) for i in range(50)])
self.generated_django_secret_key = key
return key
| UTF-8 | Python | false | false | 8,354 | py | 39 | config.py | 16 | 0.541776 | 0.537946 | 0 | 302 | 26.662252 | 79 |
olasson/SDCND-P3-BehavioralCloning | 8,375,186,259,730 | 2d4f3ff87c840c00e8cd171d7153d61205250984 | b0c576d458016fa6c3474be7f51d9f9ea66aa495 | /code/io.py | 12d45b305a86a1ff0f497984dce03ddb9b606880 | []
| no_license | https://github.com/olasson/SDCND-P3-BehavioralCloning | 0cf4962a9395fa5a433778e7e10c20f79930c780 | 2527bf63ac98171265c16cd9af04c53d6ae6e58b | refs/heads/master | 2023-07-02T19:01:26.016259 | 2021-08-07T11:23:25 | 2021-08-07T11:23:25 | 386,737,758 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """
This file contains save and load (I/O) functions.
"""
import json
import pickle
import numpy as np
from glob import glob
from pandas import read_csv
from os.path import join as path_join
import matplotlib.image as mpimg
# Custom imports
from code.misc import parse_file_path
def load_image(file_path):
# Wrapper
image = mpimg.imread(file_path)
return image
# Images
def load_images(file_paths):
"""
Load a set of images into memory
Inputs
----------
file_paths : list or numpy.ndarray
A list or array of file_paths - ['./example/myimg1.jpg'... './example/myimgN.jpg',]
Outputs
-------
images: numpy.ndarray
Numpy array containing 'images'
file_names: numpy.ndarray
Numpy array containing the file names - ['myimg1.jpg'... 'myimgN.jpg',]
"""
n_images = len(file_paths)
image_shape = load_image(file_paths[0]).shape
n_rows = image_shape[0]
n_cols = image_shape[1]
# RGB or grayscale
if len(image_shape) > 2:
n_channels = 3
else:
n_channels = 1
images = np.zeros((n_images, n_rows, n_cols, n_channels), dtype = np.uint8)
file_names = np.zeros((n_images), dtype = 'U25')
for i in range(n_images):
images[i] = load_image(file_paths[i])
file_names[i] = parse_file_path(file_paths[i])[1]
return images, file_names
# Config
def load_config(file_path):
"""
Check if a file exists
Inputs
----------
file_path: str
Path to a .json file.
Outputs
-------
config: dict
Dictionary containing the config from file_path.
"""
if (file_path == '') or (file_path is None):
return None
with open(file_path) as f:
config = json.load(f)
return config
# Pickled
def save_pickled_data(file_path, data1, data2, key1 = 'images', key2 = 'angles'):
"""
Save two data files as a single pickled (.p) file.
Inputs
----------
file_path: str
File path to a pickled file - './path/to/myfile.p'
data1,data2: numpy.ndarray, numpy.ndarray
Numpy arrays containing data.
key1, key2: str, str
Dictionary keys.
Outputs
-------
N/A
"""
data = {key1: data1, key2: data2}
with open(file_path, mode = 'wb') as f:
pickle.dump(data, f, protocol = pickle.HIGHEST_PROTOCOL)
def load_pickled_data(file_path, key1 = 'images', key2 = 'angles'):
"""
Load a single pickled (.p) file into two numpy arrays.
Inputs
----------
file_path: str
File path to a pickled file - './path/to/myfile.p'
key1, key2: str, str
Dictionary keys.
Outputs
-------
data1,data2: numpy.ndarray, numpy.ndarray
Numpy arrays containing data.
"""
with open(file_path, mode = 'rb') as f:
data = pickle.load(f)
data1 = data[key1]
data2 = data[key2]
return data1, data2
def load_sim_log(path):
"""
Load the contents of the driving_log.csv
Inputs
----------
path: str
Path to driving_log.csv
Outputs
-------
angles: numpy.ndarray
Numpy array of floats containing one angle for each cam
file_paths: numpy.ndarray
Numpy array of strings containing paths to a set of images
"""
sim_log = read_csv(path, names = ['center', 'left', 'right', 'angle', 'throttle', 'break', 'speed'])
file_paths = sim_log[['center', 'left', 'right']].to_numpy().flatten()
angles = sim_log[['angle', 'angle', 'angle']].to_numpy().flatten()
return angles, file_paths
| UTF-8 | Python | false | false | 3,669 | py | 12 | io.py | 9 | 0.578632 | 0.56882 | 0 | 169 | 20.680473 | 104 |
dkowsikpai/s1python | 4,312,147,173,707 | 0dcbe50143aa53fe2fd9d576c244a0ae585e5602 | aaba51e1466fb888cc0a351332fa198c610da014 | /Python/gro_dic.py | 3c6e7e0d81b85168a55eea5f32e22568ffcd0587 | []
| no_license | https://github.com/dkowsikpai/s1python | 01cdfefabf4abb37fa205605717589bb008ab28f | 2dc0912326426214184746af8978b92715056fe7 | refs/heads/master | 2020-03-29T08:39:04.662383 | 2019-04-02T13:33:57 | 2019-04-02T13:33:57 | 149,721,539 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | it_db={}
while True:
it=""
it=input("Enter the items:")
if it!="":
it_db[it]=int(input("Enter the price:"))
else: break
it_bt={}
print("-------Purchase--------")
while True:
it=""
it=input("Enter the items:")
if it!="":
it_bt[it]=int(input("Enter the quantity:"))
it_bt[it]=it_bt[it]*it_db[it]
else: break
lt=it_bt.keys()
Tsum=0
for i in lt:
Tsum+=it_bt[i]
print(i," : ",it_bt[i])
print("Grand Total: ",Tsum,"/-")
| UTF-8 | Python | false | false | 478 | py | 26 | gro_dic.py | 26 | 0.514644 | 0.512552 | 0 | 22 | 20.727273 | 51 |
kayarre/vmtktools | 14,525,579,428,487 | e2dff9b5665ff163ee580881ddfa35f2ca0f525e | 7bc5fca6d7e8bc7e38e7afeae10a116e4422d96c | /automatic_clipping.py | 26942d88928217fdb76165a1d4495a0fe1022c62 | []
| no_license | https://github.com/kayarre/vmtktools | be7b802a96dd101434b502e3092f7133c3dbaf24 | 0a32f6614b2bc3c386a631cc9e2a7c9e99060b76 | refs/heads/master | 2021-01-14T11:19:50.412840 | 2016-06-01T14:08:49 | 2016-06-01T14:08:49 | 68,652,798 | 0 | 3 | null | true | 2016-09-19T22:43:21 | 2016-09-19T22:43:21 | 2016-03-30T21:43:30 | 2016-06-01T14:08:54 | 112 | 0 | 0 | 0 | null | null | null | from argparse import ArgumentParser
from common import *
import numpy as np
def read_command_line():
"""Read arguments from commandline"""
parser = ArgumentParser()
parser.add_argument('--d', '--dir_path', type=str, default=".",
help="Path to the folder with all the cases")
parser.add_argument('--m', type=str, default="model_smoothed.vtp", help="Name of the model file")
parser.add_argument('--c', type=str, default="centerline_complete.vtp", help="Name of the centerline file")
parser.add_argument('--anu', type=bool, default=False)
args = parser.parse_args()
return args.d, args.m, args.c, args.anu
def move_past_sphere(cl, points, direction):
i = 0 if not direction else cl.GetNumberOfPoints() - 1
j = 0 if direction else cl.GetNumberOfPoints() - 1
r = cl.GetPointData().GetArray(radiusArrayName).GetTuple1(i)
center = cl.GetPoints().GetPoint(i)
MISphere = vtk.vtkSphere()
MISphere.SetCenter(center)
MISphere.SetRadius(r*(1./3))
direction = -1 if direction else 1
for k in range(i, j, direction):
value = MISphere.EvaluateFunction(cl.GetPoint(k))
if value >= 0:
break
return cl.GetPoint(k), cl.GetPointData().GetArray(radiusArrayName).GetTuple1(i)
def getBoundingBox(cl, inlet):
endPoint = cl.GetPoint(cl.GetNumberOfPoints() - 1) if inlet else cl.GetPoint(0)
bottom, bottom_r = move_past_sphere(cl, endPoint, inlet)
line = CenterlineAttribiutes(cl)
E1 = get_array("ParallelTransportNormals", line, k=3)
E1 = E1[E1.shape[0]-1,:]
T = get_array("FrenetTangent", line, k=3)
T = T[T.shape[0]-1,:]
E2 = np.zeros(3)
V = np.eye(3)
V[:, 0] = T
V[:, 1] = E1
V = GramSchmidt(V)
E1 = V[:,1] * bottom_r * 1.5
E2 = V[:,2] * bottom_r * 1.6
T = T * bottom_r * 3 if not inlet else T * bottom_r * 3 * (-1)
corners = []
for O in [bottom, bottom + T]:
for dir1 in [1, -1]:
for dir2 in [1, -1]:
corners.append(O + dir1*E1 + dir2*E2)
viz(line, [bottom, endPoint] + corners)
corners = np.array(corners)
limits = []
for i in range(3):
for f in [np.min, np.max]:
limits.append(f(corners[:,i]))
return limits
def clipp(dir_path, model, centerline, anu):
cl = ReadPolyData(path.join(dir_path, centerline))
surface = ReadPolyData(path.join(dir_path, model))
#clipper = vtk.vtkBoxClipDataSet()
box = vtk.vtkBox()
clipper = vtk.vtkClipPolyData()
clipper.SetInput(surface)
clipper.SetClipFunction(box)
inlet = True
for i in [0] + range(cl.GetNumberOfLines() - anu):
limits = getBoundingBox(ExtractSingleLine(cl, i), inlet)
inlet = False
box.SetBonds(limits[0], limits[1], limits[2], limits[3], limits[4], limits[5])
clipper.Update()
#clipper.SetBoxClip(limits[0], limits[1], limits[2], limits[3], limits[4], limits[5])
clipper.GenerateClippedOutputOn()
clipper.Update()
filter = vtk.vtkGeometryFilter()
filter.SetInput(clipper.GetClippedOutput())
filter.Update()
surface.DeepCopy(filter.GetOutput())
clipper.Update()
#TODO: Deep copy of surface and update clipper
WritePolyData(surface, "test_clipping.vtp")
sys.exit(0)
if __name__ == "__main__":
dir_path, model, centerline, anu = read_command_line()
clipp(dir_path, model, centerline, anu)
| UTF-8 | Python | false | false | 3,484 | py | 26 | automatic_clipping.py | 26 | 0.616533 | 0.597876 | 0 | 111 | 30.387387 | 111 |
jjh42/oldalgo | 6,227,702,610,427 | 4dc7857a96a9cc78a9810bab3383b6395ad78eda | b31638e4bf085263922f0fcc78fce2b659e2815f | /mergesort.py | 8422d1513f467c6650edf67f77738a18d4952439 | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
]
| permissive | https://github.com/jjh42/oldalgo | ba9023db7ccaf6a339bc28b0cbc81ba6c8c050f0 | 8fb748bcdcde05ebb6ed8b6b5749e2d3713d14ef | refs/heads/master | 2020-06-03T16:32:07.595692 | 2014-08-10T20:56:33 | 2014-08-10T20:56:33 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def split(a):
"""Split array a into two halves with the larger half on the right for odd lengths."""
    middle = len(a) // 2  # floor division (same behaviour on Python 2 and 3)
left = a[0:middle]
right = a[middle:]
return left, right
def merge(left, right):
"""Merge two sorted arrays into a concatenated sorted array."""
leftindex = 0
rightindex = 0
m = []
for k in range(len(left) + len(right)):
if left[leftindex] < right[rightindex]:
m.append(left[leftindex])
leftindex += 1
if leftindex == len(left): # We've completed the left side
m.extend(right[rightindex:])
break
else:
m.append(right[rightindex])
rightindex += 1
            if rightindex == len(right): # We've completed the right side
m.extend(left[leftindex:])
break
return m
def mergesort(a):
"""Use recursive merge sort (without any cleverness to reduce memory access)
to sort the array a into a least first list.
Returns sorted list.
Assumes that all list elements are comparable"""
# Deal with the base case
if len(a) <= 1:
return a
# Recurse for everyone else
left, right = split(a)
left = mergesort(left)
right = mergesort(right)
return merge(left, right)
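# Illustrative usage (not part of the original module): a quick sanity check.
if __name__ == '__main__':
    print(mergesort([5, 2, 9, 1, 5, 6]))  # -> [1, 2, 5, 5, 6, 9]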
| UTF-8 | Python | false | false | 1,309 | py | 7 | mergesort.py | 6 | 0.586707 | 0.58136 | 0 | 44 | 28.727273 | 90 |
RaspberryWallet/Backend | 15,676,630,674,621 | dd82c696129074ac6554931d4ab2af9d36bbedd8 | f04eabeb86f64e0711566a089fbe74b389c5be7a | /Scripts/webapp/copyWebApp.py | 3aafe0293ad2cdc4cc10526a05d35551c168b399 | []
| no_license | https://github.com/RaspberryWallet/Backend | 753069b94e4e5bd130c12f18bf455b6ddc6677bd | aa65c50746ac95d3c24e93175cf64d78ffb827bf | refs/heads/master | 2022-11-22T18:20:13.577031 | 2019-11-13T12:34:27 | 2019-11-13T12:34:27 | 138,017,958 | 4 | 2 | null | false | 2022-11-15T23:52:23 | 2018-06-20T10:35:44 | 2019-11-13T12:34:31 | 2022-11-15T23:52:20 | 8,202 | 4 | 2 | 17 | Java | false | false | #!/usr/local/bin/python3
import os
import shutil
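# Climb up the directory tree until we reach the repository root ("Backend").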
while not os.getcwd().lower().endswith("backend"):
os.chdir("..")
shutil.rmtree("ServerHttp/src/main/resources/assets/static/js")
os.system("cp -rf ../../JSProjects/raspberry-wallet-frontend/build/ ServerHttp/src/main/resources/assets/")
os.system("git add ServerHttp/src/main/resources/assets")
| UTF-8 | Python | false | false | 352 | py | 157 | copyWebApp.py | 139 | 0.741477 | 0.738636 | 0 | 11 | 31 | 107 |
ti132520/pytest-vlog | 11,493,332,506,780 | 73b2356c10f62f85b8fd936a906feb034fae5f9d | a3529c0d2d26169b22244d6ff1bfdfc8d350eaf4 | /backen-20210718/backend/server.py | 44168c350894c30a6dd68a6ad1643e95af9f6781 | [
"CC0-1.0"
]
| permissive | https://github.com/ti132520/pytest-vlog | 8041321f9e8ab0d892d20ebc42a28f18f6be4996 | c6688ed2b1c5e10b91057a22e672dffc1cd54d53 | refs/heads/main | 2023-08-03T19:15:32.645834 | 2021-09-11T02:11:29 | 2021-09-11T02:11:29 | 355,898,041 | 0 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | import json
from flask import Flask, request
from flask_restful import Resource, Api
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS
app = Flask(__name__)
api = Api(app)
CORS(app, supports_credentials=True, origins="*")
username = "root"
pwd = "123456"
ip = "134.175.28.202"
port = "8888"
database = "test_ck18"
app.config['SQLALCHEMY_DATABASE_URI'] = f'mysql+pymysql://{username}:{pwd}@{ip}:{port}/{database}?charset=utf8'
# Set explicitly to silence SQLAlchemy's track-modifications warning
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
db = SQLAlchemy(app)
class Testcase(db.Model):
id = db.Column(db.Integer, primary_key=True)
nodeid = db.Column(db.String(80), nullable=False)
remark = db.Column(db.String(120))
def as_dict(self):
"""
        Return the record as a plain Python dict.
:return:
"""
return {"id": self.id,
"nodeid": self.nodeid,
"remark": self.remark}
# Each class represents one API resource; each method implements one operation
# on it (GET, POST, ...). Inheriting from Resource marks the class as a
# flask-restful service.
class TestCaseService(Resource):
"""
    Test case service.
"""
    # Method names correspond to HTTP methods (the "methods" list in app.route)
def get(self):
"""
        Query endpoint: fetch test case data.
"""
        # "request" carries the data sent with the incoming call
case_id = request.args.get("id")
if case_id:
            # If a case id was passed, fetch that single record
case_data = Testcase.query.filter_by(id=case_id).first()
app.logger.info(case_data)
# data = [{"id": case_data.id, "nodeid": case_data.nodeid, "remark": case_data.remark}]
data = [case_data.as_dict()]
else:
            # Otherwise fetch all test cases
case_data = Testcase.query.all()
# data = [{"id": i.id, "nodeid": i.nodeid, "remark": i.remark} for i in case_data]
data = [i.as_dict() for i in case_data]
return {"error": 0, "msg": {"data": data}}
def post(self):
        # Add a single test case
case_data = request.json
app.logger.info(case_data)
        # Unpack the dict from the request into keyword arguments for Testcase
testcase = Testcase(**case_data)
        # List-valued fields must be serialized before storing
testcase.nodeid = json.dumps(request.json.get("nodeid"))
db.session.add(testcase)
db.session.commit()
return {"error": 0, "msg": "post success"}
def put(self):
"""
        Update endpoint: modify an existing test case.
:return:
"""
app.logger.info(request.json)
        # Get the id of the record to modify
case_id = request.json.get("id")
        # Find the record by id, then apply the update with the request payload
        case = Testcase.query.\
            filter_by(id=case_id).\
            update(request.json)
        db.session.commit()
        app.logger.info(f"Updated {case} row(s) with {request.json}")
        # Note: query.update() returns the number of matched rows, not an id
        return {"error": 0, "msg": {"id": case}}
def delete(self):
"""
        Delete endpoint: remove a test case.
:return:
"""
case_id = request.args.get("id")
if not case_id:
return {"error": 40001, "msg": "Delete case_id can't be null"}
        # query.delete() returns the number of deleted rows
case = Testcase.query.filter_by(id=case_id).delete()
db.session.commit()
app.logger.info(case)
return {"error": 0, "msg": {"id": case}}
class TaskService(Resource):
pass
if __name__ == '__main__':
    # Register the resource on the Flask app: the first argument is the
    # resource class, the second is the route it is served under
# db.create_all()
api.add_resource(TestCaseService, "/testcase")
# api.add_resource(TaskService, "/task")
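    # Example calls against the local dev server (assuming Flask's default port 5000):
    #   curl "http://127.0.0.1:5000/testcase?id=1"
    #   curl -X POST -H "Content-Type: application/json" \
    #        -d '{"nodeid": "tests/test_a.py::test_one", "remark": "smoke"}' \
    #        "http://127.0.0.1:5000/testcase"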
app.run(debug=True)
| UTF-8 | Python | false | false | 3,942 | py | 122 | server.py | 93 | 0.571471 | 0.560155 | 0 | 121 | 26.752066 | 111 |
smorenburg/python | 3,874,060,523,770 | 5fd13e1533c54cd3f74a91116fd3448f99f73e4b | 307b247f36383e8c54a7bc017358f9c84fd6a12b | /src/old/gen.py | d20fc5ffefb663570aab0abfbeafc5804a1b8542 | [
"MIT"
]
| permissive | https://github.com/smorenburg/python | 4c6f5e7009156ec87ed535a119f97ff15347e850 | 74b1e72944dfd244f0169e8a7adb9e29ed1a7d27 | refs/heads/main | 2023-02-16T01:24:52.473945 | 2021-01-13T07:36:02 | 2021-01-13T07:36:02 | 313,896,985 | 0 | 0 | MIT | false | 2020-12-21T17:20:54 | 2020-11-18T10:19:33 | 2020-12-20T16:03:00 | 2020-12-21T17:20:53 | 55 | 0 | 0 | 0 | Python | false | false | #!/usr/bin/env python3
def gen_range(stop, start=1, step=1):
num = start
while num <= stop:
yield num
num += step
def gen_fib():
a, b = 0, 1
while True:
a, b = b, a + b
yield a
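# Illustrative usage (not in the original file):
if __name__ == "__main__":
    from itertools import islice
    print(list(gen_range(5)))          # [1, 2, 3, 4, 5]
    print(list(islice(gen_fib(), 8)))  # [1, 1, 2, 3, 5, 8, 13, 21]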
| UTF-8 | Python | false | false | 228 | py | 85 | gen.py | 84 | 0.482456 | 0.460526 | 0 | 14 | 15.285714 | 37 |
92hackers/algorithms | 16,552,803,961,039 | cab14d3da92d25d6ac0995cbcfea2a8bef236675 | fb8123ec81a0e12bb3f1e09c1afc070394d277c1 | /linked_list.py | 9f355a789e34011677f3c42baa37da33479a171a | []
| no_license | https://github.com/92hackers/algorithms | 72ba1b68d56a882fda64a71e4b399b3bb3eea3a0 | 911b35d246b2cec84e99dc6deb82e2e2c3067fa3 | refs/heads/master | 2022-06-15T09:59:15.295332 | 2022-03-08T10:01:59 | 2022-03-08T10:01:59 | 216,991,634 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # 链表
# A doubly linked list supporting basic insert/delete/update/search operations
# TODO: how to sort a list
class ListNode:
def __init__(self, value):
self.data = value
self.next = None
self.prev = None
class LinkedList:
def __init__(self, *args):
self.head = None
self.tail = None
self.size = None
self.build_list(*args)
def build_list(self, *args):
head = None
tail = None
for i in args:
new_node = ListNode(i)
if tail:
tail.next = new_node
new_node.prev = tail
else:
head = new_node
tail = new_node
self.head = head
self.tail = tail
self.get_size() # calculate the list size
def insert(self, value, index):
# return new size of the list
max_index = self.size - 1
if index < 0 or index > max_index:
raise Exception('index out of range')
new_node = ListNode(value)
self.size += 1
# insert as the new head
if index == 0:
new_node.next = self.head
self.head.prev = new_node
self.head = new_node
return self.size
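        # Otherwise walk the list until we reach the requested index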
target = self.head.next
current_index = 1
while current_index < max_index + 1:
if current_index == index:
prev_node = target.prev
prev_node.next = new_node
new_node.prev = prev_node
new_node.next = target
target.prev = new_node
return self.size
else:
current_index += 1
target = target.next
def push(self, value):
new_node = ListNode(value)
if self.tail:
self.tail.next = new_node
new_node.prev = self.tail
else:
self.head = new_node
self.tail = new_node
self.size += 1
return self.size
def pop(self):
        # Remove the last element from the list
if self.size == 0:
return
if self.size == 1:
result = self.head.data
self.head = None
self.tail = None
self.size = 0
return result
self.size -= 1
target = self.tail.prev
target.next = None
        # Return the removed value
result = self.tail.data
self.tail = target
return result
def delete(self, value):
if value == self.head.data:
next_head = self.head.next
next_head.prev = None
self.head.next = None
self.head = next_head
self.size -= 1
return self.size
if value == self.tail.data:
next_tail = self.tail.prev
next_tail.next = None
self.tail.prev = None
self.tail = next_tail
self.size -= 1
return self.size
target = self.head.next
while target is not None:
if value == target.data:
prev_node = target.prev
next_node = target.next
prev_node.next = next_node
next_node.prev = prev_node
self.size -= 1
return self.size
else:
target = target.next
        raise Exception('{} not found in the linked list'.format(value))
def indexOf(self, value):
# get index of a value in list
target = self.head
count = 0
while target is not None:
if target.data == value:
return count
target = target.next
count += 1
return -1
def get_size(self):
if self.size is not None:
return self.size
count = 0
target = self.head
while target is not None:
count += 1
target = target.next
self.size = count
return count
def get_list(self):
target = self.head
arr = []
while target is not None:
arr.append(target.data)
target = target.next
return arr
def get_reversed(self):
target = self.tail
arr = []
while target is not None:
arr.append(target.data)
target = target.prev
return arr
a = LinkedList(23, 19, 0, 34, 29)
print(a.get_list())
a.push(99)
print(a.get_list())
a.pop()
print(a.get_list())
print(a.indexOf(10))
a.insert(555, 4)
print(a.get_list())
a.insert(999, 0)
print(a.get_list())
print(a.get_reversed())
a.insert(66666, a.get_size() - 1)
print(a.get_list())
print(a.get_reversed())
a.delete(555)
print(a.get_list())
print(a.get_reversed())
| UTF-8 | Python | false | false | 4,712 | py | 15 | linked_list.py | 15 | 0.494425 | 0.483705 | 0 | 201 | 22.20398 | 64 |
fireae/PyLf | 7,911,329,772,408 | 0ec2d9a6dc5746422e14436d93e3afdcadd67cb5 | bbc459427a89ed7d316ca4892298d0886d398e7e | /tests/util.py | 249afb731bb49631bad97d3e56c8b56b6442802b | [
"BSD-3-Clause"
]
| permissive | https://github.com/fireae/PyLf | a91030c0aa11acce86c0664f0c24dfb0697a810a | 54d65284bef0328842b1534dd1899710e7803821 | refs/heads/master | 2020-03-21T07:29:28.978804 | 2018-06-16T15:17:31 | 2018-06-16T15:17:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
This module provides the essential functionality for the whole test suite.
WARNING: Do not change the location of this file!
"""
import math
import os
from PIL import ImageFont as image_font
THRESHOLD = 17.0
def compare_histogram(image1, image2) -> float:
"""
    Compare the histograms of the two images and return the root mean square of their difference
    This algorithm is inspired by the discussion "Compare two images the python/linux way" on Stack Overflow
"""
if image1.mode != image2.mode or image1.size != image2.size:
raise ValueError("image1 and image2 must have same mode and same size")
h1 = image1.histogram()
h2 = image2.histogram()
assert len(h1) == len(h2)
s = 0
for c1, c2 in zip(h1, h2):
s += (c1 - c2) ** 2
return math.sqrt(s / len(h1))
def absolute_equal(image1, image2) -> bool:
return image1.tobytes() == image2.tobytes()
def compare_pixel(image1, image2) -> float:
    """ Compare the two images pixel by pixel and return the root mean square """
    # Filled in from the docstring's spec: RMS over the flattened pixel bytes.
    if image1.mode != image2.mode or image1.size != image2.size:
        raise ValueError("image1 and image2 must have same mode and same size")
    b1, b2 = image1.tobytes(), image2.tobytes()
    return math.sqrt(sum((c1 - c2) ** 2 for c1, c2 in zip(b1, b2)) / len(b1))
def get_path(path: str) -> str:
return os.path.join(os.path.abspath(os.path.dirname(__file__)), path)
def get_short_text() -> str:
""" Return one short sentence """
return "我能吞下玻璃而不伤身体。"
def get_long_text() -> str:
""" Return a article """
with open(get_path("data/texts/荷塘月色.txt"), encoding='utf-8') as f:
return f.read()
def get_default_font():
return image_font.truetype(get_path("data/fonts/Bo Le Locust Tree Handwriting Pen Chinese Font-Simplified Chinese Fonts.ttf"))
| UTF-8 | Python | false | false | 1,612 | py | 13 | util.py | 8 | 0.663924 | 0.642405 | 0 | 57 | 26.719298 | 130 |
dyunttang/PythonWorkSpace | 18,098,992,221,541 | c2f737c7fb0300fbee4d3af3d13579b1631bff48 | 0f74ad63b648043f68189f1f6cd0bdc9114972d5 | /ex01/strex01.py | 336c735a65211002c7ea93cde27890161ea2ebad | []
| no_license | https://github.com/dyunttang/PythonWorkSpace | e6253ace3c2245ff189b3818ea48008b71fdd509 | 6645bd1ed0c9115d440d2c9b8cdaa5aed6a00594 | refs/heads/master | 2023-08-08T04:08:31.926432 | 2021-09-14T02:07:42 | 2021-09-14T02:07:42 | 405,211,922 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | a = '홍길동'
print(a)
print("="*50)
b = "가나다라마" # CharSequence
print(len(b))
print(b[0])
print(b[4])
print(b[-1])
print(b[0:3])
print(b[1:])
print(b[:3]+"...")
| UTF-8 | Python | false | false | 179 | py | 20 | strex01.py | 19 | 0.533742 | 0.478528 | 0 | 16 | 9.1875 | 27 |
DRAMCO/Interreg-NOMADe | 19,061,064,898,733 | 6f282963746f28fe874b2f94b264238c5b184495 | 4425c77c34ae4155222619384adf7d062c1daf65 | /PythonScripts/Plot_orientation_all_sensors/functions.py | afd8b06e801488568fba5bd1c21b10316dd849bf | []
| no_license | https://github.com/DRAMCO/Interreg-NOMADe | a21faf18f161eb510c31b1815ff7e621af8f4fcb | cf4cc04800ca8f0355cf6cbd914a8c7413348f5a | refs/heads/master | 2023-04-19T03:20:38.310061 | 2021-04-16T06:44:38 | 2021-04-16T06:44:38 | 243,491,542 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # ____ ____ _ __ __ ____ ___
# | _ \| _ \ / \ | \/ |/ ___/ _ \
# | | | | |_) | / _ \ | |\/| | | | | | |
# | |_| | _ < / ___ \| | | | |__| |_| |
# |____/|_| \_\/_/ \_\_| |_|\____\___/
# research group
# dramco.be/
#
# KU Leuven - Technology Campus Gent,
# Gebroeders De Smetstraat 1,
# B-9000 Gent, Belgium
#
# File: functions.py
# Created: 2020-10-06
# Author: Jarne Van Mulders
# Version: V1.0
#
# Description:
# Plot the data from multiple IMU
#
# Commissioned by Interreg NOMADe Project
import matplotlib.pyplot as plt
import numpy as np
import csv
import math as m
def load_measurement_data(number_of_lines, y, path):
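    # Fill y so that y[i] collects column i of the CSV file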
for i in range(number_of_lines):
y.append([])
with open(path, 'r') as csvfile:
plots = csv.reader(csvfile, delimiter=',')
for row in plots:
for i in range(number_of_lines):
y[i].append(float(row[i]))
def get_data_lines(y):
return len(y[0])
def get_no_connected_sensors(y, total_number_of_data_rows, sensor_slot_data_available):
for i in range(1, 7):
for j in range(total_number_of_data_rows):
if y[0][j] == i:
sensor_slot_data_available[i - 1] = 1
break
return sensor_slot_data_available.count(1)
def plot_quaternions_all_sensors(connected_sensors, sampling_frequency, sample_list, plot_name):
lab = ["q0", "q1", "q2", "q3"]
color = ["blue", "green", "cyan", "grey"]
fig = plt.figure(figsize=(18, 9))
for j in range(6):
if connected_sensors[j]:
samples = len(sample_list[j][0])
x = np.linspace(0, samples / 1000, samples)
plt.subplot(2, 3, j + 1)
plt.title('Sensor slot %s [%s Hz]' % (int(j + 1), int(sampling_frequency)))
for i in range(4):
plt.plot(x, sample_list[j][i], label=lab[i], color=color[i])
plt.ylabel('Value [-]')
plt.xlabel('Samples [KS]')
plt.tight_layout()
plt.legend()
plt.grid(True)
fig.savefig(plot_name, dpi=100)
def plot_ypr_all_sensors(ypr_sample_list, sampling_frequency, connected_sensors, plot_name):
lab = ["Yaw", "Pitch", "Roll"]
color = ["blue", "green", "cyan"]
fig = plt.figure(figsize=(18, 9))
for j in range(6):
if connected_sensors[j]:
samples = len(ypr_sample_list[j][0])
x = np.linspace(0, samples / 1000, samples)
plt.subplot(2, 3, j + 1)
plt.title('Sensor slot %s [%s Hz]' % (int(j+1), int(sampling_frequency)))
for i in range(3):
plt.plot(x, ypr_sample_list[j][i], label=lab[i], color=color[i])
plt.ylabel('Degrees [°]')
plt.xlabel('Samples [KS]')
plt.tight_layout()
plt.legend()
plt.grid(True)
fig.savefig(plot_name, dpi=100)
def convert_txt_file(sample_list, y, connected_sensors, no_samples, tot_no_datarows):
no_variables = 4
    # Create a 3-dimensional list
    create_3d_list(sample_list, no_samples, no_variables)
    # Separate the samples into the 3-dimensional list
for i in range(0, 6):
if connected_sensors[i]:
u = 0
for j in range(tot_no_datarows):
if y[0][j] == i + 1:
for k in range(no_variables):
sample_list[i][k][u] = y[k + 3][j]
u = u + 1
def create_3d_list(sample_list, no_samples, no_variables):
for i in range(6):
sample_list.append([])
for j in range(no_variables):
sample_list[i].append([])
for k in range(no_samples):
sample_list[i][j].append(0)
def convert_sample_list_ypr(sample_list, new_sample_list, connected_sensors):
for k in range(6):
if connected_sensors[k]:
transp = np.array(sample_list[k])
sens_samples = np.transpose(transp)
clms = len(transp[0])
sens_samples_ypr = np.zeros((clms, 3))
for i in range(clms):
convert_quaternion_sample_to_ypr(sens_samples[i], sens_samples_ypr[i])
new_sample_list.append(np.transpose(sens_samples_ypr).tolist())
else:
new_sample_list.append([])
def convert_sample_list_ypr_degrees(ypr_sample_list, connected_sensors):
for i in range(6):
if connected_sensors[i]:
for k in range(3):
for l in range(len(ypr_sample_list[i][k])):
ypr_sample_list[i][k][l] = ypr_sample_list[i][k][l] * 180/m.pi + 180
def convert_quaternion_sample_to_ypr(data, new_data):
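    # The sensor delivers 16-bit fixed-point quaternions; 16384 (2**14)
    # represents 1.0 (an assumption based on the common DMP q14 format).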
q = np.zeros((4, 1))
q[0] = data[0] / 16384.0
q[1] = data[1] / 16384.0
q[2] = data[2] / 16384.0
q[3] = data[3] / 16384.0
gravity = np.zeros((3, 1))
gravity[0] = 2 * (q[1] * q[3] - q[0] * q[2]) #x
gravity[1] = 2 * (q[0] * q[1] + q[2] * q[3]) #y
gravity[2] = q[0] * q[0] - q[1] * q[1] - q[2] * q[2] + q[3] * q[3] #z
# yaw: (about Z axis)
new_data[0] = m.atan2(2 * q[1] * q[2] - 2 * q[0] * q[3], 2 * q[0] * q[0] + 2 * q[1] * q[1] - 1)
# pitch: (nose up/down, about Y axis)
new_data[1] = m.atan2(gravity[0], m.sqrt(gravity[1] * gravity[1] + gravity[2] * gravity[2]))
# roll: (tilt left/right, about X axis)
new_data[2] = m.atan2(gravity[1], gravity[2])
if gravity[2] < 0:
if new_data[1] > 0:
new_data[1] = m.pi - new_data[1]
else:
new_data[1] = -m.pi - new_data[1] | UTF-8 | Python | false | false | 5,659 | py | 102 | functions.py | 14 | 0.510251 | 0.478614 | 0 | 168 | 32.684524 | 99 |
v0001/python_dojang | 5,188,320,499,233 | c773c91f4f9bb37fd46a4b5b40cceeb1ba511368 | b5244df01da4feacb4fb027387fea650e4752065 | /Easy EX/ex1.py | abfbc9395518443584640a678960cf86be1d323c | []
| no_license | https://github.com/v0001/python_dojang | 122f517939f432e5f2a0fab971b9dc776a941d4e | dba3d4bab85f95b68f68786fb8fcca616a055c2c | refs/heads/master | 2022-12-13T15:06:52.942858 | 2020-08-31T13:09:32 | 2020-08-31T13:09:32 | 291,714,164 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | print('Hello World')
print(int(3.3)) # number (int() truncates the fraction)
print(divmod(5,2))
a, b = divmod(7,3)
print(a,b)
| UTF-8 | Python | false | false | 100 | py | 2 | ex1.py | 2 | 0.59375 | 0.53125 | 0 | 8 | 10.75 | 20 |
adhaka/summers | 3,796,751,132,571 | 4a329b6a07b81f3927dab32da4e90700e7ec3051 | c51bd00a697e04fb429ec80a1648ed3335b93ea1 | /pythonwork/linearRegression.py | 0f3ace89d2371a8752756584c67a89a3e2b474ed | []
| no_license | https://github.com/adhaka/summers | c8282c06e5fb10d8084a8151a83df0e5ee7f1355 | ae59dc770091776abf8eea156b4cb0702158567a | refs/heads/master | 2017-12-21T19:15:50.029085 | 2015-08-31T10:50:00 | 2015-08-31T10:50:00 | 38,587,234 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import numpy as np
import random
import theano
import theano.tensor as T
from theano import function
class LinearRegression():
def __init__(self, x, y):
if x.shape[0] != len(y):
raise Exception("x and y are not of the same length")
self.x = x
self.y = y
def train(self):
iters = 500
train = self.compileTrain()
for i in xrange(iters):
train(self.x, self.y)
def compileTrain(self):
x = T.dmatrix('x')
y = T.vector('y')
self.dims = self.x.shape[1]
        self.w = theano.shared(value=np.zeros(self.dims, dtype=theano.config.floatX), name='w')
self.b = theano.shared(value=0., name='b')
# estimate = self.model(self.w)
# cost = self.cost(estimate)
estimate = T.dot(x, self.w) + self.b
rms = (y - estimate) ** 2
cost = rms.mean()
gw, gb = T.grad(cost=cost, wrt=[self.w, self.b])
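        # Plain gradient descent: step each parameter by -0.01 * gradient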
        updates = [[self.w, self.w - gw * 0.01], [self.b, self.b - gb * 0.01]]
        train = theano.function(inputs=[x, y], outputs=[estimate, rms], updates=updates, allow_input_downcast=True)
return train
# def __str__(self):
# pass
# return "Weight is:" +self.w.get_value() + " " + "bias is:" + self.b.get_value()
def prettyprint(self):
print self.w.get_value()
print self.b.get_value()
print self.model()
def model(self):
model = T.dot(self.x, self.w) + self.b
return model
def cost(self, estimate):
cost = (self.y - estimate) ** 2
return cost.mean()
if __name__ == "__main__":
x = np.random.randn(100,5)
y = np.random.random(size = 100)
LR1 = LinearRegression(x, y)
LR1.train()
LR1.prettyprint()
print LR1
| UTF-8 | Python | false | false | 1,571 | py | 10 | linearRegression.py | 10 | 0.624443 | 0.60853 | 0 | 74 | 20.202703 | 111 |
gfcastellano/Agenda_Leonel | 13,709,535,636,300 | 47b4d98e54657894c074dfb3908fce674a369f48 | 811ef9d490209900a5e1e272d0a4d5da7ded3a7c | /kivymd/uix/snackbar.py | f2ed3453fa96fc8d8da6130696173fc1da240c69 | []
| permissive | https://github.com/gfcastellano/Agenda_Leonel | c98823b092706e46ec25d2ecb999ce30969a003d | 543dd46e1836410ec1ad7093ef4a07c2f84678a6 | refs/heads/master | 2022-12-05T18:14:22.562930 | 2020-07-28T19:17:55 | 2020-07-28T19:17:55 | 269,536,042 | 1 | 0 | MIT | false | 2020-08-26T18:54:48 | 2020-06-05T05:09:44 | 2020-08-08T14:56:36 | 2020-08-26T18:54:47 | 15,448 | 1 | 0 | 0 | Python | false | false | """
Components/Snackbar
===================
.. seealso::
`Material Design spec, Snackbars <https://material.io/components/snackbars>`_
.. rubric:: Snackbars provide brief messages about app processes at the bottom
of the screen.
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/snackbar.png
:align: center
Usage
-----
.. code-block:: python
from kivy.lang import Builder
from kivymd.app import MDApp
KV = '''
#:import Snackbar kivymd.uix.snackbar.Snackbar
Screen:
MDRaisedButton:
text: "Create simple snackbar"
on_release: Snackbar(text="This is a snackbar!").show()
pos_hint: {"center_x": .5, "center_y": .5}
'''
class Test(MDApp):
def build(self):
return Builder.load_string(KV)
Test().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/snackbar-simple.gif
:align: center
Usage with padding
------------------
.. code-block:: python
Snackbar(text="This is a snackbar!", padding="20dp").show()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/snackbar-padding.gif
:align: center
Usage with button
-----------------
.. code-block:: python
Snackbar(
text="This is a snackbar",
button_text="BUTTON",
button_callback=app.callback
).show()
.. code-block:: python
def callback(self, instance):
from kivymd.toast import toast
toast(instance.text)
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/snackbar-button.gif
:align: center
Using a button with custom color
--------------------------------
.. code-block:: python
Snackbar(
text="This is a snackbar!",
padding="20dp",
button_text="ACTION",
button_color=(1, 0, 1, 1)
).show()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/snackbar-button-custom-color.gif
:align: center
Custom usage
------------
.. code-block:: python
from kivy.lang import Builder
from kivy.animation import Animation
from kivy.clock import Clock
from kivy.metrics import dp
from kivymd.app import MDApp
from kivymd.uix.snackbar import Snackbar
KV = '''
Screen:
MDFloatingActionButton:
id: button
x: root.width - self.width - dp(10)
y: dp(10)
on_release: app.snackbar_show()
'''
class Test(MDApp):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.screen = Builder.load_string(KV)
self.snackbar = None
self._interval = 0
def build(self):
return self.screen
def wait_interval(self, interval):
self._interval += interval
if self._interval > self.snackbar.duration:
anim = Animation(y=dp(10), d=.2)
anim.start(self.screen.ids.button)
Clock.unschedule(self.wait_interval)
self._interval = 0
self.snackbar = None
def snackbar_show(self):
if not self.snackbar:
self.snackbar = Snackbar(text="This is a snackbar!")
self.snackbar.show()
anim = Animation(y=dp(72), d=.2)
anim.bind(on_complete=lambda *args: Clock.schedule_interval(
self.wait_interval, 0))
anim.start(self.screen.ids.button)
Test().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/snackbar-custom-usage.gif
:align: center
Custom Snackbar
---------------
.. code-block:: python
from kivy.lang import Builder
from kivy.properties import StringProperty
from kivymd.app import MDApp
from kivymd.uix.snackbar import Snackbar
KV = '''
<-Snackbar>
MDCard:
id: box
size_hint_y: None
height: dp(58)
spacing: dp(5)
padding: dp(10)
y: -self.height
x: root.padding
md_bg_color: get_color_from_hex('323232')
radius: (5, 5, 5, 5) if root.padding else (0, 0, 0, 0)
elevation: 11 if root.padding else 0
MDIconButton:
pos_hint: {'center_y': .5}
icon: root.icon
opposite_colors: True
MDLabel:
id: text_bar
size_hint_y: None
height: self.texture_size[1]
text: root.text
font_size: root.font_size
theme_text_color: 'Custom'
text_color: get_color_from_hex('ffffff')
shorten: True
shorten_from: 'right'
pos_hint: {'center_y': .5}
Screen:
MDRaisedButton:
text: "SHOW"
pos_hint: {"center_x": .5, "center_y": .45}
on_press: app.show()
'''
class CustomSnackbar(Snackbar):
icon = StringProperty()
class Test(MDApp):
def build(self):
return Builder.load_string(KV)
def show(self):
CustomSnackbar(
text="This is a snackbar!",
icon="information",
padding="20dp",
button_text="ACTION",
button_color=(1, 0, 1, 1)
).show()
Test().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/snackbar-custom.png
:align: center
"""
__all__ = ("Snackbar",)
from kivy.animation import Animation
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.lang import Builder
from kivy.metrics import dp
from kivy.properties import (
ListProperty,
NumericProperty,
ObjectProperty,
StringProperty,
)
from kivymd.uix.button import MDFlatButton
from kivymd.uix.floatlayout import MDFloatLayout
Builder.load_string(
"""
#:import get_color_from_hex kivy.utils.get_color_from_hex
<Snackbar>
MDCard:
id: box
size_hint_y: None
height: dp(58)
spacing: dp(5)
padding: dp(10)
y: -self.height
x: root.padding
md_bg_color: get_color_from_hex('323232')
radius: (5, 5, 5, 5) if root.padding else (0, 0, 0, 0)
elevation: 11 if root.padding else 0
MDLabel:
id: text_bar
size_hint_y: None
height: self.texture_size[1]
text: root.text
font_size: root.font_size
theme_text_color: 'Custom'
text_color: get_color_from_hex('ffffff')
shorten: True
shorten_from: 'right'
pos_hint: {'center_y': .5}
"""
)
class Snackbar(MDFloatLayout):
text = StringProperty()
"""The text that will appear in the snackbar.
:attr:`text` is a :class:`~kivy.properties.StringProperty`
and defaults to `''`.
"""
font_size = NumericProperty("15sp")
"""The font size of the text that will appear in the snackbar.
:attr:`font_size` is a :class:`~kivy.properties.NumericProperty` and
defaults to `'15sp'`.
"""
button_text = StringProperty()
"""The text that will appear in the snackbar's button.
.. Note::
        If this variable is an empty string, the snackbar will have no button.
:attr:`button_text` is a :class:`~kivy.properties.StringProperty`
and defaults to `''`.
"""
button_callback = ObjectProperty()
"""The callback that will be triggered when the snackbar's
button is pressed.
.. Note::
If this variable is None, the snackbar will have no button.
:attr:`button_callback` is a :class:`~kivy.properties.ObjectProperty`
and defaults to `None`.
"""
button_color = ListProperty()
"""Button color.
:attr:`button_color` is a :class:`~kivy.properties.ListProperty`
and defaults to `[]`.
"""
duration = NumericProperty(3)
"""The amount of time that the snackbar will stay on screen for.
:attr:`duration` is a :class:`~kivy.properties.NumericProperty`
and defaults to `3`.
"""
padding = NumericProperty("0dp")
"""Snackbar padding.
:attr:`padding` is a :class:`~kivy.properties.NumericProperty`
and defaults to `'0dp'`.
"""
_interval = 0
def __init__(self, **kwargs):
super().__init__(**kwargs)
if self.button_text != "":
button = MDFlatButton(text=self.button_text)
button.text_color = (
(1, 1, 1, 1) if not self.button_color else self.button_color
)
self.ids.box.add_widget(button)
if self.button_callback:
button.bind(on_release=self.button_callback)
def show(self):
"""Show the snackbar."""
def wait_interval(interval):
self._interval += interval
if self._interval > self.duration:
anim = Animation(y=-self.ids.box.height, d=0.2)
anim.bind(
on_complete=lambda *args: Window.parent.remove_widget(self)
)
anim.start(self.ids.box)
Clock.unschedule(wait_interval)
self._interval = 0
self.size_hint_x = None
self.width = Window.width - dp(self.padding) * 2
Window.parent.add_widget(self)
anim = Animation(y=self.padding, d=0.2)
anim.bind(
on_complete=lambda *args: Clock.schedule_interval(wait_interval, 0)
)
anim.start(self.ids.box)
| UTF-8 | Python | false | false | 9,628 | py | 91 | snackbar.py | 73 | 0.5699 | 0.55941 | 0 | 370 | 25.021622 | 114 |