BugZoid committed
Commit 7384c00 · verified · 1 Parent(s): 3251884

Update app.py

Files changed (1): app.py (+80 -58)
app.py CHANGED
@@ -3,6 +3,38 @@ from transformers import pipeline, GPT2LMHeadModel, GPT2Tokenizer
 import os
 import streamlit as st
 from datetime import datetime
+import time
+from tenacity import retry, stop_after_attempt, wait_exponential
+
+# Retry-wrapped function to fetch tweets
+@retry(
+    stop=stop_after_attempt(3),
+    wait=wait_exponential(multiplier=1, min=4, max=10),
+    retry=lambda e: isinstance(e, tweepy.errors.TooManyRequests)
+)
+def fetch_tweets(client, query, tweet_fields):
+    try:
+        tweets = client.search_recent_tweets(
+            query=query,
+            max_results=10,  # Reduced to 10 to avoid rate limits
+            tweet_fields=tweet_fields
+        )
+        return tweets
+    except tweepy.errors.TooManyRequests as e:
+        reset_time = int(e.response.headers.get('x-rate-limit-reset', 0))
+        wait_time = max(reset_time - time.time(), 0)
+        print(f"Rate limit atingido. Aguardando {wait_time:.0f} segundos...")
+        time.sleep(wait_time + 1)
+        raise e
+
+# Retry-wrapped function to post tweets
+@retry(
+    stop=stop_after_attempt(3),
+    wait=wait_exponential(multiplier=1, min=4, max=10),
+    retry=lambda e: isinstance(e, tweepy.errors.TooManyRequests)
+)
+def post_tweet(api, text):
+    return api.update_status(status=text)
 
 # Check environment variables
 required_vars = [
@@ -27,7 +59,8 @@ if missing_vars:
 
 # Twitter authentication for reading
 client = tweepy.Client(
-    bearer_token=os.getenv('TWITTER_BEARER_TOKEN')
+    bearer_token=os.getenv('TWITTER_BEARER_TOKEN'),
+    wait_on_rate_limit=True  # Important: waits automatically when the rate limit is hit
 )
 
 # Twitter authentication for posting
@@ -38,37 +71,35 @@ auth = tweepy.OAuth1UserHandler(
     os.getenv('TWITTER_ACCESS_TOKEN_SECRET')
 )
 
-api = tweepy.API(auth)
+api = tweepy.API(auth, wait_on_rate_limit=True)
 
 # Query and tweet-field configuration
 query = 'BBB25 -filter:retweets lang:pt -is:reply'
 tweet_fields = ['text', 'created_at', 'lang', 'public_metrics']
 
 try:
-    # Fetch tweets with the specified fields
-    tweets = client.search_recent_tweets(
-        query=query,
-        max_results=100,
-        tweet_fields=tweet_fields
-    )
+    with st.spinner('Buscando tweets...'):
+        tweets = fetch_tweets(client, query, tweet_fields)
 
     if not tweets.data:
-        print("Nenhum tweet encontrado")
-        st.error("Nenhum tweet encontrado para análise")
+        st.warning("Nenhum tweet encontrado")
        st.stop()
 
     # Sentiment analysis
-    sentiment_pipeline = pipeline(
-        'sentiment-analysis',
-        model='cardiffnlp/twitter-xlm-roberta-base-sentiment'
-    )
-
-    sentiments = []
-    for tweet in tweets.data:
-        # Extra check to make sure we only keep tweets in Portuguese
-        if hasattr(tweet, 'lang') and tweet.lang == 'pt':
-            result = sentiment_pipeline(tweet.text)
-            sentiments.append(result[0]['label'])
+    with st.spinner('Analisando sentimentos...'):
+        sentiment_pipeline = pipeline(
+            'sentiment-analysis',
+            model='cardiffnlp/twitter-xlm-roberta-base-sentiment'
+        )
+
+        sentiments = []
+        for tweet in tweets.data:
+            if hasattr(tweet, 'lang') and tweet.lang == 'pt':
+                result = sentiment_pipeline(tweet.text)
+                sentiments.append(result[0]['label'])
+
+            # Add a delay between iterations
+            time.sleep(1)
 
     # Compute the ratios
     if sentiments:
@@ -82,32 +113,33 @@ try:
         neutral_ratio = neutral / total
 
         # Generate a message with AI
-        tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
-        model = GPT2LMHeadModel.from_pretrained('gpt2')
-
-        if positive_ratio > 0.6:
-            prompt = "Write an exciting tweet about BBB25 with a positive tone in Portuguese."
-        elif negative_ratio > 0.6:
-            prompt = "Write an informative tweet about BBB25 with a neutral tone in Portuguese."
-        else:
-            prompt = "Write a buzzing tweet about BBB25 with an engaging tone in Portuguese."
-
-        # Generate text
-        input_ids = tokenizer.encode(prompt, return_tensors='pt')
-        outputs = model.generate(
-            input_ids,
-            max_length=25,
-            do_sample=True,
-            pad_token_id=tokenizer.eos_token_id
-        )
-
-        generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
-        generated_text = generated_text[:280]  # Limit to 280 characters
+        with st.spinner('Gerando novo tweet...'):
+            tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
+            model = GPT2LMHeadModel.from_pretrained('gpt2')
+
+            if positive_ratio > 0.6:
+                prompt = "Write an exciting tweet about BBB25 with a positive tone in Portuguese."
+            elif negative_ratio > 0.6:
+                prompt = "Write an informative tweet about BBB25 with a neutral tone in Portuguese."
+            else:
+                prompt = "Write a buzzing tweet about BBB25 with an engaging tone in Portuguese."
+
+            input_ids = tokenizer.encode(prompt, return_tensors='pt')
+            outputs = model.generate(
+                input_ids,
+                max_length=25,
+                do_sample=True,
+                pad_token_id=tokenizer.eos_token_id
+            )
+
+            generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
+            generated_text = generated_text[:280]
 
         try:
-            # Post to Twitter
-            api.update_status(status=generated_text)
-            print(f"Tweet postado com sucesso: {generated_text}")
+            # Post to Twitter with retry
+            with st.spinner('Postando tweet...'):
+                post_tweet(api, generated_text)
+                st.success("Tweet postado com sucesso!")
 
         # Streamlit interface
         st.title("Análise de Sentimentos - BBB25")
@@ -137,19 +169,9 @@ try:
         with open('posting_log.txt', 'a') as f:
             f.write(f"{str(log_entry)}\n")
 
-        except Exception as e:
-            st.error(f"Erro ao postar tweet: {str(e)}")
-            print(f"Erro ao postar: {e}")
-
-except tweepy.errors.BadRequest as e:
-    st.error(f"Erro na requisição ao Twitter: {str(e)}")
-    print(f"Erro na requisição: {str(e)}")
-except tweepy.errors.TweepyException as e:
-    st.error(f"Erro do Tweepy: {str(e)}")
-    print(f"Erro do Tweepy: {str(e)}")
 except Exception as e:
-    st.error(f"Erro inesperado: {str(e)}")
-    print(f"Erro inesperado: {str(e)}")
+    st.error(f"Erro: {str(e)}")
+    print(f"Erro: {e}")
 
 # Footer
 st.markdown("---")
@@ -160,4 +182,4 @@ st.markdown(
     </div>
     """,
     unsafe_allow_html=True
-)
+)
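
Note on the retry decorators introduced in this commit: tenacity invokes the `retry=` predicate with a `RetryCallState`, not with the raised exception, so `retry=lambda e: isinstance(e, tweepy.errors.TooManyRequests)` will most likely evaluate to False on every attempt and the helpers will not actually be retried. The stock predicate for this case is `retry_if_exception_type`. The sketch below is illustrative only and is not part of the commit; it assumes tweepy 4.x plus tenacity, and it leans on `wait_on_rate_limit=True`, which already makes tweepy sleep through rate-limit windows, so the manual `time.sleep` in `fetch_tweets` becomes largely redundant.

import tweepy
from tenacity import retry, stop_after_attempt, wait_exponential, retry_if_exception_type

@retry(
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=1, min=4, max=10),
    retry=retry_if_exception_type(tweepy.errors.TooManyRequests),  # predicate object instead of a lambda over the exception
    reraise=True,  # surface the original exception after the final attempt
)
def fetch_tweets(client, query, tweet_fields):
    # Same signature as the helper in the commit; retries only on HTTP 429.
    return client.search_recent_tweets(
        query=query,
        max_results=10,
        tweet_fields=tweet_fields,
    )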
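
Separately, `post_tweet` still goes through `api.update_status`, the v1.1 statuses/update endpoint; projects limited to v2 access typically receive a 403 from it. If that applies here, tweepy's v2 `Client.create_tweet` is the usual replacement. A minimal sketch, assuming the same OAuth 1.0a user credentials are reused (the consumer-key variable names below are hypothetical, since they are not visible in this diff):

import os
import tweepy

# Hypothetical env var names for the consumer key/secret; only the
# access-token names appear in the visible part of the diff.
client_v2 = tweepy.Client(
    consumer_key=os.getenv('TWITTER_API_KEY'),
    consumer_secret=os.getenv('TWITTER_API_SECRET'),
    access_token=os.getenv('TWITTER_ACCESS_TOKEN'),
    access_token_secret=os.getenv('TWITTER_ACCESS_TOKEN_SECRET'),
    wait_on_rate_limit=True,
)

def post_tweet(text):
    # v2 counterpart of api.update_status(status=text)
    return client_v2.create_tweet(text=text[:280])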