From e2dfa984037a4b8051ac0efcfdaa48f0d6c5ee59 Mon Sep 17 00:00:00 2001 From: Nunzi99 Date: Wed, 8 Oct 2025 20:58:30 +0200 Subject: [PATCH 01/11] Create x.py --- src/app/social/x.py | 74 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) create mode 100644 src/app/social/x.py diff --git a/src/app/social/x.py b/src/app/social/x.py new file mode 100644 index 0000000..ef97772 --- /dev/null +++ b/src/app/social/x.py @@ -0,0 +1,74 @@ +''' +THIS CURRENTLY DOES NOT WORK AS INTENDED DUE TO THE FACT THAT THE TWEETS AREN'T IN CHRONOLOGICAL ORDER +Usiamo l'API rettiwt per ottenere dati da X aggirando i limiti dell'API free +Questo potrebbe portare al ban dell'account anche se improbabile, non usare l'account personale +Per farlo funzionare è necessario installare npm in un container docker ed installarlo con npm install -g rettiwt-api dopo essersi connessi al docker +https://www.npmjs.com/package/rettiwt-api +''' + +import docker +import json +from .base import SocialWrapper, SocialPost, SocialComment +class XWrapper(SocialWrapper): + def __init__(self): + ''' + This wrapper uses the rettiwt API to get data from X in order to avoid the rate limits of the free X API, + even if improbable this could lead to a ban so do not use the personal account, + In order to work a docker container with npm installed is needed, it's also necessary to install rettiwt in the container with npm install -g rettiwt-api + ''' + # This is the list of users that can be interesting + # To get the ID of a new user is necessary to search it on X, copy the url and insert it in a service like "https://get-id-x.foundtt.com/en/" + self.users = [ + 'watcherguru', + 'Cointelegraph', + 'BTC_Archive', + 'elonmusk' + ] + self.api_key = "ADD_API_KEY" + ''' + Per ottenere questa API è necessario seguire i seguenti passaggi: + - Installare l'estensione su chrome X Auth Helper + - Dargli il permesso di girare in incognito + - Andare in incognito ed entrare sul proprio account X + - 
Aprire l'estensione e fare "get key" + - Chiudere chrome + DOvrebbe funzionare per 5 anni o finchè non si si fa il log out, in ogni caso si può ricreare + ''' + # Connection to the docker deamon + self.client = docker.from_env() + # Connect with the relative container + self.container = self.client.containers.get("node_rettiwt") + self.social_posts: list[SocialPost] = [] + def get_top_crypto_posts(self, limit = 5) -> list[SocialPost]: #-> list[SocialPost]: + ''' + Get the top crypto tweets from X, the limit is reffered to the number of tweets for each user + ''' + social_posts: list[SocialPost] = [] + for user in self.users: + # This currently doesn't work as intended since it returns the posts in random order + tweets = self.container.exec_run("rettiwt -k" + self.api_key + " tweet search -f " + str(user), tty=True) + tweets = tweets.output.decode() + tweets = json.loads(tweets) + tweets: list[dict] = tweets['list'] + tweets = tweets[:limit] + for tweet in tweets: + social_post = SocialPost() + social_post.time = tweet['createdAt'] + social_post.title = str(user) + " tweeted: " + social_post.description = tweet['fullText'] + social_posts.append(social_post) + self.social_posts = social_posts + return social_posts + def print(self): + i = 1 + for post in self.social_posts: + print(f"Post {i}:") + print(f"Time: {post.time}") + print(f"Title: {post.title}") + print(f"Description: {post.description}") + print() + i += 1 + +# x_wrapper = XWrapper() +# social_posts = x_wrapper.get_top_crypto_posts(limit=3) +# x_wrapper.print() \ No newline at end of file -- 2.49.1 From ed6e5f6e3dc91abfbceba2bb8f33698377307e22 Mon Sep 17 00:00:00 2001 From: Nunzi99 Date: Fri, 10 Oct 2025 20:32:45 +0200 Subject: [PATCH 02/11] Imports and APIs for social Aggiunta la chiave per X in .env.example Aggiunto docker a pyproject.toml --- .env.example | 10 ++++++++++ pyproject.toml | 1 + src/app/social/x.py | 15 +++------------ 3 files changed, 14 insertions(+), 12 deletions(-) diff --git 
a/.env.example b/.env.example index fd9a427..ef15542 100644 --- a/.env.example +++ b/.env.example @@ -38,3 +38,13 @@ CRYPTOPANIC_API_KEY= # https://www.reddit.com/prefs/apps REDDIT_API_CLIENT_ID= REDDIT_API_CLIENT_SECRET= + + +# Per ottenere questa API è necessario seguire i seguenti passaggi: # - Installare l'estensione su chrome X Auth Helper # - Dargli il permesso di girare in incognito # - Andare in incognito ed entrare sul proprio account X # - Aprire l'estensione e fare "get key" # - Chiudere chrome # Dovrebbe funzionare per 5 anni o finché non si fa il log out, in ogni caso si può ricreare +X_API_KEY= \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index d039c6b..74fa4eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ dependencies = [ # API di social media "praw", # Reddit + "docker", # Necessario per usare Rettiwt per X ] [tool.pytest.ini_options] diff --git a/src/app/social/x.py b/src/app/social/x.py index ef97772..3dfba03 100644 --- a/src/app/social/x.py +++ b/src/app/social/x.py @@ -1,11 +1,11 @@ ''' -THIS CURRENTLY DOES NOT WORK AS INTENDED DUE TO THE FACT THAT THE TWEETS AREN'T IN CHRONOLOGICAL ORDER Usiamo l'API rettiwt per ottenere dati da X aggirando i limiti dell'API free Questo potrebbe portare al ban dell'account anche se improbabile, non usare l'account personale Per farlo funzionare è necessario installare npm in un container docker ed installarlo con npm install -g rettiwt-api dopo essersi connessi al docker https://www.npmjs.com/package/rettiwt-api ''' +import os import docker import json from .base import SocialWrapper, SocialPost, SocialComment @@ -24,16 +24,7 @@ class XWrapper(SocialWrapper): 'BTC_Archive', 'elonmusk' ] - self.api_key = "ADD_API_KEY" - ''' - Per ottenere questa API è necessario seguire i seguenti passaggi: - - Installare l'estensione su chrome X Auth Helper - - Dargli il permesso di girare in incognito - - Andare in incognito ed entrare sul proprio account X - - Aprire 
l'estensione e fare "get key" - - Chiudere chrome - DOvrebbe funzionare per 5 anni o finchè non si si fa il log out, in ogni caso si può ricreare - ''' + self.api_key = os.getenv("X_API_KEY") # Connection to the docker deamon self.client = docker.from_env() # Connect with the relative container @@ -41,7 +32,7 @@ class XWrapper(SocialWrapper): self.social_posts: list[SocialPost] = [] def get_top_crypto_posts(self, limit = 5) -> list[SocialPost]: #-> list[SocialPost]: ''' - Get the top crypto tweets from X, the limit is reffered to the number of tweets for each user + Otteniamo i post più recenti da X, il limite si applica al numero di post per ogni utente nella lista interna ''' social_posts: list[SocialPost] = [] for user in self.users: -- 2.49.1 From ed6a4d12a6640a05f80d7043ea92ceb5db0e3d27 Mon Sep 17 00:00:00 2001 From: Nunzi99 Date: Sat, 11 Oct 2025 16:37:48 +0200 Subject: [PATCH 03/11] Aggiunto Wrapper per 4 chan --- src/app/social/__init__.py | 6 +- src/app/social/chan.py | 161 +++++++++++++++++++++++++++++++++++++ src/app/social/x.py | 1 + 3 files changed, 166 insertions(+), 2 deletions(-) create mode 100644 src/app/social/chan.py diff --git a/src/app/social/__init__.py b/src/app/social/__init__.py index 9ce3708..10f08f5 100644 --- a/src/app/social/__init__.py +++ b/src/app/social/__init__.py @@ -2,8 +2,10 @@ from agno.tools import Toolkit from app.utils.wrapper_handler import WrapperHandler from .base import SocialPost, SocialWrapper from .reddit import RedditWrapper +from .x import XWrapper +from .chan import ChanWrapper -__all__ = ["SocialAPIsTool", "SOCIAL_INSTRUCTIONS", "RedditWrapper"] +__all__ = ["SocialAPIsTool", "SOCIAL_INSTRUCTIONS", "RedditWrapper", "XWrapper", "ChanWrapper"] class SocialAPIsTool(SocialWrapper, Toolkit): @@ -25,7 +27,7 @@ class SocialAPIsTool(SocialWrapper, Toolkit): - RedditWrapper. 
""" - wrappers = [RedditWrapper] + wrappers = [RedditWrapper, XWrapper, ChanWrapper] self.wrapper_handler: WrapperHandler[SocialWrapper] = WrapperHandler.build_wrappers(wrappers) Toolkit.__init__( diff --git a/src/app/social/chan.py b/src/app/social/chan.py new file mode 100644 index 0000000..a1cecca --- /dev/null +++ b/src/app/social/chan.py @@ -0,0 +1,161 @@ +''' +Usiamo le API di 4chan per ottenere un catalogo di threads dalla board /biz/ +''' +import requests +import re +import html +from bs4 import BeautifulSoup + +from .base import SocialWrapper, SocialPost, SocialComment +class ChanWrapper(SocialWrapper): + def __init__(self): + super().__init__() + + def get_top_crypto_posts(self, limit: int = 5) -> list[SocialPost]: + # Url dell'API della board /biz/ + json_url = 'https://a.4cdn.org/biz/catalog.json' + json = requests.get(json_url) + + if json.status_code == 200: + page_list: list[dict] = json.json() # Questa lista contiene un dizionario per ogni pagina della board di questo tipo {"page": page_number, "threads": [{thread_data}]} + else: + print("Error:", json.status_code) + + # Lista dei post + social_posts: list[SocialPost] = [] + + for page in page_list: + thread_list: list[dict] = page['threads'] + ''' + Per ogni thread ci interessano i seguenti campi: + - "sticky": ci indica se il thread è stato fissato o meno, se non è presente vuol dire che non è stato fissato, i thread sticky possono essere ignorati + - "now": la data di creazione del thread tipo "MM/GG/AA(day)hh:mm:ss", ci interessa solo MM/GG/AA + - "name": il nome dell'utente + - "sub": il nome del thread, può contenere anche elementi di formattazione html che saranno da ignorare, potrebbe non essere presente + - "com": il commento del thread, può contenere anche elementi di formattazione html che saranno da ignorare + - "last_replies": una lista di dizionari conteneti le risposte al thread principale, sono strutturate similarmente al thread, di queste ci interessano i seguenti campi: + - "now": 
la data di creazione della risposta tipo "MM/GG/AA(day)hh:mm:ss", ci interessa solo MM/GG/AA + - "name": il nome dell'utente + - "com": il commento della risposta, possono contenere anche elementi di formattazione html che saranno da ignorare + ''' + for thread in thread_list: + # Ignoriamo i dizionari dei thread nei quali è presente la key "sticky" + if 'sticky' in thread: + continue + else: + # print(thread) + # Otteniamo la data + time: str = thread['now'] + # Otteniamo dalla data il mese (primi 2 caratteri) + month: str = time[:2] + # Otteniamo dalla data il giorno (caratteri 4 e 5) + day: str = time[4:6] + # Otteniamo dalla data l'anno (caratteri 7 e 8) + year: str = time[7:9] + # Ricreiamo la data completa come dd/mm/yy + time: str = day + '/' + month + '/' + year + + # Otteniamo il nome dell'utente + name: str = thread['name'] + # Proviamo a recuperare il titolo + try: + # Otteniamo il titolo del thread contenuto nella key "sub" + title: str = thread['sub'] + # Ripuliamo la stringa + # Decodifichiamo caratteri ed entità HTML + html_entities = html.unescape(title) + # Rimuoviamo caratteri HTML + soup = BeautifulSoup(html_entities, 'html.parser') + title = soup.get_text(separator=" ") + # Rimuoviamo backlash e doppi slash + title = re.sub(r"[\\/]+", "/", title) + # Rimuoviamo spazi in piú + title = re.sub(r"\s+", " ", title).strip() + # Aggiungiamo il nome dell'utente al titolo + title = name + " posted: " + title + except: + title: str = name + " posted" + + try: + # Otteniamo il commento del thread contenuto nella key "com" + thread_description: str = thread['com'] + # Ripuliamo la stringa + # Decodifichiamo caratteri ed entità HTML + html_entities = html.unescape(thread_description) + # Rimuoviamo caratteri HTML + soup = BeautifulSoup(html_entities, 'html.parser') + thread_description = soup.get_text(separator=" ") + # Rimuoviamo backlash e doppi slash + thread_description = re.sub(r"[\\/]+", "/", thread_description) + # Rimuoviamo spazi in piú + 
thread_description = re.sub(r"\s+", " ", thread_description).strip() + except: + thread_description = None + # Creiamo la lista delle risposte al thread + try: + response_list: list[dict] = thread['last_replies'] + except: + response_list: list[dict] = [] + # Creiamo la lista che conterrà i commenti + comments_list: list[SocialComment] = [] + + # Otteniamo i primi 5 commenti + i = 0 + for response in response_list: + # Otteniamo la data + time: str = response['now'] + # print(time) + # Otteniamo dalla data il mese (primi 2 caratteri) + month: str = time[:2] + # Otteniamo dalla data il giorno (caratteri 4 e 5) + day: str = time[3:5] + # Otteniamo dalla data l'anno (caratteri 7 e 8) + year: str = time[6:8] + # Ricreiamo la data completa come dd/mm/yy + time: str = day + '/' + month + '/' + year + + try: + # Otteniamo il commento della risposta contenuto nella key "com" + comment_description: str = response['com'] + # Ripuliamo la stringa + # Decodifichiamo caratteri ed entità HTML + html_entities = html.unescape(comment_description) + # Rimuoviamo caratteri HTML + soup = BeautifulSoup(html_entities, 'html.parser') + comment_description = soup.get_text(separator=" ") + # Rimuoviamo backlash e doppi slash + comment_description = re.sub(r"[\\/]+", "/", comment_description) + # Rimuoviamo spazi in piú + comment_description = re.sub(r"\s+", " ", comment_description).strip() + except: + comment_description = None + # Se la descrizione del commento non esiste, passiamo al commento successivo + if comment_description is None: + continue + else: + # Creiamo il SocialComment + social_comment: SocialComment = SocialComment( + time=time, + description=comment_description + ) + comments_list.append(social_comment) + i += 1 + if i >= 5: + break + if thread_description is None: + continue + else: + # Creiamo il SocialPost + social_post: SocialPost = SocialPost( + time=time, + title=title, + description=thread_description, + comments=comments_list + ) + 
social_posts.append(social_post) + + return social_posts[:limit] +# Stampiamo i post +# chan_wrapper = ChanWrapper() +# social_posts = chan_wrapper.get_top_crypto_posts() +# print(len(social_posts)) diff --git a/src/app/social/x.py b/src/app/social/x.py index 3dfba03..200f884 100644 --- a/src/app/social/x.py +++ b/src/app/social/x.py @@ -25,6 +25,7 @@ class XWrapper(SocialWrapper): 'elonmusk' ] self.api_key = os.getenv("X_API_KEY") + assert self.api_key, "X_API_KEY environment variable not set" # Connection to the docker deamon self.client = docker.from_env() # Connect with the relative container -- 2.49.1 From e2125d5815860f812acc0dddc0a8a34b05dde710 Mon Sep 17 00:00:00 2001 From: Nunzi99 Date: Fri, 17 Oct 2025 15:22:39 +0200 Subject: [PATCH 04/11] added_predictors --- .env.example | 2 ++ pyproject.toml | 3 ++ src/app.py | 1 + src/app/models.py | 27 ++++++++++++++--- uv.lock | 76 +++++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 105 insertions(+), 4 deletions(-) diff --git a/.env.example b/.env.example index ef15542..4b4c3bd 100644 --- a/.env.example +++ b/.env.example @@ -5,6 +5,8 @@ # https://makersuite.google.com/app/apikey GOOGLE_API_KEY= +OPENAI_API_KEY= +DEEPSEEK_API_KEY= ############################################################################### # Configurazioni per gli agenti di mercato ############################################################################### diff --git a/pyproject.toml b/pyproject.toml index 74fa4eb..59fd0c1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,9 @@ dependencies = [ # Modelli supportati e installati (aggiungere qui sotto quelli che si vogliono usare) "google-genai", "ollama", + + "openai", + "deepseek", # API di exchange di criptovalute "coinbase-advanced-py", diff --git a/src/app.py b/src/app.py index 65c22cc..ed58311 100644 --- a/src/app.py +++ b/src/app.py @@ -51,6 +51,7 @@ if __name__ == "__main__": type="index", label="Modello da usare" ) + # Per qualche motivo deep seek non viene 
mostrato ma se si fa print(provider.choices) sembra esserci provider.change(fn=chat.choose_provider, inputs=provider, outputs=None) style = gr.Dropdown( diff --git a/src/app/models.py b/src/app/models.py index 4cc591d..0a90d71 100644 --- a/src/app/models.py +++ b/src/app/models.py @@ -5,6 +5,8 @@ from agno.agent import Agent from agno.models.base import Model from agno.models.google import Gemini from agno.models.ollama import Ollama +from agno.models.openai import OpenAIChat +from agno.models.deepseek import DeepSeek from agno.utils.log import log_warning from agno.tools import Toolkit from pydantic import BaseModel @@ -19,6 +21,8 @@ class AppModels(Enum): """ GEMINI = "gemini-2.0-flash" # API online GEMINI_PRO = "gemini-2.0-pro" # API online, più costoso ma migliore + GPT_4 = "gpt-4" + DEEPSEEK = "deepseek-chat" OLLAMA_GPT = "gpt-oss:latest" # + good - slow (13b) OLLAMA_QWEN = "qwen3:latest" # + good + fast (8b) OLLAMA_QWEN_4B = "qwen3:4b" # + fast + decent (4b) @@ -49,10 +53,20 @@ class AppModels(Enum): Controlla quali provider di modelli LLM online hanno le loro API keys disponibili come variabili d'ambiente e ritorna una lista di provider disponibili. """ + availables = [] if not os.getenv("GOOGLE_API_KEY"): log_warning("No GOOGLE_API_KEY set in environment variables.") - return [] - availables = [AppModels.GEMINI, AppModels.GEMINI_PRO] + else: + availables.append(AppModels.GEMINI) + availables.append(AppModels.GEMINI_PRO) + if not os.getenv("OPENAI_API_KEY"): + log_warning("No OPENAI_API_KEY set in environment variables.") + else: + availables.append(AppModels.GPT_4) + if not os.getenv("DEEPSEEK_API_KEY"): + log_warning("No DEEPSEEK_API_KEY set in environment variables.") + else: + availables.append(AppModels.DEEPSEEK) return availables @staticmethod @@ -63,7 +77,9 @@ class AppModels(Enum): d'ambiente e ritorna una lista di provider disponibili. L'ordine di preferenza è: 1. Gemini (Google) - 2. Ollama (locale) + 2. OpenAI + 3. DeepSeek + 4. 
Ollama (locale) """ availables = [ *AppModels.availables_online(), @@ -87,7 +103,10 @@ class AppModels(Enum): return Gemini(name, instructions=[instructions]) elif self in {model for model in AppModels if model.name.startswith("OLLAMA")}: return Ollama(name, instructions=[instructions]) - + elif self in {model for model in AppModels if model.name.startswith("GPT")}: + return OpenAIChat(name, instructions=[instructions]) + elif self in {model for model in AppModels if model.name.startswith("DEEPSEEK")}: + return DeepSeek(name, instructions=[instructions]) raise ValueError(f"Modello non supportato: {self}") def get_agent(self, instructions: str, name: str = "", output: BaseModel | None = None, tools: list[Toolkit] = []) -> Agent: diff --git a/uv.lock b/uv.lock index d8114d6..a76f77a 100644 --- a/uv.lock +++ b/uv.lock @@ -377,6 +377,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/40/cd/ef820662e0d87f46b829bba7e2324c7978e0153692bbd2f08f7746049708/ddgs-9.6.0-py3-none-any.whl", hash = "sha256:24120f1b672fd3a28309db029e7038eb3054381730aea7a08d51bb909dd55520", size = 41558, upload-time = "2025-09-17T13:27:08.99Z" }, ] +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + [[package]] name = "dnspython" version = "2.8.0" @@ -386,6 +395,20 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, ] +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, +] + [[package]] name = "docstring-parser" version = "0.17.0" @@ -776,6 +799,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "jiter" +version = "0.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, + { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, + { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, + { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, +] + [[package]] name = "lxml" version = "6.0.2" @@ -919,6 +962,25 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/b5/c1/edc9f41b425ca40b26b7c104c5f6841a4537bb2552bfa6ca66e81405bb95/ollama-0.6.0-py3-none-any.whl", hash = "sha256:534511b3ccea2dff419ae06c3b58d7f217c55be7897c8ce5868dfb6b219cf7a0", size = 14130, upload-time = "2025-09-24T22:46:01.19Z" }, ] +[[package]] +name = "openai" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/90/8f26554d24d63ed4f94d33c24271559863223a67e624f4d2e65ba8e48dca/openai-2.3.0.tar.gz", hash = "sha256:8d213ee5aaf91737faea2d7fc1cd608657a5367a18966372a3756ceaabfbd812", size = 589616, upload-time = "2025-10-10T01:12:50.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/5b/4be258ff072ed8ee15f6bfd8d5a1a4618aa4704b127c0c5959212ad177d6/openai-2.3.0-py3-none-any.whl", hash = "sha256:a7aa83be6f7b0ab2e4d4d7bcaf36e3d790874c0167380c5d0afd0ed99a86bd7b", size = 999768, upload-time = "2025-10-10T01:12:48.647Z" }, +] + [[package]] name = "orjson" version = "3.11.3" @@ -1298,6 +1360,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, ] +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, +] + [[package]] name = "pyyaml" version = "6.0.3" @@ -1605,12 +1677,14 @@ dependencies = [ { name = "agno" }, { name = "coinbase-advanced-py" }, { name = "ddgs" }, + { name = "docker" }, { name = "dotenv" }, { name = "gnews" }, { name = "google-genai" }, { name = "gradio" }, { name = "newsapi-python" }, { name = "ollama" }, + { name = "openai" }, { name = "praw" }, { name = "pytest" }, { name = "python-binance" }, @@ -1622,12 +1696,14 @@ requires-dist = [ { name = "agno" }, { name = "coinbase-advanced-py" }, { name = "ddgs" }, + { name = "docker" }, { name = "dotenv" }, { name = "gnews" }, { name = "google-genai" }, { name = "gradio" }, { name = "newsapi-python" }, { name = "ollama" }, + { name = "openai" }, { name = "praw" }, { name = "pytest" }, { name = "python-binance" }, -- 2.49.1 From 6b71a5a553e0e4fefd2c3d439fcdf02cc3250346 Mon Sep 17 00:00:00 2001 From: Nunzi99 Date: Mon, 20 Oct 2025 15:10:17 +0200 Subject: [PATCH 05/11] Soddisfatto Giacomo --- Dockerfile | 2 ++ pyproject.toml | 1 - src/app/social/chan.py | 38 -------------------------------------- src/app/social/x.py | 11 ++++++++--- uv.lock | 39 +++++++++++++-------------------------- 5 files changed, 23 insertions(+), 68 deletions(-) diff --git a/Dockerfile b/Dockerfile index 16868ac..b821525 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,6 +2,8 @@ # Infatti scegliamo l'immagine ufficiale di uv che ha già tutto configurato 
FROM ghcr.io/astral-sh/uv:python3.12-alpine +RUN apk add --update npm +RUN npm install -g rettiwt-api # Dopo aver definito la workdir mi trovo già in essa WORKDIR /app diff --git a/pyproject.toml b/pyproject.toml index 59fd0c1..f3f7ceb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,6 @@ dependencies = [ # API di social media "praw", # Reddit - "docker", # Necessario per usare Rettiwt per X ] [tool.pytest.ini_options] diff --git a/src/app/social/chan.py b/src/app/social/chan.py index a1cecca..901383f 100644 --- a/src/app/social/chan.py +++ b/src/app/social/chan.py @@ -43,97 +43,60 @@ class ChanWrapper(SocialWrapper): if 'sticky' in thread: continue else: - # print(thread) - # Otteniamo la data time: str = thread['now'] - # Otteniamo dalla data il mese (primi 2 caratteri) month: str = time[:2] - # Otteniamo dalla data il giorno (caratteri 4 e 5) day: str = time[4:6] - # Otteniamo dalla data l'anno (caratteri 7 e 8) year: str = time[7:9] - # Ricreiamo la data completa come dd/mm/yy time: str = day + '/' + month + '/' + year - # Otteniamo il nome dell'utente name: str = thread['name'] - # Proviamo a recuperare il titolo try: - # Otteniamo il titolo del thread contenuto nella key "sub" title: str = thread['sub'] - # Ripuliamo la stringa - # Decodifichiamo caratteri ed entità HTML html_entities = html.unescape(title) - # Rimuoviamo caratteri HTML soup = BeautifulSoup(html_entities, 'html.parser') title = soup.get_text(separator=" ") - # Rimuoviamo backlash e doppi slash title = re.sub(r"[\\/]+", "/", title) - # Rimuoviamo spazi in piú title = re.sub(r"\s+", " ", title).strip() - # Aggiungiamo il nome dell'utente al titolo title = name + " posted: " + title except: title: str = name + " posted" try: - # Otteniamo il commento del thread contenuto nella key "com" thread_description: str = thread['com'] - # Ripuliamo la stringa - # Decodifichiamo caratteri ed entità HTML html_entities = html.unescape(thread_description) - # Rimuoviamo caratteri HTML soup = 
BeautifulSoup(html_entities, 'html.parser') thread_description = soup.get_text(separator=" ") - # Rimuoviamo backlash e doppi slash thread_description = re.sub(r"[\\/]+", "/", thread_description) - # Rimuoviamo spazi in piú thread_description = re.sub(r"\s+", " ", thread_description).strip() except: thread_description = None - # Creiamo la lista delle risposte al thread try: response_list: list[dict] = thread['last_replies'] except: response_list: list[dict] = [] - # Creiamo la lista che conterrà i commenti comments_list: list[SocialComment] = [] # Otteniamo i primi 5 commenti i = 0 for response in response_list: - # Otteniamo la data time: str = response['now'] - # print(time) - # Otteniamo dalla data il mese (primi 2 caratteri) month: str = time[:2] - # Otteniamo dalla data il giorno (caratteri 4 e 5) day: str = time[3:5] - # Otteniamo dalla data l'anno (caratteri 7 e 8) year: str = time[6:8] - # Ricreiamo la data completa come dd/mm/yy time: str = day + '/' + month + '/' + year try: - # Otteniamo il commento della risposta contenuto nella key "com" comment_description: str = response['com'] - # Ripuliamo la stringa - # Decodifichiamo caratteri ed entità HTML html_entities = html.unescape(comment_description) - # Rimuoviamo caratteri HTML soup = BeautifulSoup(html_entities, 'html.parser') comment_description = soup.get_text(separator=" ") - # Rimuoviamo backlash e doppi slash comment_description = re.sub(r"[\\/]+", "/", comment_description) - # Rimuoviamo spazi in piú comment_description = re.sub(r"\s+", " ", comment_description).strip() except: comment_description = None - # Se la descrizione del commento non esiste, passiamo al commento successivo if comment_description is None: continue else: - # Creiamo il SocialComment social_comment: SocialComment = SocialComment( time=time, description=comment_description @@ -145,7 +108,6 @@ class ChanWrapper(SocialWrapper): if thread_description is None: continue else: - # Creiamo il SocialPost social_post: SocialPost = 
SocialPost( time=time, title=title, diff --git a/src/app/social/x.py b/src/app/social/x.py index 200f884..5d7f470 100644 --- a/src/app/social/x.py +++ b/src/app/social/x.py @@ -6,9 +6,10 @@ https://www.npmjs.com/package/rettiwt-api ''' import os -import docker import json -from .base import SocialWrapper, SocialPost, SocialComment +from .base import SocialWrapper, SocialPost +from shutil import which +import subprocess class XWrapper(SocialWrapper): def __init__(self): ''' @@ -26,10 +27,13 @@ class XWrapper(SocialWrapper): ] self.api_key = os.getenv("X_API_KEY") assert self.api_key, "X_API_KEY environment variable not set" + ''' # Connection to the docker deamon self.client = docker.from_env() # Connect with the relative container self.container = self.client.containers.get("node_rettiwt") + ''' + assert which('rettiwt') is not None, "Command `rettiwt` not installed" self.social_posts: list[SocialPost] = [] def get_top_crypto_posts(self, limit = 5) -> list[SocialPost]: #-> list[SocialPost]: ''' @@ -38,7 +42,8 @@ class XWrapper(SocialWrapper): social_posts: list[SocialPost] = [] for user in self.users: # This currently doesn't work as intended since it returns the posts in random order - tweets = self.container.exec_run("rettiwt -k" + self.api_key + " tweet search -f " + str(user), tty=True) + # tweets = self.container.exec_run("rettiwt -k" + self.api_key + " tweet search -f " + str(user), tty=True) + tweets = subprocess.run("rettiwt -k" + self.api_key + " tweet search -f " + str(user)) tweets = tweets.output.decode() tweets = json.loads(tweets) tweets: list[dict] = tweets['list'] diff --git a/uv.lock b/uv.lock index a76f77a..ad5365e 100644 --- a/uv.lock +++ b/uv.lock @@ -377,6 +377,17 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/40/cd/ef820662e0d87f46b829bba7e2324c7978e0153692bbd2f08f7746049708/ddgs-9.6.0-py3-none-any.whl", hash = "sha256:24120f1b672fd3a28309db029e7038eb3054381730aea7a08d51bb909dd55520", size = 41558, upload-time = 
"2025-09-17T13:27:08.99Z" }, ] +[[package]] +name = "deepseek" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/7b/bede06edf1a25a6ab06553b15f6abf8e912848dfa5f68514720d3e388550/deepseek-1.0.0-py3-none-any.whl", hash = "sha256:ee4175bfcb7ac1154369dbd86a4d8bc1809f6fa20e3e7baa362544567197cb3f", size = 4542, upload-time = "2025-01-03T08:06:23.887Z" }, +] + [[package]] name = "distro" version = "1.9.0" @@ -395,20 +406,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, ] -[[package]] -name = "docker" -version = "7.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pywin32", marker = "sys_platform == 'win32'" }, - { name = "requests" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, -] - [[package]] name = "docstring-parser" version = "0.17.0" @@ -1360,16 +1357,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = 
"sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, ] -[[package]] -name = "pywin32" -version = "311" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, - { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, -] - [[package]] name = "pyyaml" version = "6.0.3" @@ -1677,7 +1664,7 @@ dependencies = [ { name = "agno" }, { name = "coinbase-advanced-py" }, { name = "ddgs" }, - { name = "docker" }, + { name = "deepseek" }, { name = "dotenv" }, { name = "gnews" }, { name = "google-genai" }, @@ -1696,7 +1683,7 @@ requires-dist = [ { name = "agno" }, { name = "coinbase-advanced-py" }, { name = "ddgs" }, - { name = "docker" }, + { name = "deepseek" }, { name = "dotenv" }, { name = "gnews" }, { name = "google-genai" }, -- 2.49.1 From c7a58cd96094d2ab44bd3dc0ce5ab771b77f7242 Mon Sep 17 00:00:00 2001 From: Berack96 Date: Mon, 20 Oct 2025 15:45:49 +0200 Subject: [PATCH 06/11] Fixed typing in XWrapper --- src/app/api/social/x.py | 62 ++++++++++++----------------------------- 1 file changed, 18 insertions(+), 44 deletions(-) diff --git a/src/app/api/social/x.py 
b/src/app/api/social/x.py index 2f1d76e..5222092 100644 --- a/src/app/api/social/x.py +++ b/src/app/api/social/x.py @@ -1,23 +1,22 @@ -''' -Usiamo l'API rettiwt per ottenere dati da X aggirando i limiti dell'API free -Questo potrebbe portare al ban dell'account anche se improbabile, non usare l'account personale -Per farlo funzionare è necessario installare npm in un container docker ed installarlo con npm install -g rettiwt-api dopo essersi connessi al docker -https://www.npmjs.com/package/rettiwt-api -''' - import os import json import subprocess from shutil import which from app.api.core.social import SocialWrapper, SocialPost + class XWrapper(SocialWrapper): def __init__(self): ''' This wrapper uses the rettiwt API to get data from X in order to avoid the rate limits of the free X API, even if improbable this could lead to a ban so do not use the personal account, - In order to work a docker container with npm installed is needed, it's also necessary to install rettiwt in the container with npm install -g rettiwt-api + In order to work it is necessary to install the rettiwt cli tool, for more information visit the official documentation at https://www.npmjs.com/package/rettiwt-api ''' + + self.api_key = os.getenv("X_API_KEY") + assert self.api_key, "X_API_KEY environment variable not set" + assert which('rettiwt') is not None, "Command `rettiwt` not installed" + # This is the list of users that can be interesting # To get the ID of a new user is necessary to search it on X, copy the url and insert it in a service like "https://get-id-x.foundtt.com/en/" self.users = [ @@ -26,47 +25,22 @@ class XWrapper(SocialWrapper): 'BTC_Archive', 'elonmusk' ] - self.api_key = os.getenv("X_API_KEY") - assert self.api_key, "X_API_KEY environment variable not set" - ''' - # Connection to the docker deamon - self.client = docker.from_env() - # Connect with the relative container - self.container = self.client.containers.get("node_rettiwt") - ''' - assert which('rettiwt') is not 
None, "Command `rettiwt` not installed" - self.social_posts: list[SocialPost] = [] - def get_top_crypto_posts(self, limit = 5) -> list[SocialPost]: #-> list[SocialPost]: - ''' - Otteniamo i post più recenti da X, il limite si applica al numero di post per ogni utente nella lista interna - ''' + + + def get_top_crypto_posts(self, limit:int = 5) -> list[SocialPost]: social_posts: list[SocialPost] = [] + for user in self.users: - # This currently doesn't work as intended since it returns the posts in random order - # tweets = self.container.exec_run("rettiwt -k" + self.api_key + " tweet search -f " + str(user), tty=True) - tweets = subprocess.run("rettiwt -k" + self.api_key + " tweet search -f " + str(user)) - tweets = tweets.output.decode() - tweets = json.loads(tweets) - tweets: list[dict] = tweets['list'] - tweets = tweets[:limit] - for tweet in tweets: + process = subprocess.run(f"rettiwt -k {self.api_key} tweet search -f {str(user)}", capture_output=True) + results = process.stdout.decode() + json_result = json.loads(results) + + tweets = json_result['list'] + for tweet in tweets[:limit]: social_post = SocialPost() social_post.time = tweet['createdAt'] social_post.title = str(user) + " tweeted: " social_post.description = tweet['fullText'] social_posts.append(social_post) - self.social_posts = social_posts - return social_posts - def print(self): - i = 1 - for post in self.social_posts: - print(f"Post {i}:") - print(f"Time: {post.time}") - print(f"Title: {post.title}") - print(f"Description: {post.description}") - print() - i += 1 -# x_wrapper = XWrapper() -# social_posts = x_wrapper.get_top_crypto_posts(limit=3) -# x_wrapper.print() \ No newline at end of file + return social_posts -- 2.49.1 From 8f9617c5a885bc30898989ebb8f56a38287159e5 Mon Sep 17 00:00:00 2001 From: Berack96 Date: Mon, 20 Oct 2025 16:01:22 +0200 Subject: [PATCH 07/11] MAX_COMMENTS in social.py e lista X_USERS in x.py --- src/app/api/core/social.py | 3 +++ src/app/api/social/reddit.py | 3 +-- 
src/app/api/social/x.py | 20 ++++++++++---------- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/src/app/api/core/social.py b/src/app/api/core/social.py index 721ac0c..deca505 100644 --- a/src/app/api/core/social.py +++ b/src/app/api/core/social.py @@ -1,6 +1,9 @@ from pydantic import BaseModel + +MAX_COMMENTS = 5 + class SocialPost(BaseModel): """ Represents a social media post with time, title, description, and comments. diff --git a/src/app/api/social/reddit.py b/src/app/api/social/reddit.py index bda7687..c247cac 100644 --- a/src/app/api/social/reddit.py +++ b/src/app/api/social/reddit.py @@ -1,10 +1,9 @@ import os from praw import Reddit # type: ignore from praw.models import Submission # type: ignore -from app.api.core.social import SocialWrapper, SocialPost, SocialComment +from app.api.core.social import * -MAX_COMMENTS = 5 # metterne altri se necessario. # fonti: https://lkiconsulting.io/marketing/best-crypto-subreddits/ SUBREDDITS = [ diff --git a/src/app/api/social/x.py b/src/app/api/social/x.py index 5222092..a1b1bd4 100644 --- a/src/app/api/social/x.py +++ b/src/app/api/social/x.py @@ -5,6 +5,15 @@ from shutil import which from app.api.core.social import SocialWrapper, SocialPost +# This is the list of users that can be interesting +# To get the ID of a new user is necessary to search it on X, copy the url and insert it in a service like "https://get-id-x.foundtt.com/en/" +X_USERS = [ + 'watcherguru', + 'Cointelegraph', + 'BTC_Archive', + 'elonmusk' +] + class XWrapper(SocialWrapper): def __init__(self): ''' @@ -17,20 +26,11 @@ class XWrapper(SocialWrapper): assert self.api_key, "X_API_KEY environment variable not set" assert which('rettiwt') is not None, "Command `rettiwt` not installed" - # This is the list of users that can be interesting - # To get the ID of a new user is necessary to search it on X, copy the url and insert it in a service like "https://get-id-x.foundtt.com/en/" - self.users = [ - 'watcherguru', - 'Cointelegraph', - 
'BTC_Archive', - 'elonmusk' - ] - def get_top_crypto_posts(self, limit:int = 5) -> list[SocialPost]: social_posts: list[SocialPost] = [] - for user in self.users: + for user in X_USERS: process = subprocess.run(f"rettiwt -k {self.api_key} tweet search -f {str(user)}", capture_output=True) results = process.stdout.decode() json_result = json.loads(results) -- 2.49.1 From 69e2ce651ca36d2a417e75bc4fd653220716cfe0 Mon Sep 17 00:00:00 2001 From: Berack96 Date: Mon, 20 Oct 2025 16:22:31 +0200 Subject: [PATCH 08/11] ChanWrapper to improve typing --- src/app/api/social/chan.py | 173 +++++++++++++++---------------------- 1 file changed, 71 insertions(+), 102 deletions(-) diff --git a/src/app/api/social/chan.py b/src/app/api/social/chan.py index 901383f..bea039d 100644 --- a/src/app/api/social/chan.py +++ b/src/app/api/social/chan.py @@ -1,123 +1,92 @@ ''' Usiamo le API di 4chan per ottenere un catalogo di threads dalla board /biz/ ''' -import requests import re import html +import requests from bs4 import BeautifulSoup +from app.api.core.social import * + -from .base import SocialWrapper, SocialPost, SocialComment class ChanWrapper(SocialWrapper): def __init__(self): super().__init__() + def __time_str(self, timestamp: str) -> str: + """Converte una stringa da MM/GG/AA di timestamp nel formato GG/MM/AA""" + if len(timestamp) < 8: return "" + + month = timestamp[:2] + day = timestamp[3:5] + year = timestamp[6:8] + return f"{day}/{month}/{year}" + + def __unformat_html_str(self, html_element: str) -> str: + """Pulisce il commento rimuovendo HTML e formattazioni inutili""" + if not html_element: return "" + + html_entities = html.unescape(html_element) + soup = BeautifulSoup(html_entities, 'html.parser') + html_element = soup.get_text(separator=" ") + html_element = re.sub(r"[\\/]+", "/", html_element) + html_element = re.sub(r"\s+", " ", html_element).strip() + return html_element + def get_top_crypto_posts(self, limit: int = 5) -> list[SocialPost]: - # Url dell'API della 
board /biz/ - json_url = 'https://a.4cdn.org/biz/catalog.json' - json = requests.get(json_url) + url = 'https://a.4cdn.org/biz/catalog.json' + response = requests.get(url) + assert response.status_code == 200, f"Error in 4chan API request [{response.status_code}] {response.text}" - if json.status_code == 200: - page_list: list[dict] = json.json() # Questa lista contiene un dizionario per ogni pagina della board di questo tipo {"page": page_number, "threads": [{thread_data}]} - else: - print("Error:", json.status_code) - - # Lista dei post social_posts: list[SocialPost] = [] - for page in page_list: - thread_list: list[dict] = page['threads'] - ''' - Per ogni thread ci interessano i seguenti campi: - - "sticky": ci indica se il thread è stato fissato o meno, se non è presente vuol dire che non è stato fissato, i thread sticky possono essere ignorati - - "now": la data di creazione del thread tipo "MM/GG/AA(day)hh:mm:ss", ci interessa solo MM/GG/AA - - "name": il nome dell'utente - - "sub": il nome del thread, può contenere anche elementi di formattazione html che saranno da ignorare, potrebbe non essere presente - - "com": il commento del thread, può contenere anche elementi di formattazione html che saranno da ignorare - - "last_replies": una lista di dizionari conteneti le risposte al thread principale, sono strutturate similarmente al thread, di queste ci interessano i seguenti campi: - - "now": la data di creazione della risposta tipo "MM/GG/AA(day)hh:mm:ss", ci interessa solo MM/GG/AA - - "name": il nome dell'utente - - "com": il commento della risposta, possono contenere anche elementi di formattazione html che saranno da ignorare - ''' - for thread in thread_list: - # Ignoriamo i dizionari dei thread nei quali è presente la key "sticky" + # Questa lista contiene un dizionario per ogni pagina della board di questo tipo {"page": page_number, "threads": [{thread_data}]} + for page in response.json(): + for thread in page['threads']: + + # ci indica se il thread 
è stato fissato o meno, se non è presente vuol dire che non è stato fissato, i thread sticky possono essere ignorati if 'sticky' in thread: continue - else: - time: str = thread['now'] - month: str = time[:2] - day: str = time[4:6] - year: str = time[7:9] - time: str = day + '/' + month + '/' + year - - name: str = thread['name'] - try: - title: str = thread['sub'] - html_entities = html.unescape(title) - soup = BeautifulSoup(html_entities, 'html.parser') - title = soup.get_text(separator=" ") - title = re.sub(r"[\\/]+", "/", title) - title = re.sub(r"\s+", " ", title).strip() - title = name + " posted: " + title - except: - title: str = name + " posted" - try: - thread_description: str = thread['com'] - html_entities = html.unescape(thread_description) - soup = BeautifulSoup(html_entities, 'html.parser') - thread_description = soup.get_text(separator=" ") - thread_description = re.sub(r"[\\/]+", "/", thread_description) - thread_description = re.sub(r"\s+", " ", thread_description).strip() - except: - thread_description = None - try: - response_list: list[dict] = thread['last_replies'] - except: - response_list: list[dict] = [] - comments_list: list[SocialComment] = [] + # la data di creazione del thread tipo "MM/GG/AA(day)hh:mm:ss", ci interessa solo MM/GG/AA + time = self.__time_str(thread.get('now', '')) - # Otteniamo i primi 5 commenti - i = 0 - for response in response_list: - time: str = response['now'] - month: str = time[:2] - day: str = time[3:5] - year: str = time[6:8] - time: str = day + '/' + month + '/' + year + # il nome dell'utente + name: str = thread.get('name', 'Anonymous') - try: - comment_description: str = response['com'] - html_entities = html.unescape(comment_description) - soup = BeautifulSoup(html_entities, 'html.parser') - comment_description = soup.get_text(separator=" ") - comment_description = re.sub(r"[\\/]+", "/", comment_description) - comment_description = re.sub(r"\s+", " ", comment_description).strip() - except: - 
comment_description = None - if comment_description is None: - continue - else: - social_comment: SocialComment = SocialComment( - time=time, - description=comment_description - ) - comments_list.append(social_comment) - i += 1 - if i >= 5: - break - if thread_description is None: + # il nome del thread, può contenere anche elementi di formattazione html che saranno da ignorare, potrebbe non essere presente + title = self.__unformat_html_str(thread.get('sub', '')) + title = f"{name} posted: {title}" + + # il commento del thread, può contenere anche elementi di formattazione html che saranno da ignorare + thread_description = self.__unformat_html_str(thread.get('com', '')) + if not thread_description: + continue + + # una lista di dizionari conteneti le risposte al thread principale, sono strutturate similarmente al thread, di queste ci interessano i seguenti campi: + # - "now": la data di creazione della risposta tipo "MM/GG/AA(day)hh:mm:ss", ci interessa solo MM/GG/AA + # - "name": il nome dell'utente + # - "com": il commento della risposta, possono contenere anche elementi di formattazione html che saranno da ignorare + response_list = thread.get('last_replies', []) + comments_list: list[SocialComment] = [] + + for i, response in enumerate(response_list): + if i >= MAX_COMMENTS: break + + time = self.__time_str(response['now']) + + comment = self.__unformat_html_str(response.get('com', '')) + if not comment: continue - else: - social_post: SocialPost = SocialPost( - time=time, - title=title, - description=thread_description, - comments=comments_list - ) - social_posts.append(social_post) - - return social_posts[:limit] -# Stampiamo i post -# chan_wrapper = ChanWrapper() -# social_posts = chan_wrapper.get_top_crypto_posts() -# print(len(social_posts)) + + social_comment = SocialComment(time=time, description=comment) + comments_list.append(social_comment) + + social_post: SocialPost = SocialPost( + time=time, + title=title, + description=thread_description, + 
comments=comments_list + ) + social_posts.append(social_post) + + return social_posts[:limit] -- 2.49.1 From 4f9385ae0fa125c8bf369a82ddf60e9ef0eb8a53 Mon Sep 17 00:00:00 2001 From: Berack96 Date: Mon, 20 Oct 2025 16:42:42 +0200 Subject: [PATCH 09/11] unified_timestamp --- src/app/api/core/__init__.py | 22 +++++++++++++++++++ src/app/api/core/markets.py | 22 +++---------------- src/app/api/core/social.py | 9 ++++++++ src/app/api/social/chan.py | 18 +++++++-------- src/app/api/social/reddit.py | 4 ++-- tests/api/test_social_4chan.py | 22 +++++++++++++++++++ .../{test_reddit.py => test_social_reddit.py} | 3 +++ tests/api/test_social_x_api.py | 22 +++++++++++++++++++ 8 files changed, 91 insertions(+), 31 deletions(-) create mode 100644 tests/api/test_social_4chan.py rename tests/api/{test_reddit.py => test_social_reddit.py} (92%) create mode 100644 tests/api/test_social_x_api.py diff --git a/src/app/api/core/__init__.py b/src/app/api/core/__init__.py index e69de29..3cddea7 100644 --- a/src/app/api/core/__init__.py +++ b/src/app/api/core/__init__.py @@ -0,0 +1,22 @@ +from datetime import datetime + + +def unified_timestamp(timestamp_ms: int | None = None, timestamp_s: int | None = None) -> str: + """ + Transform the timestamp from milliseconds or seconds to a unified string format. + The resulting string is a formatted string 'YYYY-MM-DD HH:MM'. + Args: + timestamp_ms: Timestamp in milliseconds. + timestamp_s: Timestamp in seconds. + Raises: + ValueError: If neither timestamp_ms nor timestamp_s is provided. 
+ """ + if timestamp_ms is not None: + timestamp = timestamp_ms // 1000 + elif timestamp_s is not None: + timestamp = timestamp_s + else: + raise ValueError("Either timestamp_ms or timestamp_s must be provided") + assert timestamp > 0, "Invalid timestamp data received" + + return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M') \ No newline at end of file diff --git a/src/app/api/core/markets.py b/src/app/api/core/markets.py index 8b6c754..6b53f61 100644 --- a/src/app/api/core/markets.py +++ b/src/app/api/core/markets.py @@ -1,6 +1,6 @@ import statistics -from datetime import datetime from pydantic import BaseModel +from app.api.core import unified_timestamp class ProductInfo(BaseModel): @@ -64,24 +64,8 @@ class Price(BaseModel): """Timestamp in format YYYY-MM-DD HH:MM""" def set_timestamp(self, timestamp_ms: int | None = None, timestamp_s: int | None = None) -> None: - """ - Sets the timestamp from milliseconds or seconds. - The timestamp is saved as a formatted string 'YYYY-MM-DD HH:MM'. - Args: - timestamp_ms: Timestamp in milliseconds. - timestamp_s: Timestamp in seconds. - Raises: - ValueError: If neither timestamp_ms nor timestamp_s is provided. 
- """ - if timestamp_ms is not None: - timestamp = timestamp_ms // 1000 - elif timestamp_s is not None: - timestamp = timestamp_s - else: - raise ValueError("Either timestamp_ms or timestamp_s must be provided") - assert timestamp > 0, "Invalid timestamp data received" - - self.timestamp = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M') + """ Use the unified_timestamp function to set the timestamp.""" + self.timestamp = unified_timestamp(timestamp_ms, timestamp_s) @staticmethod def aggregate(prices: dict[str, list['Price']]) -> list['Price']: diff --git a/src/app/api/core/social.py b/src/app/api/core/social.py index deca505..fe4d5bf 100644 --- a/src/app/api/core/social.py +++ b/src/app/api/core/social.py @@ -1,4 +1,5 @@ from pydantic import BaseModel +from app.api.core import unified_timestamp @@ -13,6 +14,10 @@ class SocialPost(BaseModel): description: str = "" comments: list["SocialComment"] = [] + def set_timestamp(self, timestamp_ms: int | None = None, timestamp_s: int | None = None) -> None: + """ Use the unified_timestamp function to set the time.""" + self.time = unified_timestamp(timestamp_ms, timestamp_s) + class SocialComment(BaseModel): """ Represents a comment on a social media post. 
@@ -20,6 +25,10 @@ class SocialComment(BaseModel): time: str = "" description: str = "" + def set_timestamp(self, timestamp_ms: int | None = None, timestamp_s: int | None = None) -> None: + """ Use the unified_timestamp function to set the time.""" + self.time = unified_timestamp(timestamp_ms, timestamp_s) + class SocialWrapper: """ diff --git a/src/app/api/social/chan.py b/src/app/api/social/chan.py index bea039d..df89f31 100644 --- a/src/app/api/social/chan.py +++ b/src/app/api/social/chan.py @@ -5,6 +5,7 @@ import re import html import requests from bs4 import BeautifulSoup +from datetime import datetime from app.api.core.social import * @@ -12,14 +13,10 @@ class ChanWrapper(SocialWrapper): def __init__(self): super().__init__() - def __time_str(self, timestamp: str) -> str: - """Converte una stringa da MM/GG/AA di timestamp nel formato GG/MM/AA""" - if len(timestamp) < 8: return "" - - month = timestamp[:2] - day = timestamp[3:5] - year = timestamp[6:8] - return f"{day}/{month}/{year}" + def __time_str(self, timestamp: str) -> int: + """Converte una stringa da MM/GG/AA(DAY)HH:MM:SS di 4chan a millisecondi""" + time = datetime.strptime(timestamp, "%m/%d/%y(%a)%H:%M:%S") + return int(time.timestamp() * 1000) def __unformat_html_str(self, html_element: str) -> str: """Pulisce il commento rimuovendo HTML e formattazioni inutili""" @@ -78,15 +75,16 @@ class ChanWrapper(SocialWrapper): if not comment: continue - social_comment = SocialComment(time=time, description=comment) + social_comment = SocialComment(description=comment) + social_comment.set_timestamp(timestamp_ms=time) comments_list.append(social_comment) social_post: SocialPost = SocialPost( - time=time, title=title, description=thread_description, comments=comments_list ) + social_post.set_timestamp(timestamp_ms=time) social_posts.append(social_post) return social_posts[:limit] diff --git a/src/app/api/social/reddit.py b/src/app/api/social/reddit.py index c247cac..201166c 100644 --- 
a/src/app/api/social/reddit.py +++ b/src/app/api/social/reddit.py @@ -23,13 +23,13 @@ SUBREDDITS = [ def extract_post(post: Submission) -> SocialPost: social = SocialPost() - social.time = str(post.created) + social.set_timestamp(timestamp_ms=post.created) social.title = post.title social.description = post.selftext for top_comment in post.comments: comment = SocialComment() - comment.time = str(top_comment.created) + comment.set_timestamp(timestamp_ms=top_comment.created) comment.description = top_comment.body social.comments.append(comment) diff --git a/tests/api/test_social_4chan.py b/tests/api/test_social_4chan.py new file mode 100644 index 0000000..b39a36d --- /dev/null +++ b/tests/api/test_social_4chan.py @@ -0,0 +1,22 @@ +import re +import pytest +from app.api.social.chan import ChanWrapper + +@pytest.mark.social +@pytest.mark.api +class TestChanWrapper: + def test_initialization(self): + wrapper = ChanWrapper() + assert wrapper is not None + + def test_get_top_crypto_posts(self): + wrapper = ChanWrapper() + posts = wrapper.get_top_crypto_posts(limit=2) + assert isinstance(posts, list) + assert len(posts) == 2 + for post in posts: + assert post.title != "" + assert post.time != "" + assert re.match(r'\d{4}-\d{2}-\d{2}', post.time) + assert isinstance(post.comments, list) + diff --git a/tests/api/test_reddit.py b/tests/api/test_social_reddit.py similarity index 92% rename from tests/api/test_reddit.py rename to tests/api/test_social_reddit.py index d4533a5..a83fe8a 100644 --- a/tests/api/test_reddit.py +++ b/tests/api/test_social_reddit.py @@ -1,4 +1,5 @@ import os +import re import pytest from app.api.social.reddit import MAX_COMMENTS, RedditWrapper @@ -18,6 +19,8 @@ class TestRedditWrapper: assert len(posts) == 2 for post in posts: assert post.title != "" + assert re.match(r'\d{4}-\d{2}-\d{2}', post.time) + assert isinstance(post.comments, list) assert len(post.comments) <= MAX_COMMENTS for comment in post.comments: diff --git 
a/tests/api/test_social_x_api.py b/tests/api/test_social_x_api.py new file mode 100644 index 0000000..15f39c3 --- /dev/null +++ b/tests/api/test_social_x_api.py @@ -0,0 +1,22 @@ +import os +import re +import pytest +from app.api.social.x import XWrapper + +@pytest.mark.social +@pytest.mark.api +@pytest.mark.skipif(not os.getenv("X_API_KEY"), reason="X_API_KEY not set in environment variables") +class TestXWrapper: + def test_initialization(self): + wrapper = XWrapper() + assert wrapper is not None + + def test_get_top_crypto_posts(self): + wrapper = XWrapper() + posts = wrapper.get_top_crypto_posts(limit=2) + assert isinstance(posts, list) + assert len(posts) == 2 + for post in posts: + assert post.title != "" + assert re.match(r'\d{4}-\d{2}-\d{2}', post.time) + assert isinstance(post.comments, list) -- 2.49.1 From c6ad4f8bb0628ad032a0504a488b9d3679d200d2 Mon Sep 17 00:00:00 2001 From: Berack96 Date: Mon, 20 Oct 2025 16:47:39 +0200 Subject: [PATCH 10/11] fix dependency --- pyproject.toml | 3 --- uv.lock | 63 -------------------------------------------------- 2 files changed, 66 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4d0435d..127d77a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,9 +23,6 @@ dependencies = [ # Modelli supportati e installati (aggiungere qui sotto quelli che si vogliono usare) "google-genai", "ollama", - - "openai", - "deepseek", # API di exchange di criptovalute "coinbase-advanced-py", diff --git a/uv.lock b/uv.lock index 7460893..000517c 100644 --- a/uv.lock +++ b/uv.lock @@ -389,26 +389,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/40/cd/ef820662e0d87f46b829bba7e2324c7978e0153692bbd2f08f7746049708/ddgs-9.6.0-py3-none-any.whl", hash = "sha256:24120f1b672fd3a28309db029e7038eb3054381730aea7a08d51bb909dd55520", size = 41558, upload-time = "2025-09-17T13:27:08.99Z" }, ] -[[package]] -name = "deepseek" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = 
"requests" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/7b/bede06edf1a25a6ab06553b15f6abf8e912848dfa5f68514720d3e388550/deepseek-1.0.0-py3-none-any.whl", hash = "sha256:ee4175bfcb7ac1154369dbd86a4d8bc1809f6fa20e3e7baa362544567197cb3f", size = 4542, upload-time = "2025-01-03T08:06:23.887Z" }, -] - -[[package]] -name = "distro" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, -] - [[package]] name = "dnspython" version = "2.8.0" @@ -808,26 +788,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] -[[package]] -name = "jiter" -version = "0.11.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, - { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, - { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, - { url = "https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, - { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, - { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, - { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, - { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, - { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, - { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, - { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, - { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, -] - [[package]] name = "lxml" version = "6.0.2" @@ -984,25 +944,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b5/c1/edc9f41b425ca40b26b7c104c5f6841a4537bb2552bfa6ca66e81405bb95/ollama-0.6.0-py3-none-any.whl", hash = "sha256:534511b3ccea2dff419ae06c3b58d7f217c55be7897c8ce5868dfb6b219cf7a0", size = 
14130, upload-time = "2025-09-24T22:46:01.19Z" }, ] -[[package]] -name = "openai" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "distro" }, - { name = "httpx" }, - { name = "jiter" }, - { name = "pydantic" }, - { name = "sniffio" }, - { name = "tqdm" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/de/90/8f26554d24d63ed4f94d33c24271559863223a67e624f4d2e65ba8e48dca/openai-2.3.0.tar.gz", hash = "sha256:8d213ee5aaf91737faea2d7fc1cd608657a5367a18966372a3756ceaabfbd812", size = 589616, upload-time = "2025-10-10T01:12:50.851Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/5b/4be258ff072ed8ee15f6bfd8d5a1a4618aa4704b127c0c5959212ad177d6/openai-2.3.0-py3-none-any.whl", hash = "sha256:a7aa83be6f7b0ab2e4d4d7bcaf36e3d790874c0167380c5d0afd0ed99a86bd7b", size = 999768, upload-time = "2025-10-10T01:12:48.647Z" }, -] - [[package]] name = "orjson" version = "3.11.3" @@ -1717,7 +1658,6 @@ dependencies = [ { name = "coinbase-advanced-py" }, { name = "colorlog" }, { name = "ddgs" }, - { name = "deepseek" }, { name = "dotenv" }, { name = "gnews" }, { name = "google-genai" }, @@ -1725,7 +1665,6 @@ dependencies = [ { name = "markdown-pdf" }, { name = "newsapi-python" }, { name = "ollama" }, - { name = "openai" }, { name = "praw" }, { name = "pytest" }, { name = "python-binance" }, @@ -1739,7 +1678,6 @@ requires-dist = [ { name = "coinbase-advanced-py" }, { name = "colorlog" }, { name = "ddgs" }, - { name = "deepseek" }, { name = "dotenv" }, { name = "gnews" }, { name = "google-genai" }, @@ -1747,7 +1685,6 @@ requires-dist = [ { name = "markdown-pdf" }, { name = "newsapi-python" }, { name = "ollama" }, - { name = "openai" }, { name = "praw" }, { name = "pytest" }, { name = "python-binance" }, -- 2.49.1 From 41503dc56be069982e95509818e8b2a14aa5fef5 Mon Sep 17 00:00:00 2001 From: Berack96 Date: Mon, 20 Oct 2025 16:54:13 +0200 Subject: [PATCH 
11/11] env & fixes --- .env.example | 1 + src/app/api/social/chan.py | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.env.example b/.env.example index db48988..694300e 100644 --- a/.env.example +++ b/.env.example @@ -5,6 +5,7 @@ # https://makersuite.google.com/app/apikey GOOGLE_API_KEY= + ############################################################################### # Configurazioni per gli agenti di mercato ############################################################################### diff --git a/src/app/api/social/chan.py b/src/app/api/social/chan.py index df89f31..a39e517 100644 --- a/src/app/api/social/chan.py +++ b/src/app/api/social/chan.py @@ -59,18 +59,17 @@ class ChanWrapper(SocialWrapper): if not thread_description: continue - # una lista di dizionari conteneti le risposte al thread principale, sono strutturate similarmente al thread, di queste ci interessano i seguenti campi: - # - "now": la data di creazione della risposta tipo "MM/GG/AA(day)hh:mm:ss", ci interessa solo MM/GG/AA - # - "name": il nome dell'utente - # - "com": il commento della risposta, possono contenere anche elementi di formattazione html che saranno da ignorare + # una lista di dizionari conteneti le risposte al thread principale, sono strutturate similarmente al thread response_list = thread.get('last_replies', []) comments_list: list[SocialComment] = [] for i, response in enumerate(response_list): if i >= MAX_COMMENTS: break + # la data di creazione della risposta tipo "MM/GG/AA(day)hh:mm:ss", ci interessa solo MM/GG/AA time = self.__time_str(response['now']) + # il commento della risposta, può contenere anche elementi di formattazione html che saranno da ignorare comment = self.__unformat_html_str(response.get('com', '')) if not comment: continue -- 2.49.1