"""Miniflux AI post-processor.

Polls Miniflux for unread entries, runs each entry through one or more
LLM "agents" defined in ``config.yml``, and prepends the agents' output
to the entry content via the Miniflux API.  Runs forever, polling every
60 seconds.

NOTE(review): the source file this was recovered from was badly garbled
(line breaks lost, the middle of ``process_entry`` missing).  The agent
loop below is reconstructed from the surviving fragments — confirm it
against version control before shipping.
"""

import concurrent.futures
import logging
import time
import traceback

import markdown
import miniflux
from markdownify import markdownify as md
from openai import OpenAI
from yaml import safe_load

# Load configuration once at startup.  The original used
# safe_load(open(...)) and leaked the file handle; `with` closes it.
with open('config.yml', encoding='utf8') as _config_file:
    config = safe_load(_config_file)

miniflux_client = miniflux.Client(
    config['miniflux']['base_url'],
    api_key=config['miniflux']['api_key'],
)
llm_client = OpenAI(
    base_url=config['llm']['base_url'],
    api_key=config['llm']['api_key'],
)

logger = logging.getLogger(__name__)
logger.setLevel(config.get('log_level', 'INFO'))
formatter = logging.Formatter(
    '%(asctime)s - %(filename)s - %(lineno)d - %(levelname)s - %(message)s'
)
console = logging.StreamHandler()
console.setFormatter(formatter)
logger.addHandler(console)


def process_entry(entry):
    """Run every configured agent over *entry* and prepend the results.

    Each agent in ``config['agents']`` produces one section.  Agents with
    ``style_block`` set emit a plain-text block (newlines stripped);
    otherwise the LLM response is rendered to HTML with ``markdown``.
    If at least one agent produced output, the entry is updated in
    Miniflux with the combined result prepended to the original content.

    NOTE(review): titles of all agents are collected so an entry that
    already starts with an agent's output is not processed twice — this
    guard is reconstructed from fragments; verify the exact condition.
    """
    llm_result = ''
    # Titles of every agent; used to detect entries already annotated.
    start_with_list = [agent['title'] for agent in config['agents'].values()]

    for _name, agent in config['agents'].items():
        # Skip entries that already begin with some agent's output.
        if any(entry['content'].startswith(title) for title in start_with_list):
            continue

        # NOTE(review): prompt/model keys assumed from config layout —
        # confirm 'prompt' exists on each agent and 'model' under 'llm'.
        completion = llm_client.chat.completions.create(
            model=config['llm']['model'],
            messages=[
                {'role': 'system', 'content': agent['prompt']},
                # Convert the entry's HTML to markdown for the model.
                {'role': 'user', 'content': md(entry['content'])},
            ],
        )
        response_content = completion.choices[0].message.content

        if agent['style_block']:
            # Plain block: collapse the response onto one line.
            llm_result += (
                '\n' + agent['title'] + ':'
                + response_content.replace('\n', '').replace('\r', '')
                + '\n\n'
            )
        else:
            # Rich block: render the markdown response to HTML.
            llm_result += f"{agent['title']}:{markdown.markdown(response_content)}\n\n"

    if llm_result:
        miniflux_client.update_entry(
            entry['id'], content=llm_result + entry['content']
        )


def main():
    """Poll Miniflux forever, fanning entries out to a thread pool."""
    while True:
        entries = miniflux_client.get_entries(status=['unread'], limit=10000)
        start_time = time.time()

        # The original abused a conditional expression for side effects;
        # a plain if/else says the same thing clearly.
        if entries['entries']:
            logger.info('Fetched unread entries: %s', len(entries['entries']))
        else:
            logger.info('No new entries')

        max_workers = config.get('llm', {}).get('max_workers', 4)
        with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = [
                executor.submit(process_entry, entry)
                for entry in entries['entries']
            ]
            for future in concurrent.futures.as_completed(futures):
                try:
                    # Result is discarded; we only care about exceptions.
                    future.result()
                except Exception as exc:
                    logger.error(traceback.format_exc())
                    logger.error('generated an exception: %s', exc)

        # Only log completion when a batch actually took some time
        # (>= 3 s heuristic preserved from the original).
        if entries['entries'] and time.time() - start_time >= 3:
            logger.info('Done')
        time.sleep(60)


if __name__ == '__main__':
    # Guarding the entry point stops the infinite loop from running on
    # import (the original looped at module level).
    main()