Advertisement
Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
- import aiohttp
- import asyncio
- import random
def read_words_from_file(filename):
    """Return the list of newline-separated words stored in *filename*."""
    with open(filename, 'r') as handle:
        return handle.read().splitlines()
# Load the candidate words from the three chunk files.
chunk1_words = read_words_from_file('chunk1.txt')
chunk2_words = read_words_from_file('chunk2.txt')
chunk3_words = read_words_from_file('chunk3.txt')

# All candidate words combined into a single pool.
word_pool = [*chunk1_words, *chunk2_words, *chunk3_words]

# Memoizes API responses so each word is fetched at most once per run.
definition_cache = {}
# Function to fetch definitions from the API
async def fetch_definition(session, word, max_retries=3):
    """Fetch the primary definitions for *word* from dictionaryapi.dev.

    Results are memoized in the module-level ``definition_cache`` so repeated
    lookups never hit the API twice.  Returns a list of definition strings,
    or an empty list on any failure.

    Args:
        session: an open aiohttp.ClientSession used to issue the request.
        word: the word to look up.
        max_retries: how many times to retry after an HTTP 429 response
            (new, defaulted parameter; the previous code recursed without
            bound and could retry forever / overflow the stack).
    """
    if word in definition_cache:
        return definition_cache[word]
    url = f"https://api.dictionaryapi.dev/api/v2/entries/en/{word}"
    try:
        for attempt in range(max_retries + 1):
            async with session.get(url) as response:
                if response.status == 429:
                    if attempt == max_retries:
                        print(f"Rate limit exceeded for '{word}'. Waiting...")
                        return []
                    print(f"Rate limit exceeded for '{word}'. Waiting...")
                    await asyncio.sleep(5)  # back off before retrying
                    continue
                response.raise_for_status()
                entries = await response.json()
                # Take the first definition of the first meaning of each entry.
                # Skip malformed entries instead of raising KeyError/IndexError
                # (the old comprehension crashed on entries without 'meanings').
                definitions_list = []
                for entry in entries:
                    try:
                        definitions_list.append(
                            entry['meanings'][0]['definitions'][0]['definition'])
                    except (KeyError, IndexError, TypeError):
                        continue
                definition_cache[word] = definitions_list
                return definitions_list
        return []
    except Exception as e:
        print(f"Error fetching definition for '{word}': {e}")
        return []
# Function to compute semantic similarity using Jaccard index
def are_definitions_similar(def1, def2, threshold=0.5):
    """Return True when the Jaccard similarity of the two definitions'
    word sets exceeds *threshold*.

    Args:
        def1, def2: definition strings; comparison is case-insensitive.
        threshold: similarity cutoff in [0, 1] (new, defaulted parameter
            preserving the old hard-coded 0.5).

    The previous version divided by the union size unconditionally and
    raised ZeroDivisionError when both strings were empty/whitespace;
    that case now returns False.
    """
    words1 = set(def1.lower().split())
    words2 = set(def2.lower().split())
    union = words1 | words2
    if not union:
        # Two empty definitions carry no information; treat as dissimilar.
        return False
    return len(words1 & words2) / len(union) > threshold
# Function to find a suitable word based on definitions
async def find_suitable_word(topics):
    """Scan word_pool for the first word whose definition resembles any
    definition of the given topic words.

    Returns a (word, definitions) pair, or (None, []) when the topics have
    no definitions or no candidate word matches.
    """
    async with aiohttp.ClientSession() as session:
        topic_definitions = []
        for topic in topics:
            topic_definitions.extend(await fetch_definition(session, topic))

        if not topic_definitions:
            return None, []

        for candidate in word_pool:
            candidate_definitions = await fetch_definition(session, candidate)
            if not candidate_definitions:
                continue
            matched = any(
                are_definitions_similar(topic_def, candidate_def)
                for topic_def in topic_definitions
                for candidate_def in candidate_definitions
            )
            if matched:
                return candidate, candidate_definitions
        return None, []
# Function to construct a grammatically correct sentence
def construct_sentence(word, definitions):
    """Format *word* and its definitions as one declarative sentence."""
    joined = ', '.join(definitions)
    return f"The word '{word}' can mean: {joined}."
# Main AI conversation function
async def ai_conversation(topic):
    """Produce one AI response for *topic*, or a fallback message when no
    suitable word is found."""
    # Each whitespace-separated token of the topic is looked up on its own.
    words = topic.split()
    word, definitions = await find_suitable_word(words)
    if not word:
        return "AI: No suitable word found for the conversation."
    return construct_sentence(word, definitions)
# Main loop
if __name__ == "__main__":
    topic = input("Enter a topic for the AI conversation: ")

    async def main():
        # Keep generating responses for the same topic until the user opts out.
        while True:
            print(await ai_conversation(topic))
            answer = input("Continue the conversation? (yes/no): ")
            if answer.lower() != 'yes':
                break

    asyncio.run(main())
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement