Spaces:
Running
Running
Major update. Support for 15 LLMs, World Flora Online taxonomy validation, geolocation, 2 OCR methods, significant UI changes, stability improvements, consistent JSON parsing
e91ac58
| import os, io, openai, vertexai | |
| from mistralai.client import MistralClient | |
| from mistralai.models.chat_completion import ChatMessage | |
| from langchain.schema import HumanMessage | |
| from langchain_openai import AzureChatOpenAI | |
| from vertexai.language_models import TextGenerationModel | |
| from vertexai.preview.generative_models import GenerativeModel | |
| from google.cloud import vision | |
| from datetime import datetime | |
| class APIvalidation: | |
| def __init__(self, cfg_private, dir_home) -> None: | |
| self.cfg_private = cfg_private | |
| self.dir_home = dir_home | |
| self.formatted_date = self.get_formatted_date() | |
| def get_formatted_date(self): | |
| # Get the current date | |
| current_date = datetime.now() | |
| # Format the date as "Month day, year" (e.g., "January 23, 2024") | |
| formatted_date = current_date.strftime("%B %d, %Y") | |
| return formatted_date | |
| def has_API_key(self, val): | |
| if val: | |
| return True | |
| else: | |
| return False | |
| def check_openai_api_key(self): | |
| openai.api_key = self.cfg_private['openai']['OPENAI_API_KEY'] | |
| try: | |
| openai.models.list() | |
| return True | |
| except: | |
| return False | |
| def check_google_ocr_api_key(self): | |
| # if os.path.exists(self.cfg_private['google_cloud']['path_json_file']): | |
| # os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = self.cfg_private['google_cloud']['path_json_file'] | |
| # elif os.path.exists(self.cfg_private['google_cloud']['path_json_file_service_account2']): | |
| # os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = self.cfg_private['google_cloud']['path_json_file_service_account2'] | |
| # else: | |
| # return False | |
| try: | |
| logo_path = os.path.join(self.dir_home, 'img','logo.png') | |
| client = vision.ImageAnnotatorClient() | |
| with io.open(logo_path, 'rb') as image_file: | |
| content = image_file.read() | |
| image = vision.Image(content=content) | |
| response = client.document_text_detection(image=image) | |
| texts = response.text_annotations | |
| normal_cleaned_text = texts[0].description if texts else None | |
| if normal_cleaned_text: | |
| return True | |
| else: | |
| return False | |
| except: | |
| return False | |
| def check_azure_openai_api_key(self): | |
| try: | |
| # Initialize the Azure OpenAI client | |
| model = AzureChatOpenAI( | |
| deployment_name = 'gpt-35-turbo',#'gpt-35-turbo', | |
| openai_api_version = self.cfg_private['openai_azure']['api_version'], | |
| openai_api_key = self.cfg_private['openai_azure']['openai_api_key'], | |
| azure_endpoint = self.cfg_private['openai_azure']['openai_api_base'], | |
| openai_organization = self.cfg_private['openai_azure']['openai_organization'], | |
| ) | |
| msg = HumanMessage(content="hello") | |
| # self.llm_object.temperature = self.config.get('temperature') | |
| response = model([msg]) | |
| # Check the response content (you might need to adjust this depending on how your AzureChatOpenAI class handles responses) | |
| if response: | |
| return True | |
| else: | |
| return False | |
| except Exception as e: # Use a more specific exception if possible | |
| return False | |
| def check_mistral_api_key(self): | |
| try: | |
| # Initialize the Mistral Client with the API key | |
| client = MistralClient(api_key=self.cfg_private['mistral']['mistral_key']) | |
| # Create a simple message | |
| messages = [ChatMessage(role="user", content="hello")] | |
| # Send the message and get the response | |
| chat_response = client.chat( | |
| model="mistral-tiny", | |
| messages=messages, | |
| ) | |
| # Check if the response is valid (adjust this according to the actual response structure) | |
| if chat_response and chat_response.choices: | |
| return True | |
| else: | |
| return False | |
| except Exception as e: # Replace with a more specific exception if possible | |
| return False | |
| def check_google_vertex_genai_api_key(self): | |
| results = {"palm2": False, "gemini": False} | |
| try: | |
| # Assuming genai and vertexai are clients for Google services | |
| os.environ["GOOGLE_API_KEY"] = self.cfg_private['google_palm']['google_palm_api'] | |
| # genai.configure(api_key=self.cfg_private['google_palm']['google_palm_api']) | |
| vertexai.init(project= self.cfg_private['google_palm']['project_id'], location=self.cfg_private['google_palm']['location']) | |
| try: | |
| model = TextGenerationModel.from_pretrained("text-bison@001") | |
| response = model.predict("Hello") | |
| test_response_palm = response.text | |
| # llm_palm = ChatGoogleGenerativeAI(model="text-bison@001") | |
| # test_response_palm = llm_palm.invoke("Hello") | |
| if test_response_palm: | |
| results["palm2"] = True | |
| except Exception as e: | |
| pass | |
| try: | |
| model = GenerativeModel("gemini-pro") | |
| response = model.generate_content("Hello") | |
| test_response_gemini = response.text | |
| # llm_gemini = ChatGoogleGenerativeAI(model="gemini-pro") | |
| # test_response_gemini = llm_gemini.invoke("Hello") | |
| if test_response_gemini: | |
| results["gemini"] = True | |
| except Exception as e: | |
| pass | |
| return results | |
| except Exception as e: # Replace with a more specific exception if possible | |
| return results | |
| def report_api_key_status(self): | |
| missing_keys = [] | |
| present_keys = [] | |
| # Check each key and add to the respective list | |
| # OpenAI key check | |
| if self.has_API_key(self.cfg_private['openai']['OPENAI_API_KEY']): | |
| is_valid = self.check_openai_api_key() | |
| if is_valid: | |
| present_keys.append('OpenAI (Valid)') | |
| else: | |
| present_keys.append('OpenAI (Invalid)') | |
| else: | |
| missing_keys.append('OpenAI') | |
| # Azure OpenAI key check | |
| if self.has_API_key(self.cfg_private['openai_azure']['api_version']): | |
| is_valid = self.check_azure_openai_api_key() | |
| if is_valid: | |
| present_keys.append('Azure OpenAI (Valid)') | |
| else: | |
| present_keys.append('Azure OpenAI (Invalid)') | |
| else: | |
| missing_keys.append('Azure OpenAI') | |
| # Google PALM2/Gemini key check | |
| if self.has_API_key(self.cfg_private['google_palm']['google_palm_api']) and self.has_API_key(self.cfg_private['google_palm']['project_id']) and self.has_API_key(self.cfg_private['google_palm']['location']): | |
| google_results = self.check_google_vertex_genai_api_key() | |
| if google_results['palm2']: | |
| present_keys.append('Palm2 (Valid)') | |
| else: | |
| present_keys.append('Palm2 (Invalid)') | |
| if google_results['gemini']: | |
| present_keys.append('Gemini (Valid)') | |
| else: | |
| present_keys.append('Gemini (Invalid)') | |
| else: | |
| missing_keys.append('Google VertexAI/GenAI') | |
| # Google OCR key check | |
| if self.has_API_key(self.cfg_private['google_palm']['google_palm_api']) and self.has_API_key(self.cfg_private['google_palm']['project_id']) and self.has_API_key(self.cfg_private['google_palm']['location']): | |
| is_valid = self.check_google_ocr_api_key() | |
| if is_valid: | |
| present_keys.append('Google OCR (Valid)') | |
| else: | |
| present_keys.append('Google OCR (Invalid)') | |
| else: | |
| missing_keys.append('Google OCR') | |
| # Mistral key check | |
| if self.has_API_key(self.cfg_private['mistral']['mistral_key']): | |
| is_valid = self.check_mistral_api_key() | |
| if is_valid: | |
| present_keys.append('Mistral (Valid)') | |
| else: | |
| present_keys.append('Mistral (Invalid)') | |
| else: | |
| missing_keys.append('Mistral') | |
| if self.has_API_key(self.cfg_private['here']['api_key']): | |
| present_keys.append('HERE Geocode (Valid)') | |
| else: | |
| missing_keys.append('HERE Geocode (Invalid)') | |
| if self.has_API_key(self.cfg_private['open_cage_geocode']['api_key']): | |
| present_keys.append('OpenCage Geocode (Valid)') | |
| else: | |
| missing_keys.append('OpenCage Geocode (Invalid)') | |
| # Create a report string | |
| report = "API Key Status Report:\n" | |
| report += "Present Keys: " + ", ".join(present_keys) + "\n" | |
| report += "Missing Keys: " + ", ".join(missing_keys) + "\n" | |
| # print(report) | |
| return present_keys, missing_keys, self.formatted_date |