deepseek-1.x-dev/src/Plugin/AiProviders/GeminiProvider.php

<?php

namespace Drupal\deepseek\Plugin\AiProviders;

use Drupal\Core\Config\Config;
use Drupal\Core\Plugin\ContainerFactoryPluginInterface;
use Drupal\Core\StringTranslation\TranslatableMarkup;
use Drupal\deepseek\AiProvidersInterface;
use Drupal\deepseek\Attribute\AiProviders;
use GeminiAPI\Client;
use GeminiAPI\Enums\MimeType;
use GeminiAPI\Enums\Role;
use GeminiAPI\GenerationConfig;
use GeminiAPI\Resources\Content;
use GeminiAPI\Resources\Parts\FilePart;
use GeminiAPI\Resources\Parts\ImagePart;
use GeminiAPI\Resources\Parts\TextPart;
use GeminiAPI\Responses\GenerateContentResponse;
use Symfony\Component\DependencyInjection\ContainerInterface;

/**
 * Provider for communicating with the Google Gemini API.
 */
#[AiProviders(
  id: "gemini",
  label: new TranslatableMarkup("Gemini"),
  url: "https://generativelanguage.googleapis.com/v1",
  model: "gemini-1.5-flash",
  api_key: "",
  embedding: "embedding-001",
)]
class GeminiProvider implements AiProvidersInterface, ContainerFactoryPluginInterface {

  /**
   * Constructs a Gemini Provider object.
   *
   * @param string $plugin_id
   *   The plugin ID for the provider.
   * @param mixed $plugin_definition
   *   The plugin implementation definition.
   * @param mixed $configuration
   *   The plugin configuration.
   * @param \Drupal\Core\Config\Config $configFactory
   *   The deepseek.settings configuration object.
   */
  public function __construct($plugin_id, $plugin_definition, $configuration, protected Config $configFactory) {
    unset($plugin_id, $plugin_definition, $configuration);
  }

  /**
   * {@inheritdoc}
   */
  public static function create(ContainerInterface $container, $configuration, $plugin_id, $plugin_definition) {
    return new static(
      $plugin_id,
      $plugin_definition,
      $configuration,
      $container->get('config.factory')->get('deepseek.settings'),
    );
  }

  /**
   * Returns the list of models available from the provider.
   *
   * @return array
   *   A list of models, each with an 'id' and a 'name' key.
   */
  public function models($url = '', $api_key = ''): array {

    $client = new Client($api_key);
    $response = $client->listModels();
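    // Base list of known Gemini models; any models reported by the
    // listModels() call above are appended to it below.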
    $models = [
      [
        'id' => 'gemini-1.0-pro-vision-latest',
        'name' => 'Gemini 1.0 Pro Vision',
      ],
      ['id' => 'gemini-pro-vision', 'name' => 'Gemini 1.0 Pro Vision'],
      ['id' => 'gemini-1.5-pro-001', 'name' => 'Gemini 1.5 Pro 001'],
      ['id' => 'gemini-1.5-pro-002', 'name' => 'Gemini 1.5 Pro 002'],
      ['id' => 'gemini-1.5-pro', 'name' => 'Gemini 1.5 Pro'],
      ['id' => 'gemini-1.5-flash-001', 'name' => 'Gemini 1.5 Flash 001'],
      [
        'id' => 'gemini-1.5-flash-001-tuning',
        'name' => 'Gemini 1.5 Flash 001 Tuning',
      ],
      ['id' => 'gemini-1.5-flash', 'name' => 'Gemini 1.5 Flash'],
      ['id' => 'gemini-1.5-flash-002', 'name' => 'Gemini 1.5 Flash 002'],
      ['id' => 'gemini-1.5-flash-8b', 'name' => 'Gemini 1.5 Flash-8B'],
      ['id' => 'gemini-1.5-flash-8b-001', 'name' => 'Gemini 1.5 Flash-8B 001'],
      ['id' => 'gemini-2.0-flash', 'name' => 'Gemini 2.0 Flash'],
      ['id' => 'gemini-2.0-flash-001', 'name' => 'Gemini 2.0 Flash 001'],
      [
        'id' => 'gemini-2.0-flash-lite-001',
        'name' => 'Gemini 2.0 Flash-Lite 001',
      ],
      ['id' => 'gemini-2.0-flash-lite', 'name' => 'Gemini 2.0 Flash-Lite'],
      ['id' => 'embedding-001', 'name' => 'Embedding 001'],
      ['id' => 'text-embedding-004', 'name' => 'Text Embedding 004'],
    ];
    if (!empty($response?->models)) {
      foreach ($response->models as $model) {
        $name = explode('/', $model->name);
        $models[] = [
          'id' => end($name),
          'name' => $model->displayName,
        ];
      }
    }
    return $models;
  }

  /**
   * Sends a prompt and receives a chat-style response with streaming.
   *
   * @param array $messages
   *   An array of messages, where each message is an associative array with
   *   'role' (system, user, assistant) and 'content' keys.
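   *   For example (values are illustrative):
   *   @code
   *   $messages = [
   *     ['role' => 'system', 'content' => 'You are a helpful assistant.'],
   *     ['role' => 'user', 'content' => 'Summarize the latest article.'],
   *   ];
   *   @endcode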
   * @param array $options
   *   An optional array of parameters to pass to the
   *   Gemini API (e.g., temperature, top_p).
   * @param array $files
   *   A list of files to send with the message. Each item is an associative
   *   array with a 'mime' key (a \GeminiAPI\Enums\MimeType case) and a
   *   'base64' key containing the base64-encoded file contents.
   *
   * @return \Generator|array
   *   A generator yielding response chunks if streaming is enabled,
   *   or an array containing the full response if not.
   *
   * @throws \InvalidArgumentException
   *   Thrown when the API key is missing or no user message is provided.
   */
  public function chat(array $messages, array $options = [], array $files = []): \Generator|array {
    $model = $this->configFactory->get('model') ?? 'gemini-1.5-flash';
    $apiKey = $this->configFactory->get('api_key');
    $imagePart = [];

    if (empty($apiKey)) {
      throw new \InvalidArgumentException('API key for Gemini is not configured.');
    }

    $client = new Client($apiKey);
    $generativeModel = $client->generativeModel($model);

    // Create generation configuration from options.
    if (!empty($options)) {
      $generationConfig = (new GenerationConfig())
        ->withCandidateCount(1)
        ->withStopSequences(['STOP']);
      if (isset($options['temperature'])) {
        // The with*() setters return a new GenerationConfig, so reassign.
        $generationConfig = $generationConfig->withTemperature($options['temperature']);
      }
      if (isset($options['top_p'])) {
        $generationConfig = $generationConfig->withTopP($options['top_p']);
      }
      if (isset($options['top_k'])) {
        $generationConfig = $generationConfig->withTopK($options['top_k']);
      }
      if (isset($options['max_tokens'])) {
        $generationConfig = $generationConfig->withMaxOutputTokens($options['max_tokens']);
      }
      $generativeModel = $generativeModel->withGenerationConfig($generationConfig);
    }

    $chat = $generativeModel->startChat();

    // Find the last user message; it is the prompt to send.
    $lastUserMessageIndex = count($messages) - 1;
    while ($lastUserMessageIndex >= 0 && $messages[$lastUserMessageIndex]['role'] !== 'user') {
      $lastUserMessageIndex--;
    }

    if ($lastUserMessageIndex < 0) {
      throw new \InvalidArgumentException('No user message found in the provided messages array.');
    }

    $ask = $messages[$lastUserMessageIndex];

    // Prepare the chat history (excluding the last user message).
    if (!empty($messages)) {
      $history = [];
      for ($i = 0; $i < $lastUserMessageIndex; $i++) {
        $message = $messages[$i];
        $role = ($message['role'] === 'assistant') ? Role::Model : Role::User;
        $history[] = Content::text($message['content'], $role);
      }
      if (!empty($history)) {
        $chat = $chat->withHistory($history);
      }
    }

    // Send files.
    if (!empty($files)) {
      foreach ($files as $file) {
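        // Image MIME types are wrapped in an ImagePart; all other supported
        // MIME types are sent as a generic FilePart.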
        $imagePart[] = str_starts_with($file['mime']->value, 'image/')
          ? new ImagePart($file['mime'], $file['base64'])
          : new FilePart($file['mime'], $file['base64']);
      }
    }

    // If no streaming required, send messages normally.
    if (empty($options['stream'])) {
      $response = $chat->sendMessage(new TextPart($ask['content']), ...$imagePart);
      return ['response' => $response->text()];
    }

    // Streaming is handled with a static array that collects the chunks from
    // the callback and a generator that yields each chunk.
    static $responseChunks = [];
    static $isCompleted = FALSE;

    // Reset static variables for new request.
    $responseChunks = [];
    $isCompleted = FALSE;

    // Callbacks to process each part of the streaming response.
    $callback = function (GenerateContentResponse $response) use (&$responseChunks, &$isCompleted): void {
      $text = $response->text();
      if (!empty($text)) {
        $responseChunks[] = $text;
      }

      // Check if this is the final response.
      if ($response->candidates[0]->finishReason !== NULL) {
        $isCompleted = TRUE;
      }
    };

    // Start stream.
    $chat->sendMessageStream($callback, new TextPart($ask['content']), ...$imagePart);

    return (function () use (&$responseChunks, &$isCompleted) {
      // Get initial chunks that were already processed.
      foreach ($responseChunks as $chunk) {
        yield $chunk;
      }

      // Reset array after yielding initial chunks.
      $responseChunks = [];

      // If the stream has completed, there is nothing more to yield.
      if ($isCompleted) {
        return [];
      }
      // The stream has not completed yet, but all currently collected chunks
      // have been yielded. This approach might not work well with real-time
      // streaming, as the callback continues to run after this generator
      // returns.
      yield "Streaming completed. Note: Real-time streaming is not fully supported with this implementation.";
    })();
  }

  /**
   * Sends a raw text prompt for single-shot completions (not chat).
   *
   * @param string $prompt
   *   The text prompt to send.
   * @param array $options
   *   Optional parameters for the completion.
   *
   * @return array
   *   An array containing the completion text under the 'response' key.
   *
   * @throws \InvalidArgumentException
   *   Thrown when the API key is not configured.
   */
  public function completions(string $prompt, array $options = []): array {
    $model = $this->configFactory->get('model') ?? 'gemini-1.5-flash';
    $apiKey = $this->configFactory->get('api_key');

    if (empty($apiKey)) {
      throw new \InvalidArgumentException('API key for Gemini is not configured.');
    }
    $client = new Client($apiKey);
    // Build the generation configuration from the options, with defaults.
    $generationConfig = (new GenerationConfig())
      ->withTemperature($options['temperature'] ?? 0.7)
      ->withTopP($options['top_p'] ?? 0.8)
      ->withTopK($options['top_k'] ?? 40)
      ->withMaxOutputTokens($options['max_tokens'] ?? 800);
    $response = $client->generativeModel($model)
      ->withGenerationConfig($generationConfig)
      ->generateContent(new TextPart($prompt));

    $fullResponse = '';
    foreach ($response->candidates as $candidate) {
      foreach ($candidate->content->parts as $part) {
        $fullResponse .= $part->text;
      }
    }

    return ['response' => $fullResponse];
  }

  /**
   * Generates an embedding for a given text input.
   *
   * @param string $input
   *   The text to generate an embedding for.
   * @param array $options
   *   Optional parameters for embeddings. Not used by this provider.
   *
   * @return array
   *   The embedding vector as a list of floats, or an empty array if the
   *   input or the configured embedding model is empty.
   *
   * @throws \InvalidArgumentException
   *   Thrown when the API key is not configured.
   */
  public function embeddings(string $input, array $options = []): array {
    $embeddingModel = $this->configFactory->get('embedding');
    if (empty($input) || empty($embeddingModel)) {
      return [];
    }
    $apiKey = $this->configFactory->get('api_key');
    if (empty($apiKey)) {
      throw new \InvalidArgumentException('API key for Gemini is not configured.');
    }

    $client = new Client($apiKey);
    $embeddingResponse = $client->embeddingModel($embeddingModel)
      ->embedContent(new TextPart($input));
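    // Return the embedding vector (a list of floats) for the input text.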
    return $embeddingResponse->embedding->values;
  }

  /**
   * Maps a MIME type string to the corresponding MimeType enum case.
   *
   * @param string $mimeType
   *   The MIME type string, for example 'image/png'.
   *
   * @return \GeminiAPI\Enums\MimeType|null
   *   The matching MimeType enum case, or NULL if the type is not supported.
   */
  public function getMimeType(string $mimeType): MimeType|null {
    return match ($mimeType) {
      'image/jpeg', 'image/jpg' => MimeType::IMAGE_JPEG,
      'image/png' => MimeType::IMAGE_PNG,
      'image/heic' => MimeType::IMAGE_HEIC,
      'image/heif' => MimeType::IMAGE_HEIF,
      'image/webp' => MimeType::IMAGE_WEBP,
      'application/pdf' => MimeType::FILE_PDF,
      'application/x-javascript' => MimeType::APPLICATION_JAVASCRIPT,
      'application/x-python' => MimeType::APPLICATION_PYTHON,
      'text/plain' => MimeType::TEXT_PLAIN,
      'text/html' => MimeType::TEXT_HTML,
      'text/css' => MimeType::TEXT_CSS,
      'text/md' => MimeType::TEXT_MARKDOWN,
      'text/csv' => MimeType::TEXT_CSV,
      'text/xml' => MimeType::TEXT_XML,
      'text/rtf' => MimeType::TEXT_RTF,
      default => NULL,
    };
  }

}
