deepseek-1.x-dev/src/Plugin/AiProviders/SelfHostProvider.php
<?php

namespace Drupal\deepseek\Plugin\AiProviders;

use Drupal\Core\Config\ConfigFactoryInterface;
use Drupal\Core\Plugin\ContainerFactoryPluginInterface;
use Drupal\Core\StringTranslation\TranslatableMarkup;
use Drupal\deepseek\AiProvidersInterface;
use Drupal\deepseek\Attribute\AiProviders;
use GuzzleHttp\Client;
use GuzzleHttp\ClientInterface;
use Symfony\Component\DependencyInjection\ContainerInterface;

/**
 * Provider for communicating with a local LM Studio server.
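 *
 * The provider reads its runtime settings from the deepseek.settings
 * configuration object. A minimal sketch of the keys it consumes; the values
 * shown are illustrative examples, not defaults shipped by the module:
 * @code
 * \Drupal::configFactory()->getEditable('deepseek.settings')
 *   ->set('base_url', 'http://127.0.0.1:1234/v1')
 *   ->set('model', 'deepseek-r1-distill-qwen-7b')
 *   ->set('api_key', 'lm-studio')
 *   ->set('embedding', 'text-embedding-nomic-embed-text-v1.5')
 *   ->save();
 * @endcode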
 */
#[AiProviders(
  id: 'self_host',
  label: new TranslatableMarkup('Self-host'),
  url: 'http://localhost:1234/v1',
  model: 'deepseek-r1-distill-qwen-7b',
  api_key: 'deepseek-coder-v2-lite-instruct',
  embedding: 'text-embedding-nomic-embed-text-v1.5',
)]
class SelfHostProvider implements AiProvidersInterface, ContainerFactoryPluginInterface {

  /**
   * Constructs a SelfHostProvider object.
   *
   * @param \Drupal\Core\Config\ConfigFactoryInterface $configFactory
   *   The config factory service.
   * @param \GuzzleHttp\ClientInterface $httpClient
   *   The HTTP client.
   */
  public function __construct(
    protected ConfigFactoryInterface $configFactory,
    protected ClientInterface $httpClient,
  ) {}

  /**
   * {@inheritdoc}
   */
  public static function create(ContainerInterface $container, array $configuration, $plugin_id, $plugin_definition) {
    return new static(
      $container->get('config.factory'),
      $container->get('http_client'),
    );
  }

  /**
   * Lists the models available from the provider.
   *
   * @param string $url
   *   Base URL of the OpenAI-compatible API, without a trailing slash.
   * @param string $api_key
   *   (optional) API key sent as a bearer token.
   *
   * @return array
   *   The list of models, or an empty array on failure.
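   *
   * A minimal usage sketch; the $provider variable is illustrative and the
   * 'id' key follows the OpenAI-compatible /models schema:
   * @code
   * $models = $provider->models('http://localhost:1234/v1');
   * $ids = array_column($models, 'id');
   * @endcode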
   */
  public function models($url = '', $api_key = ''): array {
    try {
      $options = [];
      // Add an authorization header if an API key is provided.
      if (!empty($api_key)) {
        $options['headers'] = ['Authorization' => 'Bearer ' . $api_key];
      }
      // Request the models endpoint of the OpenAI-compatible API.
      $response = $this->httpClient->request('GET', $url . '/models', $options);
      // Get the response content and decode the JSON into an array.
      $body = $response->getBody()->getContents();
      $models_data = json_decode($body, TRUE);
      if ($models_data === NULL && json_last_error() !== JSON_ERROR_NONE) {
        throw new \Exception('Invalid JSON response: ' . json_last_error_msg());
      }
      return $models_data['data'] ?? [];
    }
    catch (\Exception $e) {
      // Treat unreachable servers and malformed responses as "no models".
      return [];
    }
  }

  /**
   * Sends chat messages and returns a streamed chat completion.
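   *
   * The return value is the streamed response object produced by the
   * openai-php client. A usage sketch, assuming the caller iterates the
   * stream (variable names are illustrative):
   * @code
   * $stream = $provider->chat([
   *   ['role' => 'user', 'content' => 'Hello'],
   * ]);
   * foreach ($stream as $chunk) {
   *   // Each chunk carries an incremental delta of the assistant reply.
   *   print $chunk->choices[0]->delta->content ?? '';
   * }
   * @endcode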
   */
  public function chat(array $messages, array $options = [], $files = []) {
    $config = $this->configFactory->get('deepseek.settings');
    $model = $config->get('model');
    // A placeholder key is used when none is configured; local servers
    // usually ignore it.
    $apiKey = $config->get('api_key') ?? 'RANDOM-KEY';
    $baseUrl = $config->get('base_url') ?: 'http://127.0.0.1:1234/v1';
    // Build the request payload; caller-supplied options override defaults.
    $apiData = array_merge([
      'model' => $model ?? 'deepseek-chat',
      'messages' => $messages,
      'temperature' => 0.7,
      'stream' => TRUE,
    ], $options);
    $client = \OpenAI::factory()
      ->withApiKey($apiKey)
      ->withBaseUri($baseUrl)
      ->withHttpClient(new Client(['timeout' => 360, 'http_errors' => FALSE]))
      ->make();
    return $client->chat()->createStreamed($apiData);
  }

  /**
   * Sends a raw text prompt for single-shot completions (not chat).
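   *
   * A usage sketch for consuming the stream (the calling code is
   * illustrative, not part of this class):
   * @code
   * $stream = $provider->completions('Write a haiku about Drupal.');
   * foreach ($stream as $chunk) {
   *   print $chunk->choices[0]->text ?? '';
   * }
   * @endcode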
   */
  public function completions(string $prompt, array $options = []) {
    $config = $this->configFactory->get('deepseek.settings');
    $model = $config->get('model');
    $apiKey = $config->get('api_key') ?? 'RANDOM-KEY';
    $baseUrl = $config->get('base_url') ?: 'http://127.0.0.1:1234/v1';
    // Build the request payload; caller-supplied options override defaults.
    $apiData = array_merge([
      'model' => $model ?? 'deepseek-chat',
      'prompt' => $prompt,
      'temperature' => 0.7,
      'stream' => TRUE,
    ], $options);
    $client = \OpenAI::factory()
      ->withApiKey($apiKey)
      ->withBaseUri($baseUrl)
      ->withHttpClient(new Client(['timeout' => 360, 'http_errors' => FALSE]))
      ->make();
    return $client->completions()->createStreamed($apiData);
  }

  /**
   * Generates embeddings from a given text or list of texts.
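   *
   * Note that only the first embedding vector is returned, even when several
   * texts are passed in. A usage sketch (illustrative):
   * @code
   * $vector = $provider->embeddings('A sentence to index.');
   * // $vector is a flat list of floats, e.g. [0.012, -0.034, ...].
   * @endcode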
   */
  public function embeddings(array|string $input, array $options = []): array {
    $config = $this->configFactory->get('deepseek.settings');
    $model = $config->get('embedding');
    if (empty($input) || empty($model)) {
      return [];
    }
    $apiKey = $config->get('api_key') ?? 'RANDOM-KEY';
    $baseUrl = $config->get('base_url') ?: 'http://127.0.0.1:1234/v1';
    // Normalize the input to a list of texts.
    $texts = is_array($input) ? $input : [$input];
    $apiData = [
      'model' => $model,
      'input' => $texts,
    ];
    $client = \OpenAI::factory()
      ->withApiKey($apiKey)
      ->withBaseUri($baseUrl)
      ->withHttpClient(new Client(['timeout' => 60, 'http_errors' => FALSE]))
      ->make();
    $data = $client->embeddings()->create($apiData)->toArray()['data'] ?? [];
    // Only the first embedding vector is returned.
    return $data[0]['embedding'] ?? [];
  }

  /**
   * Gets the MIME type of a file.
   *
   * File handling is not implemented for this provider, so NULL is always
   * returned.
   */
  public function getMimeType(string $mimeType): null {
    return NULL;
  }

}
