feat: intégration plugin Kirby SEO
All checks were successful
Deploy / Deploy to Production (push) Successful in 22s

- Ajout de tobimori/kirby-seo via Composer
- snippet('seo/head') dans header.php (remplace les meta manuels)
- snippet('seo/schemas') dans footer.php pour JSON-LD
- Onglet SEO ajouté dans site.yml et tous les blueprints de pages
- Configuration SEO dans config.php (sitemap, robots, canonicalBase TODO)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
isUnknown 2026-03-25 12:59:18 +01:00
parent baab2fb3a1
commit 58c31ea391
133 changed files with 9201 additions and 253 deletions

View file

@@ -0,0 +1,140 @@
<?php
namespace tobimori\Seo\Ai\Drivers;
use Generator;
use tobimori\Seo\Ai\Chunk;
use tobimori\Seo\Ai\Content;
use tobimori\Seo\Ai\Driver;
use tobimori\Seo\Ai\SseStream;
class Anthropic extends Driver
{
	protected const string DEFAULT_ENDPOINT = 'https://api.anthropic.com/v1/messages';
	// NOTE(review): verify this model id — Anthropic's published 4.x ids use
	// the `claude-haiku-4-5` ordering, not `claude-4-5-haiku`
	protected const string DEFAULT_MODEL = 'claude-4-5-haiku';

	/**
	 * Streams a completion from the Anthropic Messages API, translating the
	 * provider-specific SSE events into generic Chunk objects.
	 *
	 * @inheritDoc
	 */
	public function stream(
		array $content,
		string|null $model = null,
	): Generator {
		$apiKey = $this->config('apiKey', required: true);

		$headers = [
			'Content-Type: application/json',
			'Accept: text/event-stream',
			"x-api-key: {$apiKey}",
			'anthropic-version: 2023-06-01',
		];

		$payload = [
			'model' => $model ?? $this->config('model', static::DEFAULT_MODEL),
			'messages' => $this->buildMessages($content),
			'max_tokens' => 4096,
			'stream' => true,
		];

		// the Messages API only accepts "user"/"assistant" roles inside
		// `messages`; system prompts must be sent as the top-level `system`
		// parameter, otherwise the request is rejected
		$system = $this->buildSystemPrompt($content);
		if ($system !== null) {
			$payload['system'] = $system;
		}

		$stream = new SseStream($this->config('endpoint', static::DEFAULT_ENDPOINT), $headers, $payload, (int)$this->config('timeout', 120));

		yield from $stream->stream(function (array $event): Generator {
			$type = $event['type'] ?? null;

			// handle message start event
			if ($type === 'message_start') {
				yield Chunk::streamStart($event);
				return;
			}

			// handle content block start (beginning of text output)
			if ($type === 'content_block_start') {
				$contentBlock = $event['content_block'] ?? [];
				if (($contentBlock['type'] ?? null) === 'text') {
					yield Chunk::textStart($event);
				}
				return;
			}

			// handle content block delta (incremental text chunks)
			if ($type === 'content_block_delta') {
				$delta = $event['delta'] ?? [];
				if (($delta['type'] ?? null) === 'text_delta') {
					$text = $delta['text'] ?? '';
					if ($text !== '') {
						yield Chunk::textDelta($text, $event);
					}
				}
				return;
			}

			// handle content block stop (end of text block)
			if ($type === 'content_block_stop') {
				yield Chunk::textComplete($event);
				return;
			}

			// handle message stop (end of stream)
			if ($type === 'message_stop') {
				yield Chunk::streamEnd($event);
				return;
			}

			// ping events are keep-alives and carry no data — ignore them
			if ($type === 'ping') {
				return;
			}

			// handle error events
			if ($type === 'error') {
				$error = $event['error'] ?? [];
				$message = $error['message'] ?? 'Unknown Anthropic streaming error.';
				yield Chunk::error($message, $event);
				return;
			}

			// handle message delta (contains usage info)
			if ($type === 'message_delta') {
				// we could extract usage info here if needed
				return;
			}
		});
	}

	/**
	 * Translates an array of Content messages into the Anthropic messages format.
	 *
	 * System messages are skipped here because Anthropic does not accept the
	 * "system" role inside `messages` — they are hoisted into the top-level
	 * `system` request parameter by buildSystemPrompt().
	 *
	 * @param array<Content> $content
	 */
	private function buildMessages(array $content): array
	{
		$messages = [];

		foreach ($content as $message) {
			// hoisted into the top-level `system` parameter instead
			if ($message->role() === 'system') {
				continue;
			}

			$blocks = [];
			foreach ($message->blocks() as $block) {
				if ($block['type'] === 'image') {
					$blocks[] = [
						'type' => 'image',
						'source' => [
							'type' => 'base64',
							'media_type' => $block['mediaType'],
							'data' => $block['data'],
						],
					];
				} elseif ($block['type'] === 'text') {
					$blocks[] = [
						'type' => 'text',
						'text' => $block['text'],
					];
				}
			}

			$messages[] = [
				'role' => $message->role(),
				'content' => $blocks,
			];
		}

		return $messages;
	}

	/**
	 * Concatenates the text blocks of all system messages into a single
	 * string for the Messages API's top-level `system` parameter.
	 *
	 * Returns null when no system message is present so the request payload
	 * stays identical to the previous behavior in that case.
	 *
	 * @param array<Content> $content
	 */
	private function buildSystemPrompt(array $content): string|null
	{
		$texts = [];

		foreach ($content as $message) {
			if ($message->role() !== 'system') {
				continue;
			}

			foreach ($message->blocks() as $block) {
				if ($block['type'] === 'text') {
					$texts[] = $block['text'];
				}
			}
		}

		return $texts === [] ? null : implode("\n\n", $texts);
	}
}

View file

@@ -0,0 +1,149 @@
<?php
namespace tobimori\Seo\Ai\Drivers;
use Generator;
use tobimori\Seo\Ai\Chunk;
use tobimori\Seo\Ai\Content;
use tobimori\Seo\Ai\Driver;
use tobimori\Seo\Ai\SseStream;
class Gemini extends Driver
{
	protected const string DEFAULT_ENDPOINT = 'https://generativelanguage.googleapis.com/v1beta';
	protected const string DEFAULT_MODEL = 'gemini-3.1-flash-lite-preview';

	/**
	 * Streams a completion from the Gemini streamGenerateContent endpoint,
	 * translating the provider-specific SSE events into generic Chunk objects.
	 *
	 * @inheritDoc
	 */
	public function stream(
		array $content,
		string|null $model = null,
	): Generator {
		$apiKey = $this->config('apiKey', required: true);
		$model = $model ?? $this->config('model', static::DEFAULT_MODEL);
		$baseEndpoint = $this->config('endpoint', static::DEFAULT_ENDPOINT);

		// `alt=sse` switches the response format to server-sent events
		$endpoint = "{$baseEndpoint}/models/{$model}:streamGenerateContent?alt=sse";

		$headers = [
			'Content-Type: application/json',
			// send the key via the officially supported header instead of a
			// `key` query parameter, so the secret cannot leak through access
			// logs, referrer headers or proxy caches
			"x-goog-api-key: {$apiKey}",
		];

		$payload = [
			'contents' => $this->buildContents($content),
		];

		// system prompts are not regular contents for Gemini but a separate
		// top-level `systemInstruction` object
		$systemInstruction = $this->buildSystemInstruction($content);
		if ($systemInstruction !== null) {
			$payload['systemInstruction'] = $systemInstruction;
		}

		$stream = new SseStream($endpoint, $headers, $payload, (int)$this->config('timeout', 120));

		// Gemini has no explicit start/end events, so we synthesize them
		// around the first candidate and the final `finishReason`
		$started = false;

		yield from $stream->stream(function (array $event) use (&$started): Generator {
			$candidates = $event['candidates'] ?? [];
			$candidate = $candidates[0] ?? null;

			if ($candidate === null) {
				// events without candidates are either errors or noise
				$error = $event['error'] ?? null;
				if ($error) {
					yield Chunk::error($error['message'] ?? 'Unknown Gemini error.', $event);
				}
				return;
			}

			// emit synthetic start markers before the first real chunk
			if (!$started) {
				yield Chunk::streamStart($event);
				yield Chunk::textStart($event);
				$started = true;
			}

			$finishReason = $candidate['finishReason'] ?? null;
			if ($finishReason === 'SAFETY') {
				yield Chunk::error('Response blocked by safety filters.', $event);
				return;
			}

			$parts = $candidate['content']['parts'] ?? [];
			foreach ($parts as $part) {
				$text = $part['text'] ?? '';
				if ($text !== '') {
					yield Chunk::textDelta($text, $event);
				}
			}

			// any other finish reason marks the end of the stream
			if ($finishReason !== null) {
				yield Chunk::textComplete($event);
				yield Chunk::streamEnd($event);
			}
		});
	}

	/**
	 * Translates an array of Content messages into the Gemini contents format.
	 *
	 * System messages are skipped here — they are collected separately into
	 * the `systemInstruction` payload field by buildSystemInstruction().
	 *
	 * @param array<Content> $content
	 */
	private function buildContents(array $content): array
	{
		$contents = [];

		foreach ($content as $message) {
			if ($message->role() === 'system') {
				continue;
			}

			$parts = [];
			foreach ($message->blocks() as $block) {
				if ($block['type'] === 'image') {
					$parts[] = [
						'inline_data' => [
							'mime_type' => $block['mediaType'],
							'data' => $block['data'],
						],
					];
				} elseif ($block['type'] === 'text') {
					$parts[] = [
						'text' => $block['text'],
					];
				}
			}

			$contents[] = [
				// Gemini names the assistant role "model"
				'role' => $message->role() === 'assistant' ? 'model' : 'user',
				'parts' => $parts,
			];
		}

		return $contents;
	}

	/**
	 * Extracts system messages into a Gemini systemInstruction object.
	 *
	 * Returns null when no system message carries text, so the field is
	 * omitted from the payload entirely.
	 *
	 * @param array<Content> $content
	 */
	private function buildSystemInstruction(array $content): array|null
	{
		$parts = [];

		foreach ($content as $message) {
			if ($message->role() !== 'system') {
				continue;
			}

			foreach ($message->blocks() as $block) {
				if ($block['type'] === 'text') {
					$parts[] = ['text' => $block['text']];
				}
			}
		}

		if ($parts === []) {
			return null;
		}

		return ['parts' => $parts];
	}
}

View file

@@ -0,0 +1,118 @@
<?php
namespace tobimori\Seo\Ai\Drivers;
use Generator;
use tobimori\Seo\Ai\Chunk;
use tobimori\Seo\Ai\Content;
use tobimori\Seo\Ai\Driver;
use tobimori\Seo\Ai\SseStream;
class OpenAi extends Driver
{
	protected const string DEFAULT_ENDPOINT = 'https://api.openai.com/v1/responses';
	protected const string DEFAULT_MODEL = 'gpt-5-mini-2025-08-07';

	/**
	 * Streams a completion from the OpenAI Responses API, mapping its SSE
	 * event types onto the driver-agnostic Chunk objects.
	 *
	 * @inheritDoc
	 */
	public function stream(
		array $content,
		string|null $model = null,
	): Generator {
		$apiKey = $this->config('apiKey', required: true);

		$headers = [
			'Content-Type: application/json',
			'Accept: text/event-stream',
			"Authorization: Bearer {$apiKey}",
		];

		// optional org header for accounts that belong to multiple organizations
		if ($organization = $this->config('organization')) {
			$headers[] = "OpenAI-Organization: {$organization}";
		}

		$payload = [
			'model' => $model ?? $this->config('model', static::DEFAULT_MODEL),
			'input' => $this->buildInput($content),
			// the `instructions` field is not honored by all compatible
			// backends (e.g. OpenRouter), so everything lives in the input
			'stream' => true,
		];

		$stream = new SseStream($this->config('endpoint', static::DEFAULT_ENDPOINT), $headers, $payload, (int)$this->config('timeout', 120));

		yield from $stream->stream(function (array $event): Generator {
			// dispatch on the Responses API event type; unknown types are dropped
			switch ($event['type'] ?? null) {
				case 'response.created':
					yield Chunk::streamStart($event);
					break;

				case 'response.in_progress':
					yield Chunk::textStart($event);
					break;

				case 'response.output_text.delta':
					$text = $event['delta'] ?? '';
					if ($text !== '') {
						yield Chunk::textDelta($text, $event);
					}
					break;

				case 'response.output_text.done':
					yield Chunk::textComplete($event);
					break;

				case 'response.completed':
					yield Chunk::streamEnd($event);
					break;

				case 'response.output_item.added':
					// only reasoning items mark the start of a thinking phase
					if (($event['item']['type'] ?? null) === 'reasoning') {
						yield Chunk::thinkingStart($event);
					}
					break;

				case 'response.error':
					yield Chunk::error($event['error']['message'] ?? 'Unknown OpenAI streaming error.', $event);
					break;
			}
		});
	}

	/**
	 * Translates an array of Content messages into the OpenAI Responses API input format.
	 *
	 * @param array<Content> $content
	 */
	private function buildInput(array $content): array
	{
		$input = [];

		foreach ($content as $message) {
			$parts = [];

			foreach ($message->blocks() as $block) {
				// text blocks map straight to input_text
				if ($block['type'] === 'text') {
					$parts[] = [
						'type' => 'input_text',
						'text' => $block['text'],
					];
					continue;
				}

				// images are inlined as base64 data URLs; other block types
				// are silently dropped
				if ($block['type'] === 'image') {
					$parts[] = [
						'type' => 'input_image',
						'image_url' => "data:{$block['mediaType']};base64,{$block['data']}",
					];
				}
			}

			$input[] = [
				'role' => $message->role(),
				'content' => $parts,
			];
		}

		return $input;
	}
}