Définition
L’optimisation technique SEO englobe toutes les améliorations apportées à l’infrastructure et au code d’un site web pour faciliter son exploration par les moteurs de recherche et améliorer l’expérience utilisateur. Elle constitue la fondation sur laquelle reposent les autres piliers du SEO (contenu et popularité), garantissant que les moteurs peuvent accéder, comprendre et indexer efficacement le contenu.
Checklist optimisation technique
Crawlabilité et indexation
# Audit crawlabilité complet
class TechnicalSEOAuditor:
    """Audits a site's technical SEO: robots.txt, sitemap, crawl health, internal links."""

    def __init__(self, domain):
        # Base URL of the site to audit, e.g. "https://example.com" (no trailing slash).
        self.domain = domain
        self.issues = []

    def audit_crawlability(self):
        """
        Check the site's accessibility to search-engine robots.

        Aggregates the individual checks into a report. Helpers such as
        check_sitemap() / generate_report() are defined elsewhere.
        """
        checks = {
            'robots_txt': self.check_robots_txt(),
            'xml_sitemap': self.check_sitemap(),
            'crawl_errors': self.check_crawl_errors(),
            'orphan_pages': self.find_orphan_pages(),
            'crawl_depth': self.analyze_crawl_depth(),
            'internal_linking': self.audit_internal_links()
        }
        return self.generate_report(checks)

    def check_robots_txt(self):
        """
        Analyze the robots.txt file.

        Returns a list of issue dicts with keys 'severity', 'issue', 'fix'.
        """
        import requests

        robots_url = f"{self.domain}/robots.txt"
        issues = []
        try:
            # Timeout keeps the audit from hanging on an unresponsive host.
            response = requests.get(robots_url, timeout=10)
            if response.status_code == 200:
                content = response.text
                # BUGFIX: compare whole (stripped) lines. The original substring
                # test `"Disallow: /" in content` also matched partial blocks
                # like "Disallow: /products" and falsely reported a full block.
                lines = [line.strip() for line in content.splitlines()]

                # Critical check: a bare "Disallow: /" under "User-agent: *"
                # blocks the entire site.
                if "Disallow: /" in lines and "User-agent: *" in lines:
                    issues.append({
                        'severity': 'critical',
                        'issue': 'Site entièrement bloqué',
                        'fix': 'Retirer Disallow: / global'
                    })

                # Check that important sections are not blocked.
                important_paths = ['/products', '/services', '/blog']
                for path in important_paths:
                    if f"Disallow: {path}" in content:
                        issues.append({
                            'severity': 'high',
                            'issue': f'{path} bloqué dans robots.txt',
                            'fix': f'Autoriser crawl de {path}'
                        })

                # A declared sitemap helps discovery.
                if "Sitemap:" not in content:
                    issues.append({
                        'severity': 'medium',
                        'issue': 'Sitemap non déclaré',
                        'fix': 'Ajouter Sitemap: URL dans robots.txt'
                    })
            else:
                # Any non-200 status is treated as "file missing".
                issues.append({
                    'severity': 'medium',
                    'issue': 'Robots.txt introuvable',
                    'fix': 'Créer fichier robots.txt'
                })
        except Exception as e:
            # Network/transport failure: report it instead of crashing the audit.
            issues.append({
                'severity': 'high',
                'issue': f'Erreur accès robots.txt: {str(e)}',
                'fix': 'Vérifier accessibilité du fichier'
            })
        return issues
Performance et vitesse
// Optimisation performance technique
class PerformanceOptimizer {
    constructor(domain) {
        this.domain = domain;
        // Core Web Vitals targets: LCP in seconds, FID in ms, CLS score, TTFB in ms.
        this.metrics = {
            lcp: { target: 2.5, weight: 0.25 },
            fid: { target: 100, weight: 0.25 },
            cls: { target: 0.1, weight: 0.25 },
            ttfb: { target: 800, weight: 0.25 }
        };
    }

    /**
     * Run the full performance audit, then attach a prioritized
     * optimization plan to the result.
     */
    async runPerformanceAudit() {
        const audit = {
            coreWebVitals: await this.measureCoreWebVitals(),
            resourceOptimization: await this.checkResourceOptimization(),
            serverOptimization: await this.checkServerConfig(),
            caching: await this.analyzeCaching(),
            recommendations: []
        };
        // Analyze and prioritize the optimizations.
        this.generateOptimizationPlan(audit);
        return audit;
    }

    /**
     * Inspect page resources (images, CSS, JS, fonts) and collect
     * concrete optimization opportunities.
     */
    async checkResourceOptimization() {
        const optimizations = {
            images: [],
            css: [],
            javascript: [],
            fonts: []
        };

        // Image optimization.
        const images = await this.getPageImages();
        images.forEach(img => {
            // BUGFIX: the original `!img.loading === 'lazy'` negated first and
            // then compared a boolean to a string — always false, so the
            // lazy-loading check never fired.
            if (img.loading !== 'lazy' && !this.isAboveFold(img)) {
                optimizations.images.push({
                    issue: 'Missing lazy loading',
                    element: img.src,
                    impact: 'medium',
                    fix: 'Add loading="lazy"'
                });
            }
            if (!img.srcset) {
                optimizations.images.push({
                    issue: 'No responsive images',
                    element: img.src,
                    impact: 'high',
                    fix: 'Implement srcset for different viewports'
                });
            }
            // Prefer modern formats (smaller payloads).
            if (!this.isModernFormat(img.src)) {
                optimizations.images.push({
                    issue: 'Legacy image format',
                    element: img.src,
                    impact: 'medium',
                    fix: 'Convert to WebP or AVIF'
                });
            }
        });

        // CSS optimization: large non-critical stylesheets block rendering.
        const stylesheets = await this.getStylesheets();
        for (const css of stylesheets) {
            if (css.size > 50000 && !css.critical) {
                optimizations.css.push({
                    issue: 'Large CSS file blocking render',
                    file: css.href,
                    size: css.size,
                    impact: 'high',
                    fix: 'Extract critical CSS, defer non-critical'
                });
            }
        }
        return optimizations;
    }
}
Architecture et structure
Optimisation des URLs
# Gestion structure URLs
class URLStructureOptimizer:
    """Audits site URLs against depth, length, character and query-parameter rules."""

    def __init__(self):
        # Heuristic limits for a "clean" URL structure.
        self.url_patterns = {
            'clean': r'^[a-z0-9\-/]+$',   # lowercase letters, digits, hyphens, slashes
            'params_limit': 2,            # max number of query parameters
            'depth_limit': 4,             # max '/' count in the path
            'length_limit': 75            # max path length in characters
        }

    def audit_url_structure(self, urls):
        """
        Audit the site's URL structure.

        Returns a dict of issue lists keyed by category: 'too_deep',
        'too_long', 'special_chars', 'duplicate_paths', 'parameter_issues'.
        """
        # BUGFIX: these names were used without any import in the original,
        # raising NameError at runtime.
        import re
        from urllib.parse import urlparse, parse_qs

        issues = {
            'too_deep': [],
            'too_long': [],
            'special_chars': [],
            'duplicate_paths': [],
            'parameter_issues': []
        }
        url_map = {}  # normalized URL -> first URL seen with that form
        for url in urls:
            parsed = urlparse(url)
            path = parsed.path

            # Excessive depth.
            depth = path.count('/')
            if depth > self.url_patterns['depth_limit']:
                issues['too_deep'].append({
                    'url': url,
                    'depth': depth,
                    'recommendation': 'Simplifier architecture'
                })

            # Excessive length.
            if len(path) > self.url_patterns['length_limit']:
                issues['too_long'].append({
                    'url': url,
                    'length': len(path),
                    'recommendation': 'Raccourcir URL'
                })

            # Special characters.
            if not re.match(self.url_patterns['clean'], path):
                issues['special_chars'].append({
                    'url': url,
                    'chars': re.findall(r'[^a-z0-9\-/]', path),
                    'recommendation': 'Utiliser uniquement lettres, chiffres, tirets'
                })

            # GET parameters.
            if parsed.query:
                params = parse_qs(parsed.query)
                if len(params) > self.url_patterns['params_limit']:
                    issues['parameter_issues'].append({
                        'url': url,
                        'param_count': len(params),
                        'recommendation': 'Réduire paramètres ou utiliser URL propres'
                    })

            # Duplicate detection via normalized form.
            normalized = self.normalize_url(url)
            if normalized in url_map:
                issues['duplicate_paths'].append({
                    'url1': url_map[normalized],
                    'url2': url,
                    'recommendation': 'Implémenter canoniques ou redirections'
                })
            else:
                url_map[normalized] = url
        return issues

    def normalize_url(self, url):
        """
        Normalize a URL (lowercase, no trailing slash, sorted query params)
        so that duplicate paths can be detected.
        """
        from urllib.parse import urlparse, parse_qs

        parsed = urlparse(url.lower())
        # Remove trailing slash so /a/b/ and /a/b collapse together.
        path = parsed.path.rstrip('/')
        # Sort parameters so ?a=1&b=2 and ?b=2&a=1 collapse together.
        params = sorted(parse_qs(parsed.query).items())
        if not params:
            # BUGFIX: the original always appended '?', even with no query.
            return path
        query = '&'.join(f'{k}={v[0]}' for k, v in params)
        return f"{path}?{query}"
Sécurité et HTTPS
// Vérification sécurité technique
class SecurityAuditor {
    /**
     * Run every security check and build the aggregated report.
     */
    async auditSecuritySEO() {
        const securityChecks = {
            https: await this.checkHTTPS(),
            mixedContent: await this.checkMixedContent(),
            headers: await this.checkSecurityHeaders(),
            certificates: await this.checkSSLCertificate()
        };
        return this.generateSecurityReport(securityChecks);
    }

    /**
     * Verify HTTP-to-HTTPS redirection and HSTS presence.
     * Returns a list of issue objects.
     */
    async checkHTTPS() {
        const issues = [];

        // The HTTP origin must answer with a redirect (301 or 302).
        const httpResponse = await fetch(`http://${this.domain}`, {
            redirect: 'manual'
        });
        const redirects = httpResponse.status === 301 || httpResponse.status === 302;
        if (!redirects) {
            issues.push({
                severity: 'critical',
                issue: 'Pas de redirection HTTP vers HTTPS',
                impact: 'Sécurité et rankings',
                fix: 'Implémenter redirection 301 permanente'
            });
        }

        // HSTS tells browsers to always use HTTPS for this host.
        const httpsResponse = await fetch(`https://${this.domain}`);
        const hstsHeader = httpsResponse.headers.get('strict-transport-security');
        if (!hstsHeader) {
            issues.push({
                severity: 'medium',
                issue: 'Header HSTS manquant',
                impact: 'Sécurité réduite',
                fix: 'Ajouter Strict-Transport-Security header'
            });
        }
        return issues;
    }

    /**
     * Find page resources still loaded over plain HTTP.
     * Scripts are flagged as critical, everything else as high.
     */
    async checkMixedContent() {
        const mixedContent = [];
        const resources = await this.getAllPageResources();
        for (const resource of resources) {
            const insecure = resource.url.startsWith('http://') &&
                !resource.url.includes('localhost');
            if (!insecure) {
                continue;
            }
            mixedContent.push({
                type: resource.type,
                url: resource.url,
                severity: resource.type === 'script' ? 'critical' : 'high',
                fix: 'Migrer ressource vers HTTPS'
            });
        }
        return mixedContent;
    }
}
Mobile et responsive
Optimisation mobile
# Vérification mobile-first
class MobileOptimizationChecker:
    """Audits a page's mobile friendliness from its raw HTML."""

    def __init__(self):
        # Canonical viewport content; kept for compatibility, though matching
        # is now order-independent (see audit_mobile_optimization).
        self.viewport_regex = r'width=device-width.*initial-scale=1'
        self.mobile_breakpoint = 768  # px; common tablet/mobile breakpoint

    def audit_mobile_optimization(self, page_html):
        """
        Run the full mobile-optimization audit on one HTML page.

        Returns a list of issue dicts. Helpers such as
        is_too_small_for_mobile() / extract_font_size() /
        detect_horizontal_scroll() are defined elsewhere.
        """
        # BUGFIX: `re` was used below without ever being imported.
        import re
        from bs4 import BeautifulSoup

        soup = BeautifulSoup(page_html, 'html.parser')
        issues = []

        # 1. Viewport meta tag
        viewport = soup.find('meta', {'name': 'viewport'})
        if not viewport:
            issues.append({
                'type': 'missing_viewport',
                'severity': 'critical',
                'message': 'Balise viewport manquante',
                'fix': '<meta name="viewport" content="width=device-width, initial-scale=1">'
            })
        else:
            content = viewport.get('content', '')
            # BUGFIX: the original single regex required "width=device-width"
            # to appear BEFORE "initial-scale=1" and wrongly flagged valid
            # tags written in the opposite order. Check both independently.
            if not (re.search(r'width=device-width', content)
                    and re.search(r'initial-scale=1', content)):
                issues.append({
                    'type': 'incorrect_viewport',
                    'severity': 'high',
                    'message': 'Configuration viewport incorrecte',
                    'current': viewport.get('content'),
                    'fix': 'width=device-width, initial-scale=1'
                })

        # 2. Tap-target sizes
        buttons = soup.find_all(['button', 'a'])
        for btn in buttons:
            if self.is_too_small_for_mobile(btn):
                issues.append({
                    'type': 'small_tap_target',
                    'element': str(btn)[:100],  # truncate to keep the report readable
                    'severity': 'medium',
                    'fix': 'Minimum 48x48px pour targets tactiles'
                })

        # 3. Readable text
        fonts = soup.find_all(style=re.compile('font-size'))
        for element in fonts:
            size = self.extract_font_size(element.get('style'))
            # NOTE(review): the cutoff (12px) and the recommended minimum in
            # the fix text (16px) disagree — confirm the intended threshold.
            if size and size < 12:
                issues.append({
                    'type': 'small_font',
                    'severity': 'medium',
                    'message': f'Police trop petite: {size}px',
                    'fix': 'Minimum 16px pour le corps de texte mobile'
                })

        # 4. Overflowing content
        if self.detect_horizontal_scroll(page_html):
            issues.append({
                'type': 'horizontal_scroll',
                'severity': 'high',
                'message': 'Contenu déborde de l\'écran mobile',
                'fix': 'Utiliser max-width: 100% et overflow-x: hidden'
            })
        return issues
Données structurées
Implémentation Schema.org
// Générateur données structurées
class StructuredDataGenerator {
    /**
     * Dispatch to the schema builder matching the page type.
     * Returns null when the page type is not supported.
     */
    generateSchema(pageType, data) {
        const schemas = {
            article: this.generateArticleSchema,
            product: this.generateProductSchema,
            localBusiness: this.generateLocalBusinessSchema,
            faqPage: this.generateFAQSchema,
            breadcrumb: this.generateBreadcrumbSchema
        };
        const builder = schemas[pageType];
        return builder ? builder.call(this, data) : null;
    }

    /**
     * Build a schema.org Product JSON-LD string.
     * Returns null when the schema fails basic validation.
     */
    generateProductSchema(product) {
        const offer = {
            "@type": "Offer",
            "url": product.url,
            "priceCurrency": product.currency,
            "price": product.price,
            "availability": this.mapAvailability(product.stock),
            "seller": {
                "@type": "Organization",
                "name": product.seller
            }
        };
        const schema = {
            "@context": "https://schema.org/",
            "@type": "Product",
            "name": product.name,
            "description": product.description,
            "image": product.images,
            "brand": {
                "@type": "Brand",
                "name": product.brand
            },
            "offers": offer
        };
        // Attach aggregate ratings when available.
        if (product.rating) {
            schema.aggregateRating = {
                "@type": "AggregateRating",
                "ratingValue": product.rating.value,
                "reviewCount": product.rating.count
            };
        }
        // Serialize only when the schema passes validation.
        return this.validateSchema(schema) ? JSON.stringify(schema, null, 2) : null;
    }

    /**
     * Basic required-field validation per schema type.
     * Unknown types are accepted as-is.
     */
    validateSchema(schema) {
        const requiredFields = {
            'Product': ['name', 'offers'],
            'Offer': ['price', 'priceCurrency'],
            'Article': ['headline', 'datePublished', 'author']
        };
        const fields = requiredFields[schema['@type']];
        if (!fields) {
            return true;
        }
        return fields.every(field => this.hasNestedProperty(schema, field));
    }
}
Monitoring technique
# Monitoring continu SEO technique
class TechnicalSEOMonitor:
    """Daily technical SEO health checks with alerting thresholds."""

    def __init__(self, domain):
        self.domain = domain
        # Alert thresholds: crawl errors (count), response time (ms),
        # page size (bytes), 404 rate (ratio).
        self.alerts_threshold = {
            'crawl_errors': 5,
            'response_time': 3000,
            'page_size': 3000000,
            '404_rate': 0.05
        }

    def daily_technical_check(self):
        """
        Run the daily technical checks.

        Returns a report dict with 'date', 'status' ('healthy' /
        'warning' / 'critical'), 'issues' and 'metrics'. Measurement
        helpers (check_uptime, measure_response_times, ...) are defined
        elsewhere.
        """
        # BUGFIX: datetime was used without being imported anywhere.
        from datetime import datetime

        report = {
            'date': datetime.now(),
            'status': 'healthy',
            'issues': [],
            'metrics': {}
        }

        # 1. Site availability
        uptime = self.check_uptime()
        if uptime < 99.9:
            report['issues'].append({
                'type': 'uptime',
                'severity': 'critical',
                'value': uptime,
                'message': f'Uptime {uptime}% - impact SEO potentiel'
            })

        # 2. Response times
        # BUGFIX: guard against an empty sample — the original raised
        # ZeroDivisionError when no measurements were returned.
        response_times = self.measure_response_times()
        avg_response = (sum(response_times) / len(response_times)) if response_times else 0
        if avg_response > self.alerts_threshold['response_time']:
            report['issues'].append({
                'type': 'slow_response',
                'severity': 'high',
                'value': avg_response,
                'message': f'Temps réponse moyen: {avg_response}ms'
            })

        # 3. Crawl errors
        crawl_errors = self.check_crawl_errors()
        if len(crawl_errors) > self.alerts_threshold['crawl_errors']:
            report['issues'].append({
                'type': 'crawl_errors',
                'severity': 'high',
                'count': len(crawl_errors),
                'errors': crawl_errors[:10]  # cap the sample included in the report
            })

        # 4. Core Web Vitals
        cwv = self.check_core_web_vitals()
        for metric, value in cwv.items():
            if not self.is_cwv_passing(metric, value):
                report['issues'].append({
                    'type': 'cwv_failing',
                    'metric': metric,
                    'value': value,
                    'threshold': self.get_cwv_threshold(metric)
                })

        # Overall status: worst severity wins.
        # BUGFIX: use .get() — 'cwv_failing' issues carry no 'severity' key,
        # so the original issue['severity'] lookup raised KeyError.
        severities = [issue.get('severity') for issue in report['issues']]
        if 'critical' in severities:
            report['status'] = 'critical'
        elif 'high' in severities:
            report['status'] = 'warning'
        return report
L’optimisation technique constitue le socle indispensable d’une stratégie SEO réussie, nécessitant une approche systématique et un monitoring constant pour garantir des performances optimales et une indexation efficace.