This is page 8 of 11. Use http://codebase.md/saidsurucu/yargi-mcp?lines=true&page={x} to view the full context.
# Directory Structure
```
├── __main__.py
├── .dockerignore
├── .env.example
├── .gitattributes
├── .github
│ └── workflows
│ └── publish.yml
├── .gitignore
├── .serena
│ ├── .gitignore
│ └── project.yml
├── 5ire-settings.png
├── analyze_kik_hash_generation.py
├── anayasa_mcp_module
│ ├── __init__.py
│ ├── bireysel_client.py
│ ├── client.py
│ ├── models.py
│ └── unified_client.py
├── asgi_app.py
├── bddk_mcp_module
│ ├── __init__.py
│ ├── client.py
│ └── models.py
├── bedesten_mcp_module
│ ├── __init__.py
│ ├── client.py
│ ├── enums.py
│ └── models.py
├── check_response_format.py
├── CLAUDE.md
├── danistay_mcp_module
│ ├── __init__.py
│ ├── client.py
│ └── models.py
├── docker-compose.yml
├── Dockerfile
├── docs
│ └── DEPLOYMENT.md
├── emsal_mcp_module
│ ├── __init__.py
│ ├── client.py
│ └── models.py
├── example_fastapi_app.py
├── fly-no-auth.toml
├── fly.toml
├── kik_mcp_module
│ ├── __init__.py
│ ├── client_v2.py
│ ├── client.py
│ ├── models_v2.py
│ └── models.py
├── kvkk_mcp_module
│ ├── __init__.py
│ ├── client.py
│ └── models.py
├── LICENSE
├── mcp_auth
│ ├── __init__.py
│ ├── clerk_config.py
│ ├── middleware.py
│ ├── oauth.py
│ ├── policy.py
│ └── storage.py
├── mcp_auth_factory.py
├── mcp_auth_http_adapter.py
├── mcp_auth_http_simple.py
├── mcp_server_main.py
├── nginx.conf
├── ornek.png
├── Procfile
├── pyproject.toml
├── railway.json
├── README.md
├── redis_session_store.py
├── rekabet_mcp_module
│ ├── __init__.py
│ ├── client.py
│ └── models.py
├── requirements.txt
├── run_asgi.py
├── saidsurucu-yargi-mcp-f5fa007
│ ├── __main__.py
│ ├── .dockerignore
│ ├── .env.example
│ ├── .gitattributes
│ ├── .github
│ │ └── workflows
│ │ └── publish.yml
│ ├── .gitignore
│ ├── 5ire-settings.png
│ ├── anayasa_mcp_module
│ │ ├── __init__.py
│ │ ├── bireysel_client.py
│ │ ├── client.py
│ │ ├── models.py
│ │ └── unified_client.py
│ ├── asgi_app.py
│ ├── bddk_mcp_module
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── models.py
│ ├── bedesten_mcp_module
│ │ ├── __init__.py
│ │ ├── client.py
│ │ ├── enums.py
│ │ └── models.py
│ ├── check_response_format.py
│ ├── danistay_mcp_module
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── models.py
│ ├── docker-compose.yml
│ ├── Dockerfile
│ ├── docs
│ │ └── DEPLOYMENT.md
│ ├── emsal_mcp_module
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── models.py
│ ├── example_fastapi_app.py
│ ├── kik_mcp_module
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── models.py
│ ├── kvkk_mcp_module
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── models.py
│ ├── LICENSE
│ ├── mcp_auth
│ │ ├── __init__.py
│ │ ├── clerk_config.py
│ │ ├── middleware.py
│ │ ├── oauth.py
│ │ ├── policy.py
│ │ └── storage.py
│ ├── mcp_auth_factory.py
│ ├── mcp_auth_http_adapter.py
│ ├── mcp_auth_http_simple.py
│ ├── mcp_server_main.py
│ ├── nginx.conf
│ ├── ornek.png
│ ├── Procfile
│ ├── pyproject.toml
│ ├── railway.json
│ ├── README.md
│ ├── redis_session_store.py
│ ├── rekabet_mcp_module
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── models.py
│ ├── run_asgi.py
│ ├── sayistay_mcp_module
│ │ ├── __init__.py
│ │ ├── client.py
│ │ ├── enums.py
│ │ ├── models.py
│ │ └── unified_client.py
│ ├── starlette_app.py
│ ├── stripe_webhook.py
│ ├── uyusmazlik_mcp_module
│ │ ├── __init__.py
│ │ ├── client.py
│ │ └── models.py
│ └── yargitay_mcp_module
│ ├── __init__.py
│ ├── client.py
│ └── models.py
├── sayistay_mcp_module
│ ├── __init__.py
│ ├── client.py
│ ├── enums.py
│ ├── models.py
│ └── unified_client.py
├── starlette_app.py
├── stripe_webhook.py
├── uv.lock
├── uyusmazlik_mcp_module
│ ├── __init__.py
│ ├── client.py
│ └── models.py
└── yargitay_mcp_module
├── __init__.py
├── client.py
└── models.py
```
# Files
--------------------------------------------------------------------------------
/kik_mcp_module/client.py:
--------------------------------------------------------------------------------
```python
1 | # kik_mcp_module/client.py
2 | import asyncio
3 | from playwright.async_api import (
4 | async_playwright,
5 | Page,
6 | BrowserContext,
7 | Browser,
8 | Error as PlaywrightError,
9 | TimeoutError as PlaywrightTimeoutError
10 | )
11 | from bs4 import BeautifulSoup
12 | import logging
13 | from typing import Dict, Any, List, Optional
14 | import urllib.parse
15 | import base64 # Base64 için
16 | import re
17 | import html as html_parser
18 | from markitdown import MarkItDown
19 | import os
20 | import math
21 | import io
22 | import random
23 |
24 | from .models import (
25 | KikSearchRequest,
26 | KikDecisionEntry,
27 | KikSearchResult,
28 | KikDocumentMarkdown,
29 | KikKararTipi
30 | )
31 |
32 | logger = logging.getLogger(__name__)
33 |
34 | class KikApiClient:
35 | BASE_URL = "https://ekap.kik.gov.tr"
36 | SEARCH_PAGE_PATH = "/EKAP/Vatandas/kurulkararsorgu.aspx"
37 | FIELD_LOCATORS = {
38 | "karar_tipi_radio_group": "input[name='ctl00$ContentPlaceHolder1$kurulKararTip']",
39 | "karar_no": "input[name='ctl00$ContentPlaceHolder1$txtKararNo']",
40 | "karar_tarihi_baslangic": "input[name='ctl00$ContentPlaceHolder1$etKararTarihBaslangic$EkapTakvimTextBox_etKararTarihBaslangic']",
41 | "karar_tarihi_bitis": "input[name='ctl00$ContentPlaceHolder1$etKararTarihBitis$EkapTakvimTextBox_etKararTarihBitis']",
42 | "resmi_gazete_sayisi": "input[name='ctl00$ContentPlaceHolder1$txtResmiGazeteSayisi']",
43 | "resmi_gazete_tarihi": "input[name='ctl00$ContentPlaceHolder1$etResmiGazeteTarihi$EkapTakvimTextBox_etResmiGazeteTarihi']",
44 | "basvuru_konusu_ihale": "input[name='ctl00$ContentPlaceHolder1$txtBasvuruKonusuIhale']",
45 | "basvuru_sahibi": "input[name='ctl00$ContentPlaceHolder1$txtSikayetci']",
46 | "ihaleyi_yapan_idare": "input[name='ctl00$ContentPlaceHolder1$txtIhaleyiYapanIdare']",
47 | "yil": "select[name='ctl00$ContentPlaceHolder1$ddlYil']",
48 | "karar_metni": "input[name='ctl00$ContentPlaceHolder1$txtKararMetni']",
49 | "search_button_id": "ctl00_ContentPlaceHolder1_btnAra"
50 | }
51 | RESULTS_TABLE_ID = "grdKurulKararSorguSonuc"
52 | NO_RESULTS_MESSAGE_SELECTOR = "div#ctl00_MessageContent1"
53 | VALIDATION_SUMMARY_SELECTOR = "div#ctl00_ValidationSummary1"
54 | MODAL_CLOSE_BUTTON_SELECTOR = "div#detayPopUp.in a#btnKapatPencere_0.close"
55 | DOCUMENT_MARKDOWN_CHUNK_SIZE = 5000
56 |
57 | def __init__(self, request_timeout: float = 60000):
58 | self.playwright_instance: Optional[async_playwright] = None
59 | self.browser: Optional[Browser] = None
60 | self.context: Optional[BrowserContext] = None
61 | self.page: Optional[Page] = None
62 | self.request_timeout = request_timeout
63 | self._lock = asyncio.Lock()
64 |
    async def _ensure_playwright_ready(self, force_new_page: bool = False):
        """Lazily build or repair the Playwright stack (driver -> browser -> context -> page).

        Safe to call before every operation: only missing/dead layers are
        recreated, and recreating a lower layer forces every layer above it to
        be recreated as well. Browser args, context fingerprint, and injected
        init scripts are all tuned to evade common bot-detection heuristics.

        Args:
            force_new_page: Open a fresh page even if the current one is alive.

        Raises:
            PlaywrightError: If any layer cannot be initialized.
        """
        async with self._lock:  # serialize with close_client_session()
            browser_recreated = False
            context_recreated = False
            # Layer 1: the Playwright driver process.
            if not self.playwright_instance:
                self.playwright_instance = await async_playwright().start()
            # Layer 2: Chromium (relaunched if it disconnected/crashed).
            if not self.browser or not self.browser.is_connected():
                if self.browser: await self.browser.close()
                # Ultra stealth browser configuration
                self.browser = await self.playwright_instance.chromium.launch(
                    headless=True,
                    args=[
                        # Disable automation indicators
                        '--no-first-run',
                        '--no-default-browser-check',
                        '--disable-dev-shm-usage',
                        '--disable-extensions',
                        '--disable-gpu',
                        '--disable-default-apps',
                        '--disable-translate',
                        '--disable-blink-features=AutomationControlled',
                        '--disable-ipc-flooding-protection',
                        '--disable-renderer-backgrounding',
                        '--disable-backgrounding-occluded-windows',
                        '--disable-client-side-phishing-detection',
                        '--disable-sync',
                        '--disable-features=TranslateUI,BlinkGenPropertyTrees',
                        '--disable-component-extensions-with-background-pages',
                        '--no-sandbox', # Sometimes needed for headless
                        '--disable-web-security',
                        '--disable-features=VizDisplayCompositor',
                        # Language and locale
                        '--lang=tr-TR',
                        '--accept-lang=tr-TR,tr;q=0.9,en;q=0.8',
                        # Performance optimizations
                        '--memory-pressure-off',
                        '--max_old_space_size=4096',
                    ]
                )
                browser_recreated = True
            # Layer 3: browser context (profile) with a realistic fingerprint.
            if not self.context or browser_recreated:
                if self.context: await self.context.close()
                if not self.browser: raise PlaywrightError("Browser not initialized.")
                # Ultra realistic context configuration
                self.context = await self.browser.new_context(
                    user_agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
                    viewport={'width': 1920, 'height': 1080},
                    screen={'width': 1920, 'height': 1080},
                    device_scale_factor=1.0,
                    is_mobile=False,
                    has_touch=False,
                    # Localization
                    locale='tr-TR',
                    timezone_id='Europe/Istanbul',
                    # Realistic browser features
                    java_script_enabled=True,
                    accept_downloads=True,
                    ignore_https_errors=True,
                    # Color scheme and media
                    color_scheme='light',
                    reduced_motion='no-preference',
                    forced_colors='none',
                    # Additional headers for realism
                    extra_http_headers={
                        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
                        'Accept-Encoding': 'gzip, deflate, br',
                        'Accept-Language': 'tr-TR,tr;q=0.9,en;q=0.8',
                        'Cache-Control': 'max-age=0',
                        'DNT': '1',
                        'Upgrade-Insecure-Requests': '1',
                        'Sec-Ch-Ua': '"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"',
                        'Sec-Ch-Ua-Mobile': '?0',
                        'Sec-Ch-Ua-Platform': '"Windows"',
                        'Sec-Fetch-Dest': 'document',
                        'Sec-Fetch-Mode': 'navigate',
                        'Sec-Fetch-Site': 'none',
                        'Sec-Fetch-User': '?1',
                    },
                    # Permissions to appear realistic
                    permissions=['geolocation'],
                    geolocation={'latitude': 41.0082, 'longitude': 28.9784}, # Istanbul
                )
                context_recreated = True
            # Layer 4: the page (recreated whenever any lower layer changed,
            # or on explicit request).
            if not self.page or self.page.is_closed() or force_new_page or context_recreated or browser_recreated:
                if self.page and not self.page.is_closed(): await self.page.close()
                if not self.context: raise PlaywrightError("Context is None.")
                self.page = await self.context.new_page()
                if not self.page: raise PlaywrightError("Failed to create new page.")
                self.page.set_default_navigation_timeout(self.request_timeout)
                self.page.set_default_timeout(self.request_timeout)

                # CRITICAL: Anti-detection JavaScript injection
                await self._inject_stealth_scripts()
        if not self.page or self.page.is_closed():
            raise PlaywrightError("Playwright page initialization failed.")
        logger.debug("_ensure_playwright_ready completed.")
161 |
162 | async def close_client_session(self):
163 | async with self._lock:
164 | # ... (öncekiyle aynı)
165 | if self.page and not self.page.is_closed(): await self.page.close(); self.page = None
166 | if self.context: await self.context.close(); self.context = None
167 | if self.browser: await self.browser.close(); self.browser = None
168 | if self.playwright_instance: await self.playwright_instance.stop(); self.playwright_instance = None
169 | logger.info("KikApiClient (Playwright): Resources closed.")
170 |
    async def _inject_stealth_scripts(self):
        """
        Inject comprehensive stealth JavaScript to evade bot detection.
        Overrides navigator properties and other fingerprinting vectors.

        The script is registered via Page.add_init_script, so it runs before
        any page script on every subsequent navigation of this page. Failure
        to inject is logged but not raised (best-effort hardening).
        """
        if not self.page:
            logger.warning("Cannot inject stealth scripts: page is None")
            return

        logger.debug("Injecting comprehensive stealth scripts...")

        # NOTE: the JS below is runtime data and must stay exactly as-is.
        stealth_script = '''
        // Override navigator.webdriver
        Object.defineProperty(navigator, 'webdriver', {
            get: () => undefined,
            configurable: true
        });

        // Override navigator properties to appear more human
        Object.defineProperty(navigator, 'languages', {
            get: () => ['tr-TR', 'tr', 'en-US', 'en'],
            configurable: true
        });

        Object.defineProperty(navigator, 'platform', {
            get: () => 'Win32',
            configurable: true
        });

        Object.defineProperty(navigator, 'vendor', {
            get: () => 'Google Inc.',
            configurable: true
        });

        Object.defineProperty(navigator, 'deviceMemory', {
            get: () => 8,
            configurable: true
        });

        Object.defineProperty(navigator, 'hardwareConcurrency', {
            get: () => 8,
            configurable: true
        });

        Object.defineProperty(navigator, 'maxTouchPoints', {
            get: () => 0,
            configurable: true
        });

        // Override plugins to appear realistic
        Object.defineProperty(navigator, 'plugins', {
            get: () => {
                return [
                    {
                        0: {type: "application/x-google-chrome-pdf", suffixes: "pdf", description: "Portable Document Format", enabledPlugin: Plugin},
                        description: "Portable Document Format",
                        filename: "internal-pdf-viewer",
                        length: 1,
                        name: "Chrome PDF Plugin"
                    },
                    {
                        0: {type: "application/pdf", suffixes: "pdf", description: "", enabledPlugin: Plugin},
                        description: "",
                        filename: "mhjfbmdgcfjbbpaeojofohoefgiehjai",
                        length: 1,
                        name: "Chrome PDF Viewer"
                    }
                ];
            },
            configurable: true
        });

        // Override permissions
        const originalQuery = window.navigator.permissions.query;
        window.navigator.permissions.query = (parameters) => (
            parameters.name === 'notifications' ?
                Promise.resolve({ state: Notification.permission }) :
                originalQuery(parameters)
        );

        // Override WebGL rendering context
        const getParameter = WebGLRenderingContext.prototype.getParameter;
        WebGLRenderingContext.prototype.getParameter = function(parameter) {
            if (parameter === 37445) { // UNMASKED_VENDOR_WEBGL
                return 'Intel Inc.';
            }
            if (parameter === 37446) { // UNMASKED_RENDERER_WEBGL
                return 'Intel(R) Iris(R) Plus Graphics 640';
            }
            return getParameter(parameter);
        };

        // Override canvas fingerprinting
        const toBlob = HTMLCanvasElement.prototype.toBlob;
        const toDataURL = HTMLCanvasElement.prototype.toDataURL;
        const getImageData = CanvasRenderingContext2D.prototype.getImageData;

        const noisify = (canvas, context) => {
            const imageData = context.getImageData(0, 0, canvas.width, canvas.height);
            for (let i = 0; i < imageData.data.length; i += 4) {
                imageData.data[i] += Math.floor(Math.random() * 10) - 5;
                imageData.data[i + 1] += Math.floor(Math.random() * 10) - 5;
                imageData.data[i + 2] += Math.floor(Math.random() * 10) - 5;
            }
            context.putImageData(imageData, 0, 0);
        };

        Object.defineProperty(HTMLCanvasElement.prototype, 'toBlob', {
            value: function(callback, type, encoderOptions) {
                noisify(this, this.getContext('2d'));
                return toBlob.apply(this, arguments);
            }
        });

        Object.defineProperty(HTMLCanvasElement.prototype, 'toDataURL', {
            value: function(type, encoderOptions) {
                noisify(this, this.getContext('2d'));
                return toDataURL.apply(this, arguments);
            }
        });

        // Override AudioContext for audio fingerprinting
        const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
        const originalAnalyser = audioCtx.createAnalyser;
        audioCtx.createAnalyser = function() {
            const analyser = originalAnalyser.apply(this, arguments);
            const getFloatFrequencyData = analyser.getFloatFrequencyData;
            analyser.getFloatFrequencyData = function(array) {
                getFloatFrequencyData.apply(this, arguments);
                for (let i = 0; i < array.length; i++) {
                    array[i] += Math.random() * 0.0001;
                }
            };
            return analyser;
        };

        // Override screen properties
        Object.defineProperty(window.screen, 'colorDepth', {
            get: () => 24,
            configurable: true
        });

        Object.defineProperty(window.screen, 'pixelDepth', {
            get: () => 24,
            configurable: true
        });

        // Override timezone
        Date.prototype.getTimezoneOffset = function() {
            return -180; // UTC+3 (Istanbul)
        };

        // Override document.cookie to prevent tracking
        const originalCookieDescriptor = Object.getOwnPropertyDescriptor(Document.prototype, 'cookie') ||
                                        Object.getOwnPropertyDescriptor(HTMLDocument.prototype, 'cookie');
        if (originalCookieDescriptor && originalCookieDescriptor.configurable) {
            Object.defineProperty(document, 'cookie', {
                get: function() {
                    return originalCookieDescriptor.get.call(this);
                },
                set: function(val) {
                    console.log('Cookie set blocked:', val);
                    return originalCookieDescriptor.set.call(this, val);
                },
                configurable: true
            });
        }

        // Remove automation traces
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Array;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Promise;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Symbol;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_JSON;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Object;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Proxy;

        // Add realistic performance timing
        if (window.performance && window.performance.timing) {
            const timing = window.performance.timing;
            const now = Date.now();
            Object.defineProperty(timing, 'navigationStart', { value: now - Math.floor(Math.random() * 1000) + 1000, configurable: false });
            Object.defineProperty(timing, 'loadEventEnd', { value: now - Math.floor(Math.random() * 100) + 100, configurable: false });
        }

        console.log('✓ Stealth scripts injected successfully');
        '''

        # add_init_script can raise if the page was closed meanwhile; treat
        # that as non-fatal since the client can still proceed un-stealthed.
        try:
            await self.page.add_init_script(stealth_script)
            logger.debug("✅ Stealth scripts injected successfully")
        except Exception as e:
            logger.warning(f"⚠️ Failed to inject stealth scripts: {e}")
363 |
364 | async def _simulate_human_behavior(self, fast_mode: bool = True):
365 | """
366 | Simulate realistic human behavior patterns to avoid detection.
367 | Includes mouse movements, typing patterns, and natural delays.
368 |
369 | Args:
370 | fast_mode: If True, use minimal timing for speed optimization
371 | """
372 | if not self.page:
373 | logger.warning("Cannot simulate human behavior: page is None")
374 | return
375 |
376 | logger.debug("🤖 Simulating human behavior patterns...")
377 |
378 | try:
379 | if fast_mode:
380 | # ULTRA-FAST MODE: Minimal human behavior
381 | viewport_size = self.page.viewport_size
382 | if viewport_size and random.random() < 0.7: # 70% chance to do movement
383 | width, height = viewport_size['width'], viewport_size['height']
384 |
385 | # Single quick mouse movement
386 | x = random.randint(200, width - 200)
387 | y = random.randint(200, height - 200)
388 | await self.page.mouse.move(x, y)
389 |
390 | # Brief scroll (50% chance)
391 | if random.random() < 0.5:
392 | await self.page.mouse.wheel(0, random.randint(50, 100))
393 |
394 | # Ultra-minimal delay
395 | await asyncio.sleep(random.uniform(0.05, 0.15)) # Reduced from 0.1-0.3
396 |
397 | else:
398 | # FULL MODE: Original comprehensive behavior
399 | viewport_size = self.page.viewport_size
400 | if viewport_size:
401 | width, height = viewport_size['width'], viewport_size['height']
402 |
403 | # Generate 3-5 random mouse movements
404 | movements = random.randint(3, 5)
405 | logger.debug(f" 🖱️ Performing {movements} random mouse movements")
406 |
407 | for i in range(movements):
408 | x = random.randint(100, width - 100)
409 | y = random.randint(100, height - 100)
410 |
411 | # Move mouse with realistic speed (not instant)
412 | await self.page.mouse.move(x, y)
413 | await asyncio.sleep(random.uniform(0.1, 0.3))
414 |
415 | # 2. Scroll simulation
416 | logger.debug(" 📜 Simulating scroll behavior")
417 | scroll_amount = random.randint(100, 300)
418 | await self.page.mouse.wheel(0, scroll_amount)
419 | await asyncio.sleep(random.uniform(0.2, 0.5))
420 |
421 | # Scroll back up
422 | await self.page.mouse.wheel(0, -scroll_amount)
423 | await asyncio.sleep(random.uniform(0.2, 0.4))
424 |
425 | # 3. Random page interaction delays
426 | await asyncio.sleep(random.uniform(0.5, 1.5))
427 |
428 | logger.debug("✅ Human behavior simulation completed")
429 |
430 | except Exception as e:
431 | logger.warning(f"⚠️ Human behavior simulation failed: {e}")
432 |
433 | async def _human_type(self, selector: str, text: str, clear_first: bool = True, fast_mode: bool = True):
434 | """
435 | Type text with human-like patterns and delays.
436 |
437 | Args:
438 | selector: CSS selector for the input element
439 | text: Text to type
440 | clear_first: Whether to clear the field first
441 | fast_mode: If True, use minimal delays for speed optimization
442 | """
443 | if not self.page:
444 | logger.warning("Cannot perform human typing: page is None")
445 | return
446 |
447 | try:
448 | if fast_mode:
449 | # FAST MODE: Direct fill for speed
450 | await self.page.fill(selector, text)
451 | await asyncio.sleep(random.uniform(0.02, 0.05)) # Reduced from 0.05-0.1
452 | else:
453 | # FULL MODE: Character-by-character human typing
454 | # Focus on the element first
455 | await self.page.focus(selector)
456 | await asyncio.sleep(random.uniform(0.1, 0.3))
457 |
458 | # Clear field if requested
459 | if clear_first:
460 | await self.page.keyboard.press('Control+a')
461 | await asyncio.sleep(random.uniform(0.05, 0.15))
462 | await self.page.keyboard.press('Delete')
463 | await asyncio.sleep(random.uniform(0.05, 0.15))
464 |
465 | # Type each character with human-like delays
466 | for char in text:
467 | await self.page.keyboard.type(char)
468 | # Human typing speed: 50-150ms between characters
469 | delay = random.uniform(0.05, 0.15)
470 |
471 | # Occasional longer pauses (thinking)
472 | if random.random() < 0.1: # 10% chance
473 | delay += random.uniform(0.2, 0.8)
474 |
475 | await asyncio.sleep(delay)
476 |
477 | # Brief pause after typing
478 | await asyncio.sleep(random.uniform(0.2, 0.6))
479 |
480 | logger.debug(f"✅ Human-typed '{text}' into {selector}")
481 |
482 | except Exception as e:
483 | logger.warning(f"⚠️ Human typing failed: {e}")
484 |
485 | async def _human_click(self, selector: str, wait_before: bool = True, wait_after: bool = True, fast_mode: bool = True):
486 | """
487 | Perform a human-like click with realistic delays and mouse movement.
488 |
489 | Args:
490 | selector: CSS selector or element to click
491 | wait_before: Whether to wait before clicking
492 | wait_after: Whether to wait after clicking
493 | fast_mode: If True, use minimal delays for speed optimization
494 | """
495 | if not self.page:
496 | logger.warning("Cannot perform human click: page is None")
497 | return
498 |
499 | try:
500 | if fast_mode:
501 | # FAST MODE: Direct click with minimal delay
502 | if wait_before:
503 | await asyncio.sleep(random.uniform(0.02, 0.08)) # Reduced from 0.05-0.15
504 |
505 | await self.page.click(selector)
506 |
507 | if wait_after:
508 | await asyncio.sleep(random.uniform(0.02, 0.08)) # Reduced from 0.05-0.15
509 |
510 | else:
511 | # FULL MODE: Realistic mouse movement and timing
512 | # Wait before clicking (thinking time)
513 | if wait_before:
514 | await asyncio.sleep(random.uniform(0.3, 0.8))
515 |
516 | # Get element bounds for realistic mouse movement
517 | element = await self.page.query_selector(selector)
518 | if element:
519 | box = await element.bounding_box()
520 | if box:
521 | # Move to element with slight randomness
522 | center_x = box['x'] + box['width'] / 2
523 | center_y = box['y'] + box['height'] / 2
524 |
525 | # Add small random offset
526 | offset_x = random.uniform(-10, 10)
527 | offset_y = random.uniform(-5, 5)
528 |
529 | await self.page.mouse.move(center_x + offset_x, center_y + offset_y)
530 | await asyncio.sleep(random.uniform(0.1, 0.3))
531 |
532 | # Perform click
533 | await self.page.mouse.click(center_x + offset_x, center_y + offset_y)
534 |
535 | logger.debug(f"✅ Human-clicked {selector}")
536 | else:
537 | # Fallback to regular click
538 | await self.page.click(selector)
539 | logger.debug(f"✅ Fallback-clicked {selector}")
540 | else:
541 | logger.warning(f"⚠️ Element not found for human click: {selector}")
542 | return
543 |
544 | # Wait after clicking (processing time)
545 | if wait_after:
546 | await asyncio.sleep(random.uniform(0.2, 0.6))
547 |
548 | logger.debug(f"✅ Human-clicked {selector}")
549 |
550 | except Exception as e:
551 | logger.warning(f"⚠️ Human click failed: {e}")
552 |
553 | async def _simulate_page_exploration(self, fast_mode: bool = True):
554 | """
555 | Simulate natural page exploration before performing the main task.
556 | This helps establish a more human-like session.
557 |
558 | Args:
559 | fast_mode: If True, use minimal exploration for speed optimization
560 | """
561 | if not self.page:
562 | return
563 |
564 | logger.debug("🕵️ Simulating page exploration...")
565 |
566 | try:
567 | if fast_mode:
568 | # ULTRA-FAST MODE: Minimal exploration
569 | await asyncio.sleep(random.uniform(0.05, 0.1)) # Reduced from 0.1-0.3
570 |
571 | # Single mouse movement (optional)
572 | try:
573 | elements = await self.page.query_selector_all("input, button")
574 | if elements and random.random() < 0.5: # 50% chance to skip
575 | element = random.choice(elements)
576 | box = await element.bounding_box()
577 | if box:
578 | center_x = box['x'] + box['width'] / 2
579 | center_y = box['y'] + box['height'] / 2
580 | await self.page.mouse.move(center_x, center_y)
581 | except:
582 | pass
583 |
584 | await asyncio.sleep(random.uniform(0.02, 0.05)) # Reduced from 0.05-0.15
585 |
586 | else:
587 | # FULL MODE: Comprehensive exploration
588 | # 1. Brief pause to "read" the page
589 | await asyncio.sleep(random.uniform(1.0, 2.5))
590 |
591 | # 2. Move mouse to various UI elements (like a human would explore)
592 | explore_selectors = [
593 | "h1", "h2", ".navbar", "#header", ".logo",
594 | "input", "button", "a", ".form-group"
595 | ]
596 |
597 | explored = 0
598 | for selector in explore_selectors:
599 | elements = await self.page.query_selector_all(selector)
600 | if elements and explored < 3: # Explore max 3 elements
601 | element = random.choice(elements)
602 | box = await element.bounding_box()
603 | if box:
604 | center_x = box['x'] + box['width'] / 2
605 | center_y = box['y'] + box['height'] / 2
606 |
607 | await self.page.mouse.move(center_x, center_y)
608 | await asyncio.sleep(random.uniform(0.3, 0.8))
609 | explored += 1
610 |
611 | # 3. Small scroll to simulate reading
612 | await self.page.mouse.wheel(0, random.randint(50, 150))
613 | await asyncio.sleep(random.uniform(0.5, 1.2))
614 |
615 | logger.debug("✅ Page exploration completed")
616 |
617 | except Exception as e:
618 | logger.debug(f"⚠️ Page exploration failed: {e}")
619 |
620 | def _parse_decision_entries_from_soup(self, soup: BeautifulSoup, search_karar_tipi: KikKararTipi) -> List[KikDecisionEntry]:
621 | entries: List[KikDecisionEntry] = []
622 | table = soup.find("table", {"id": self.RESULTS_TABLE_ID})
623 |
624 | logger.debug(f"Looking for table with ID: {self.RESULTS_TABLE_ID}")
625 | if not table:
626 | logger.warning(f"Table with ID '{self.RESULTS_TABLE_ID}' not found in HTML")
627 | # Log available tables for debugging
628 | all_tables = soup.find_all("table")
629 | logger.debug(f"Found {len(all_tables)} tables in HTML")
630 | for idx, tbl in enumerate(all_tables):
631 | table_id = tbl.get('id', 'no-id')
632 | table_class = tbl.get('class', 'no-class')
633 | rows = tbl.find_all('tr')
634 | logger.debug(f"Table {idx}: id='{table_id}', class='{table_class}', rows={len(rows)}")
635 |
636 | # If this looks like a results table, try to use it
637 | if (table_id and ('grd' in table_id.lower() or 'kurul' in table_id.lower() or 'sonuc' in table_id.lower())) or \
638 | (isinstance(table_class, list) and any('grid' in cls.lower() or 'result' in cls.lower() for cls in table_class)) or \
639 | len(rows) > 3: # Table with multiple rows might be results
640 | logger.info(f"Trying to parse table {idx} as potential results table: id='{table_id}'")
641 | table = tbl
642 | break
643 |
644 | if not table:
645 | logger.error("No suitable results table found")
646 | return entries
647 |
648 | rows = table.find_all("tr")
649 | logger.info(f"Found {len(rows)} rows in results table")
650 |
651 | # Debug: Log first few rows structure
652 | for i, row in enumerate(rows[:3]):
653 | cells = row.find_all(["td", "th"])
654 | cell_texts = [cell.get_text(strip=True)[:30] for cell in cells]
655 | logger.info(f"Row {i} structure: {len(cells)} cells: {cell_texts}")
656 |
657 | for row_idx, row in enumerate(rows):
658 | # Skip first row (search bar with colspan=7) and second row (header with 6 cells)
659 | if row_idx < 2:
660 | logger.debug(f"Skipping header row {row_idx}")
661 | continue
662 |
663 | cells = row.find_all("td")
664 | logger.debug(f"Row {row_idx}: Found {len(cells)} cells")
665 |
666 | # Log cell contents for debugging
667 | if cells and row_idx < 5: # Log first few data rows
668 | for cell_idx, cell in enumerate(cells):
669 | cell_text = cell.get_text(strip=True)[:50] # First 50 chars
670 | logger.debug(f" Cell {cell_idx}: '{cell_text}...'")
671 |
672 | # Be more flexible with cell count - try 6 cells first, then adapt
673 | if len(cells) >= 5: # At least 5 cells for minimum required data
674 | try:
675 | # Try to find preview button in first cell or any cell with a link
676 | preview_button_tag = None
677 | event_target = ""
678 |
679 | # Look for preview button in first few cells
680 | for cell_idx in range(min(3, len(cells))):
681 | cell = cells[cell_idx]
682 | # Try multiple patterns for preview button (based on actual HTML structure)
683 | preview_candidates = [
684 | cell.find("a", id="btnOnizle"), # Exact match
685 | cell.find("a", id=re.compile(r"btnOnizle$")),
686 | cell.find("a", id=re.compile(r"btn.*Onizle")),
687 | cell.find("a", id=re.compile(r".*Onizle.*")),
688 | cell.find("a", href=re.compile(r"__doPostBack"))
689 | ]
690 |
691 | for candidate in preview_candidates:
692 | if candidate and candidate.has_attr('href'):
693 | match = re.search(r"__doPostBack\('([^']*)','([^']*)'\)", candidate['href'])
694 | if match:
695 | event_target = match.group(1)
696 | preview_button_tag = candidate
697 | logger.debug(f"Row {row_idx}: Found event_target '{event_target}' in cell {cell_idx}")
698 | break
699 |
700 | if preview_button_tag:
701 | break
702 |
703 | if not preview_button_tag:
704 | logger.debug(f"Row {row_idx}: No preview button found in any cell")
705 | # Log what links we found
706 | for cell_idx, cell in enumerate(cells[:3]):
707 | links_in_cell = cell.find_all("a")
708 | logger.debug(f" Cell {cell_idx}: {len(links_in_cell)} links")
709 | for link in links_in_cell[:2]:
710 | logger.debug(f" Link id='{link.get('id')}', href='{link.get('href', '')[:50]}...'")
711 |
712 | # Try to find decision data spans with more flexible patterns
713 | karar_no_span = None
714 | karar_tarihi_span = None
715 | idare_span = None
716 | basvuru_sahibi_span = None
717 | ihale_span = None
718 |
719 | # Try different span patterns for karar no (usually in cell 1)
720 | for cell_idx in range(min(4, len(cells))):
721 | if not karar_no_span:
722 | cell = cells[cell_idx]
723 | candidates = [
724 | cell.find("span", id="lblKno"), # Exact match based on actual HTML
725 | cell.find("span", id=re.compile(r"lblKno$")),
726 | cell.find("span", id=re.compile(r".*Kno.*")),
727 | cell.find("span", id=re.compile(r".*KararNo.*")),
728 | cell.find("span", id=re.compile(r".*No.*"))
729 | ]
730 | for candidate in candidates:
731 | if candidate and candidate.get_text(strip=True):
732 | karar_no_span = candidate
733 | logger.debug(f"Row {row_idx}: Found karar_no in cell {cell_idx}")
734 | break
735 |
736 | # Try different patterns for karar tarihi (usually in cell 2)
737 | for cell_idx in range(min(4, len(cells))):
738 | if not karar_tarihi_span:
739 | cell = cells[cell_idx]
740 | candidates = [
741 | cell.find("span", id="lblKtar"), # Exact match based on actual HTML
742 | cell.find("span", id=re.compile(r"lblKtar$")),
743 | cell.find("span", id=re.compile(r".*Ktar.*")),
744 | cell.find("span", id=re.compile(r".*Tarih.*")),
745 | cell.find("span", id=re.compile(r".*Date.*"))
746 | ]
747 | for candidate in candidates:
748 | if candidate and candidate.get_text(strip=True):
749 | # Check if it looks like a date
750 | text = candidate.get_text(strip=True)
751 | if re.match(r'\d{1,2}[./]\d{1,2}[./]\d{4}', text):
752 | karar_tarihi_span = candidate
753 | logger.debug(f"Row {row_idx}: Found karar_tarihi in cell {cell_idx}")
754 | break
755 |
756 | # Find other spans in remaining cells (if we have 6 cells) - using exact IDs
757 | if len(cells) >= 6:
758 | idare_span = cells[3].find("span", id="lblIdare") or cells[3].find("span")
759 | basvuru_sahibi_span = cells[4].find("span", id="lblSikayetci") or cells[4].find("span")
760 | ihale_span = cells[5].find("span", id="lblIhale") or cells[5].find("span")
761 | elif len(cells) == 5:
762 | # Adjust for 5-cell layout
763 | idare_span = cells[2].find("span") if cells[2] != cells[1] else None
764 | basvuru_sahibi_span = cells[3].find("span") if len(cells) > 3 else None
765 | ihale_span = cells[4].find("span") if len(cells) > 4 else None
766 |
767 | # Log what we found
768 | logger.debug(f"Row {row_idx}: karar_no_span={karar_no_span is not None}, "
769 | f"karar_tarihi_span={karar_tarihi_span is not None}, "
770 | f"event_target={bool(event_target)}")
771 |
772 | # For KIK, we need at least karar_no and karar_tarihi, event_target is helpful but not critical
773 | if not (karar_no_span and karar_tarihi_span):
774 | logger.debug(f"Row {row_idx}: Missing required fields (karar_no or karar_tarihi), skipping")
775 | # Log what spans we found in cells
776 | for i, cell in enumerate(cells):
777 | spans = cell.find_all("span")
778 | if spans:
779 | span_info = []
780 | for s in spans:
781 | span_id = s.get('id', 'no-id')
782 | span_text = s.get_text(strip=True)[:20]
783 | span_info.append(f"{span_id}:'{span_text}...'")
784 | logger.debug(f" Cell {i} spans: {span_info}")
785 | continue
786 |
787 | # If we don't have event_target, we can still create an entry but mark it specially
788 | if not event_target:
789 | logger.warning(f"Row {row_idx}: No event_target found, document retrieval won't work")
790 | event_target = f"missing_target_row_{row_idx}" # Placeholder
791 |
792 | # Karar tipini arama parametresinden alıyoruz, çünkü HTML'de direkt olarak bulunmuyor.
793 | try:
794 | entry = KikDecisionEntry(
795 | preview_event_target=event_target,
796 | kararNo=karar_no_span.get_text(strip=True),
797 | karar_tipi=search_karar_tipi, # Arama yapılan karar tipini ekle
798 | kararTarihi=karar_tarihi_span.get_text(strip=True),
799 | idare=idare_span.get_text(strip=True) if idare_span else None,
800 | basvuruSahibi=basvuru_sahibi_span.get_text(strip=True) if basvuru_sahibi_span else None,
801 | ihaleKonusu=ihale_span.get_text(strip=True) if ihale_span else None,
802 | )
803 | entries.append(entry)
804 | logger.info(f"Row {row_idx}: Successfully parsed decision: {entry.karar_no_str}")
805 | except Exception as e:
806 | logger.error(f"Row {row_idx}: Error creating KikDecisionEntry: {e}")
807 | continue
808 |
809 | except Exception as e:
810 | logger.error(f"Error parsing row {row_idx}: {e}", exc_info=True)
811 | else:
812 | logger.warning(f"Row {row_idx}: Expected at least 5 cells but found {len(cells)}, skipping")
813 | if len(cells) > 0:
814 | cell_texts = [cell.get_text(strip=True)[:50] for cell in cells[:3]]
815 | logger.debug(f"Row {row_idx} cells preview: {cell_texts}")
816 |
817 | logger.info(f"Parsed {len(entries)} decision entries from {len(rows)} rows")
818 | return entries
819 |
820 | def _parse_total_records_from_soup(self, soup: BeautifulSoup) -> int:
821 | # ... (öncekiyle aynı) ...
822 | try:
823 | pager_div = soup.find("div", class_="gridToplamSayi")
824 | if pager_div:
825 | match = re.search(r"Toplam Kayıt Sayısı:(\d+)", pager_div.get_text(strip=True))
826 | if match: return int(match.group(1))
827 | except: pass
828 | return 0
829 |
830 | def _parse_current_page_from_soup(self, soup: BeautifulSoup) -> int:
831 | # ... (öncekiyle aynı) ...
832 | try:
833 | pager_div = soup.find("div", class_="sayfalama")
834 | if pager_div:
835 | active_page_span = pager_div.find("span", class_="active")
836 | if active_page_span: return int(active_page_span.get_text(strip=True))
837 | except: pass
838 | return 1
839 |
    async def search_decisions(self, search_params: KikSearchRequest) -> KikSearchResult:
        """Submit the KIK decision search form on EKAP and parse the result grid.

        Navigates to the search page (if not already there), selects the
        decision-type radio button via a WebForms postback, fills every
        provided filter with human-like typing/clicking (to reduce bot
        detection), then either clicks the search button (page 1) or fires
        the pagination postback (page > 1) and parses the resulting HTML.

        Args:
            search_params: Search filters plus the 1-based result page.

        Returns:
            KikSearchResult with the parsed decision entries; an empty
            result (never an exception) on validation errors, a
            "no records" message, or any unexpected failure.
        """
        await self._ensure_playwright_ready()
        page = self.page
        search_url = f"{self.BASE_URL}{self.SEARCH_PAGE_PATH}"
        try:
            if page.url != search_url:
                await page.goto(search_url, wait_until="networkidle", timeout=self.request_timeout)

            # Simulate natural page exploration after navigation (FAST MODE)
            await self._simulate_page_exploration(fast_mode=True)

            search_button_selector = f"a[id='{self.FIELD_LOCATORS['search_button_id']}']"
            await page.wait_for_selector(search_button_selector, state="visible", timeout=self.request_timeout)

            current_karar_tipi_value = search_params.karar_tipi.value
            radio_locator_selector = f"{self.FIELD_LOCATORS['karar_tipi_radio_group']}[value='{current_karar_tipi_value}']"
            if not await page.locator(radio_locator_selector).is_checked():
                # Changing the radio triggers a full WebForms postback/navigation.
                js_target_radio = f"ctl00$ContentPlaceHolder1${current_karar_tipi_value}"
                logger.info(f"Selecting radio button: {js_target_radio}")
                async with page.expect_navigation(wait_until="networkidle", timeout=self.request_timeout):
                    await page.evaluate(f"javascript:__doPostBack('{js_target_radio}','')")
                # Ultra-fast wait for page to stabilize after radio button change
                await page.wait_for_timeout(300)  # Reduced from 1000ms
                logger.info("Radio button selection completed")

            # Helper for human-like form filling (FAST MODE); skips unset filters.
            async def human_fill_if_value(selector_key: str, value: Optional[str]):
                if value is not None:
                    selector = self.FIELD_LOCATORS[selector_key]
                    await self._human_type(selector, value, fast_mode=True)

            # Map '_' back to '/' in the decision number before sending it to
            # the KIK form (callers may pass e.g. "2024_UH.II-123" for "2024/UH.II-123").
            karar_no_for_kik_form = None
            if search_params.karar_no:  # karar_no may arrive with '_' separators
                karar_no_for_kik_form = search_params.karar_no.replace('_', '/')
                logger.info(f"Using karar_no '{karar_no_for_kik_form}' (transformed from '{search_params.karar_no}') for KIK form.")

            # Fill form fields with FAST human-like behavior
            logger.info("Filling form fields with fast mode...")

            # Start with FAST mouse behavior simulation
            await self._simulate_human_behavior(fast_mode=True)

            await human_fill_if_value('karar_metni', search_params.karar_metni)
            await human_fill_if_value('karar_no', karar_no_for_kik_form)  # use the transformed value
            await human_fill_if_value('karar_tarihi_baslangic', search_params.karar_tarihi_baslangic)
            await human_fill_if_value('karar_tarihi_bitis', search_params.karar_tarihi_bitis)
            await human_fill_if_value('resmi_gazete_sayisi', search_params.resmi_gazete_sayisi)
            await human_fill_if_value('resmi_gazete_tarihi', search_params.resmi_gazete_tarihi)
            await human_fill_if_value('basvuru_konusu_ihale', search_params.basvuru_konusu_ihale)
            await human_fill_if_value('basvuru_sahibi', search_params.basvuru_sahibi)
            await human_fill_if_value('ihaleyi_yapan_idare', search_params.ihaleyi_yapan_idare)

            if search_params.yil:
                await page.select_option(self.FIELD_LOCATORS['yil'], value=search_params.yil)
                await page.wait_for_timeout(50)  # Reduced from 100ms

            logger.info("Form filling completed, preparing for search...")

            # Additional FAST human behavior before search
            await self._simulate_human_behavior(fast_mode=True)

            # Page 1 is requested via the search button; later pages via the
            # grid's pagination postback targets.
            action_is_search_button_click = (search_params.page == 1)
            event_target_for_submit: str

            try:
                if action_is_search_button_click:
                    event_target_for_submit = self.FIELD_LOCATORS['search_button_id']
                    # Use human-like clicking for search button
                    search_button_selector = f"a[id='{event_target_for_submit}']"
                    logger.info(f"Performing human-like search button click...")

                    try:
                        # Hide datepicker first to prevent interference
                        await page.evaluate("$('#ui-datepicker-div').hide()")

                        # FAST Human-like click on search button
                        await self._human_click(search_button_selector, wait_before=True, wait_after=False, fast_mode=True)

                        # Wait for navigation
                        await page.wait_for_load_state("networkidle", timeout=self.request_timeout)
                        logger.info("Search navigation completed successfully")
                    except Exception as e:
                        logger.warning(f"Human click failed, falling back to JavaScript: {e}")
                        # Hide datepicker and use JavaScript fallback
                        await page.evaluate("$('#ui-datepicker-div').hide()")
                        async with page.expect_navigation(wait_until="networkidle", timeout=self.request_timeout):
                            await page.evaluate(f"javascript:__doPostBack('{event_target_for_submit}','')")
                        logger.info("Search navigation completed via fallback")
                else:
                    # Pagination - use original method for consistency.
                    # NOTE(review): the +2 offset maps page N to the grid's
                    # ctlNN pager link id — verify against live pager markup.
                    page_link_ctl_number = search_params.page + 2
                    event_target_for_submit = f"ctl00$ContentPlaceHolder1$grdKurulKararSorguSonuc$ctl14$ctl{page_link_ctl_number:02d}"
                    logger.info(f"Executing pagination with event target: {event_target_for_submit}")

                    async with page.expect_navigation(wait_until="networkidle", timeout=self.request_timeout):
                        await page.evaluate(f"javascript:__doPostBack('{event_target_for_submit}','')")
                    logger.info("Pagination navigation completed successfully")
            except PlaywrightTimeoutError:
                logger.warning("Search navigation timed out, but continuing...")
                await page.wait_for_timeout(5000)  # Longer wait if navigation fails

            # Ultra-fast wait time for results to load
            logger.info("Waiting for search results to load...")
            await page.wait_for_timeout(500)  # Reduced from 1000ms

            results_table_dom_selector = f"table#{self.RESULTS_TABLE_ID}"
            try:
                # First wait for any tables to appear (more general check)
                logger.info("Waiting for any tables to appear...")
                await page.wait_for_function("""
                    () => document.querySelectorAll('table').length > 0
                """, timeout=4000)  # Reduced from 8000ms
                logger.info("At least one table appeared")

                # Then wait for our specific table
                await page.wait_for_selector(results_table_dom_selector, timeout=4000, state="attached")  # Reduced from 8000ms
                logger.debug("Results table attached to DOM")

                # Wait for table to have some content (more than just headers)
                await page.wait_for_function(f"""
                    () => {{
                        const table = document.querySelector('{results_table_dom_selector}');
                        return table && table.querySelectorAll('tr').length > 2;
                    }}
                """, timeout=4000)  # Reduced from 20000ms
                logger.debug("Results table populated with data")

                # Ultra-fast additional wait for any remaining JavaScript
                await page.wait_for_timeout(500)  # Reduced from 3000ms

            except PlaywrightTimeoutError:
                logger.warning(f"Timeout waiting for results table '{results_table_dom_selector}'.")
                # Try one more wait for content placeholder
                try:
                    await page.wait_for_selector("#ctl00_ContentPlaceHolder1", timeout=10000)
                    logger.info("ContentPlaceHolder1 found, checking for tables...")
                    await page.wait_for_timeout(5000)
                except PlaywrightTimeoutError:
                    logger.warning("ContentPlaceHolder1 also not found - content may not have loaded")

            html_content = await page.content()
            soup = BeautifulSoup(html_content, "html.parser")
            # Error and "no results" message checks (as before).
            # NOTE(review): the computed id below still contains the "div#"
            # prefix from the selector constant, so this find() likely never
            # matches — verify against the live markup.
            validation_summary_tag = soup.find("div", id=self.VALIDATION_SUMMARY_SELECTOR.split('[')[0].split(':')[0])
            if validation_summary_tag and validation_summary_tag.get_text(strip=True) and \
               ("display: none" not in validation_summary_tag.get("style", "").lower() if validation_summary_tag.has_attr("style") else True) and \
               validation_summary_tag.get_text(strip=True) != "":
                return KikSearchResult(decisions=[], total_records=0, current_page=search_params.page)
            message_content_div = soup.find("div", id=self.NO_RESULTS_MESSAGE_SELECTOR.split(':')[0])
            if message_content_div and "kayıt bulunamamıştır" in message_content_div.get_text(strip=True).lower():
                return KikSearchResult(decisions=[], total_records=0, current_page=1)

            # Pass the searched karar_tipi through to the entry parser (the
            # decision type is not present in the result HTML itself).
            decisions = self._parse_decision_entries_from_soup(soup, search_params.karar_tipi)
            total_records = self._parse_total_records_from_soup(soup)
            current_page_from_html = self._parse_current_page_from_soup(soup)
            return KikSearchResult(decisions=decisions, total_records=total_records, current_page=current_page_from_html)
        except Exception as e:
            logger.error(f"Error during KIK decision search: {e}", exc_info=True)
            return KikSearchResult(decisions=[], current_page=search_params.page)
1001 |
1002 | def _clean_html_for_markdown(self, html_content: str) -> str:
1003 | # ... (öncekiyle aynı) ...
1004 | if not html_content: return ""
1005 | return html_parser.unescape(html_content)
1006 |
1007 | def _convert_html_to_markdown_internal(self, html_fragment: str) -> Optional[str]:
1008 | # ... (öncekiyle aynı) ...
1009 | if not html_fragment: return None
1010 | cleaned_html = self._clean_html_for_markdown(html_fragment)
1011 | markdown_output = None
1012 | try:
1013 | # Convert HTML string to bytes and create BytesIO stream
1014 | html_bytes = cleaned_html.encode('utf-8')
1015 | html_stream = io.BytesIO(html_bytes)
1016 |
1017 | # Pass BytesIO stream to MarkItDown to avoid temp file creation
1018 | md_converter = MarkItDown(enable_plugins=True, remove_alt_whitespace=True, keep_underline=True)
1019 | markdown_output = md_converter.convert(html_stream).text_content
1020 | if markdown_output: markdown_output = re.sub(r'\n{3,}', '\n\n', markdown_output).strip()
1021 | except Exception as e: logger.error(f"MarkItDown conversion error: {e}", exc_info=True)
1022 | return markdown_output
1023 |
1024 |
    async def get_decision_document_as_markdown(
        self,
        karar_id_b64: str,
        page_number: int = 1
    ) -> KikDocumentMarkdown:
        """Fetch one KIK decision document and return it as paginated markdown.

        ``karar_id_b64`` is a Base64-encoded ``"<karar_tipi>|<karar_no>"``
        composite key. The method re-runs a targeted search for that
        decision, triggers its preview modal on the main page, extracts the
        document iframe URL, loads it in a temporary page, converts the
        decision HTML to markdown and returns one
        DOCUMENT_MARKDOWN_CHUNK_SIZE-character chunk.

        Args:
            karar_id_b64: Base64 composite decision key produced by search.
            page_number: 1-based markdown chunk to return (clamped to range).

        Returns:
            KikDocumentMarkdown with the requested chunk, or with
            ``error_message`` populated on any failure (never raises).
        """
        await self._ensure_playwright_ready()
        # This method opens its own page ('doc_page_for_content') for the
        # document body; the main 'self.page' stays on the search results page.
        current_main_page = self.page  # reference to the main search results page

        try:
            decoded_key = base64.b64decode(karar_id_b64.encode('utf-8')).decode('utf-8')
            karar_tipi_value, karar_no_for_search = decoded_key.split('|', 1)
            original_karar_tipi = KikKararTipi(karar_tipi_value)
            logger.info(f"KIK Get Detail: Decoded karar_id '{karar_id_b64}' to Karar Tipi: {original_karar_tipi.value}, Karar No: {karar_no_for_search}. Requested Markdown Page: {page_number}")
        except Exception as e_decode:
            logger.error(f"Invalid karar_id format. Could not decode Base64 or split: {karar_id_b64}. Error: {e_decode}")
            return KikDocumentMarkdown(retrieved_with_karar_id=karar_id_b64, error_message="Invalid karar_id format.", current_page=page_number)

        # Template for every error return; individual fields are overwritten
        # as more context (source_url, KararId, message) becomes known.
        default_error_response_data = {
            "retrieved_with_karar_id": karar_id_b64,
            "retrieved_karar_no": karar_no_for_search,
            "retrieved_karar_tipi": original_karar_tipi,
            "error_message": "An unspecified error occurred.",
            "current_page": page_number, "total_pages": 1, "is_paginated": False
        }

        # Make sure we are on the main search page before the targeted search.
        if self.SEARCH_PAGE_PATH not in current_main_page.url:
            logger.info(f"Not on search page ({current_main_page.url}). Navigating to {self.SEARCH_PAGE_PATH} before targeted search for document.")
            await current_main_page.goto(f"{self.BASE_URL}{self.SEARCH_PAGE_PATH}", wait_until="networkidle", timeout=self.request_timeout)
            await current_main_page.wait_for_selector(f"a[id='{self.FIELD_LOCATORS['search_button_id']}']", state="visible", timeout=self.request_timeout)

        targeted_search_params = KikSearchRequest(
            karar_no=karar_no_for_search,
            karar_tipi=original_karar_tipi,
            page=1
        )
        logger.info(f"Performing targeted search for Karar No: {karar_no_for_search}")
        # search_decisions calls _ensure_playwright_ready itself and operates
        # on self.page, which should be the same object as current_main_page.
        search_results = await self.search_decisions(targeted_search_params)

        if not search_results.decisions:
            default_error_response_data["error_message"] = f"Decision with Karar No '{karar_no_for_search}' (Tipi: {original_karar_tipi.value}) not found by internal search."
            return KikDocumentMarkdown(**default_error_response_data)

        # Require an exact (karar_no, karar_tipi) match among the results.
        decision_to_fetch = None
        for dec_entry in search_results.decisions:
            if dec_entry.karar_no_str == karar_no_for_search and dec_entry.karar_tipi == original_karar_tipi:
                decision_to_fetch = dec_entry
                break

        if not decision_to_fetch:
            default_error_response_data["error_message"] = f"Karar No '{karar_no_for_search}' (Tipi: {original_karar_tipi.value}) not present with an exact match in first page of targeted search results."
            return KikDocumentMarkdown(**default_error_response_data)

        decision_preview_event_target = decision_to_fetch.preview_event_target
        logger.info(f"Found target decision. Using preview_event_target: {decision_preview_event_target} for Karar No: {decision_to_fetch.karar_no_str}")

        iframe_document_url_str = None
        karar_id_param_from_url_on_doc_page = None
        document_html_content = ""

        try:
            logger.info(f"Evaluating __doPostBack on main page to show modal for: {decision_preview_event_target}")
            # This evaluate runs on self.page (i.e. current_main_page) and
            # opens the decision preview modal.
            await current_main_page.evaluate(f"javascript:__doPostBack('{decision_preview_event_target}','')")
            await current_main_page.wait_for_timeout(1000)
            logger.info(f"Executed __doPostBack for {decision_preview_event_target} on main page.")

            iframe_selector = "iframe#iframe_detayPopUp"
            modal_visible_selector = "div#detayPopUp.in"

            try:
                logger.info(f"Waiting for modal '{modal_visible_selector}' to be visible and iframe '{iframe_selector}' src to be populated on main page...")
                await current_main_page.wait_for_function(
                    f"""
                    () => {{
                        const modal = document.querySelector('{modal_visible_selector}');
                        const iframe = document.querySelector('{iframe_selector}');
                        const modalIsTrulyVisible = modal && (window.getComputedStyle(modal).display !== 'none');
                        return modalIsTrulyVisible &&
                               iframe && iframe.getAttribute('src') &&
                               iframe.getAttribute('src').includes('KurulKararGoster.aspx');
                    }}
                    """,
                    timeout=self.request_timeout / 2
                )
                iframe_src_value = await current_main_page.locator(iframe_selector).get_attribute("src")
                logger.info(f"Iframe src populated: {iframe_src_value}")

            except PlaywrightTimeoutError:
                logger.warning(f"Timeout waiting for KIK iframe src for {decision_preview_event_target}. Trying to parse from static content after presumed update.")
                html_after_postback = await current_main_page.content()
                # Fallback: parse the (hopefully updated) static HTML for the
                # modal and its iframe; on failure an error result is returned.
                soup_after_postback = BeautifulSoup(html_after_postback, "html.parser")
                detay_popup_div = soup_after_postback.find("div", {"id": "detayPopUp", "class": re.compile(r"\bin\b")})
                if not detay_popup_div: detay_popup_div = soup_after_postback.find("div", {"id": "detayPopUp", "style": re.compile(r"display:\s*block", re.I)})
                iframe_tag = detay_popup_div.find("iframe", {"id": "iframe_detayPopUp"}) if detay_popup_div else None
                if iframe_tag and iframe_tag.has_attr("src") and iframe_tag["src"]: iframe_src_value = iframe_tag["src"]
                else:
                    default_error_response_data["error_message"]="Timeout or failure finding decision content iframe URL after postback."
                    return KikDocumentMarkdown(**default_error_response_data)

            if not iframe_src_value or not iframe_src_value.strip():
                default_error_response_data["error_message"]="Extracted iframe URL for decision content is empty."
                return KikDocumentMarkdown(**default_error_response_data)

            # If iframe_src_value is relative, resolve it against the main page URL.
            iframe_document_url_str = urllib.parse.urljoin(current_main_page.url, iframe_src_value)
            logger.info(f"Constructed absolute iframe_document_url_str for goto: {iframe_document_url_str}")  # log the absolute URL
            default_error_response_data["source_url"] = iframe_document_url_str

            parsed_url = urllib.parse.urlparse(iframe_document_url_str)
            query_params = urllib.parse.parse_qs(parsed_url.query)
            karar_id_param_from_url_on_doc_page = query_params.get("KararId", [None])[0]
            default_error_response_data["karar_id_param_from_url"] = karar_id_param_from_url_on_doc_page
            if not karar_id_param_from_url_on_doc_page:
                default_error_response_data["error_message"]="KararId (KIK internal ID) not found in extracted iframe URL."
                return KikDocumentMarkdown(**default_error_response_data)

            logger.info(f"Fetching KIK decision content from iframe URL using a new Playwright page: {iframe_document_url_str}")

            doc_page_for_content = await self.context.new_page()
            try:
                # `goto` must receive an ABSOLUTE URL; the urljoin above guarantees that.
                await doc_page_for_content.goto(iframe_document_url_str, wait_until="domcontentloaded", timeout=self.request_timeout)
                document_html_content = await doc_page_for_content.content()
            except Exception as e_doc_page:
                logger.error(f"Error navigating or getting content from doc_page ({iframe_document_url_str}): {e_doc_page}")
                if doc_page_for_content and not doc_page_for_content.is_closed(): await doc_page_for_content.close()
                default_error_response_data["error_message"]=f"Failed to load decision detail page: {e_doc_page}"
                return KikDocumentMarkdown(**default_error_response_data)
            finally:
                # The temporary document page is always closed.
                if doc_page_for_content and not doc_page_for_content.is_closed():
                    await doc_page_for_content.close()

            soup_decision_detail = BeautifulSoup(document_html_content, "html.parser")
            karar_content_span = soup_decision_detail.find("span", {"id": "ctl00_ContentPlaceHolder1_lblKarar"})
            # Fall back to the whole page HTML if the decision span is absent.
            actual_decision_html = karar_content_span.decode_contents() if karar_content_span else document_html_content
            full_markdown_content = self._convert_html_to_markdown_internal(actual_decision_html)

            if not full_markdown_content:
                default_error_response_data["error_message"]="Markdown conversion failed or returned empty content."
                # Best-effort modal close; failures are deliberately ignored.
                try:
                    if await current_main_page.locator(self.MODAL_CLOSE_BUTTON_SELECTOR).is_visible(timeout=1000):
                        await current_main_page.locator(self.MODAL_CLOSE_BUTTON_SELECTOR).click()
                except: pass
                return KikDocumentMarkdown(**default_error_response_data)

            # Slice the markdown into fixed-size chunks and clamp the request.
            content_length = len(full_markdown_content); total_pages = math.ceil(content_length / self.DOCUMENT_MARKDOWN_CHUNK_SIZE) or 1
            current_page_clamped = max(1, min(page_number, total_pages))
            start_index = (current_page_clamped - 1) * self.DOCUMENT_MARKDOWN_CHUNK_SIZE
            markdown_chunk = full_markdown_content[start_index : start_index + self.DOCUMENT_MARKDOWN_CHUNK_SIZE]

            # Best-effort modal close so the main page is reusable; ignored on failure.
            try:
                if await current_main_page.locator(self.MODAL_CLOSE_BUTTON_SELECTOR).is_visible(timeout=2000):
                    await current_main_page.locator(self.MODAL_CLOSE_BUTTON_SELECTOR).click()
                    await current_main_page.wait_for_selector(f"div#detayPopUp:not(.in)", timeout=5000)
            except: pass

            return KikDocumentMarkdown(
                retrieved_with_karar_id=karar_id_b64,
                retrieved_karar_no=karar_no_for_search,
                retrieved_karar_tipi=original_karar_tipi,
                kararIdParam=karar_id_param_from_url_on_doc_page,
                markdown_chunk=markdown_chunk, source_url=iframe_document_url_str,
                current_page=current_page_clamped, total_pages=total_pages,
                is_paginated=(total_pages > 1), full_content_char_count=content_length
            )
        except Exception as e:
            logger.error(f"Error in get_decision_document_as_markdown for Karar ID {karar_id_b64}: {e}", exc_info=True)
            default_error_response_data["error_message"] = f"General error: {str(e)}"
            return KikDocumentMarkdown(**default_error_response_data)
1200 |
```
--------------------------------------------------------------------------------
/saidsurucu-yargi-mcp-f5fa007/kik_mcp_module/client.py:
--------------------------------------------------------------------------------
```python
1 | # kik_mcp_module/client.py
2 | import asyncio
3 | from playwright.async_api import (
4 | async_playwright,
5 | Page,
6 | BrowserContext,
7 | Browser,
8 | Error as PlaywrightError,
9 | TimeoutError as PlaywrightTimeoutError
10 | )
11 | from bs4 import BeautifulSoup
12 | import logging
13 | from typing import Dict, Any, List, Optional
14 | import urllib.parse
15 | import base64 # Base64 için
16 | import re
17 | import html as html_parser
18 | from markitdown import MarkItDown
19 | import os
20 | import math
21 | import io
22 | import random
23 |
24 | from .models import (
25 | KikSearchRequest,
26 | KikDecisionEntry,
27 | KikSearchResult,
28 | KikDocumentMarkdown,
29 | KikKararTipi
30 | )
31 |
32 | logger = logging.getLogger(__name__)
33 |
class KikApiClient:
    """Playwright-based scraper client for KIK (Public Procurement Authority)
    board decisions published on the EKAP portal.
    """
    BASE_URL = "https://ekap.kik.gov.tr"
    # Public decision-search page (ASP.NET WebForms application).
    SEARCH_PAGE_PATH = "/EKAP/Vatandas/kurulkararsorgu.aspx"
    # CSS selectors for the WebForms form controls, keyed by logical field name.
    FIELD_LOCATORS = {
        "karar_tipi_radio_group": "input[name='ctl00$ContentPlaceHolder1$kurulKararTip']",
        "karar_no": "input[name='ctl00$ContentPlaceHolder1$txtKararNo']",
        "karar_tarihi_baslangic": "input[name='ctl00$ContentPlaceHolder1$etKararTarihBaslangic$EkapTakvimTextBox_etKararTarihBaslangic']",
        "karar_tarihi_bitis": "input[name='ctl00$ContentPlaceHolder1$etKararTarihBitis$EkapTakvimTextBox_etKararTarihBitis']",
        "resmi_gazete_sayisi": "input[name='ctl00$ContentPlaceHolder1$txtResmiGazeteSayisi']",
        "resmi_gazete_tarihi": "input[name='ctl00$ContentPlaceHolder1$etResmiGazeteTarihi$EkapTakvimTextBox_etResmiGazeteTarihi']",
        "basvuru_konusu_ihale": "input[name='ctl00$ContentPlaceHolder1$txtBasvuruKonusuIhale']",
        "basvuru_sahibi": "input[name='ctl00$ContentPlaceHolder1$txtSikayetci']",
        "ihaleyi_yapan_idare": "input[name='ctl00$ContentPlaceHolder1$txtIhaleyiYapanIdare']",
        "yil": "select[name='ctl00$ContentPlaceHolder1$ddlYil']",
        "karar_metni": "input[name='ctl00$ContentPlaceHolder1$txtKararMetni']",
        # id (not a CSS selector) of the search anchor; also used as the
        # __doPostBack event target.
        "search_button_id": "ctl00_ContentPlaceHolder1_btnAra"
    }
    # id of the <table> element holding the search results grid.
    RESULTS_TABLE_ID = "grdKurulKararSorguSonuc"
    NO_RESULTS_MESSAGE_SELECTOR = "div#ctl00_MessageContent1"
    VALIDATION_SUMMARY_SELECTOR = "div#ctl00_ValidationSummary1"
    # Close button of the decision-preview modal (only when modal is open, ".in").
    MODAL_CLOSE_BUTTON_SELECTOR = "div#detayPopUp.in a#btnKapatPencere_0.close"
    # Decision markdown is returned to callers in chunks of this many characters.
    DOCUMENT_MARKDOWN_CHUNK_SIZE = 5000
56 |
57 | def __init__(self, request_timeout: float = 60000):
58 | self.playwright_instance: Optional[async_playwright] = None
59 | self.browser: Optional[Browser] = None
60 | self.context: Optional[BrowserContext] = None
61 | self.page: Optional[Page] = None
62 | self.request_timeout = request_timeout
63 | self._lock = asyncio.Lock()
64 |
    async def _ensure_playwright_ready(self, force_new_page: bool = False):
        """Lazily create or refresh the Playwright browser, context and page.

        Safe to call before every operation; the whole check-and-recreate
        sequence runs under ``self._lock`` so concurrent callers do not race.
        Recreating the browser cascades into recreating the context and page.

        Args:
            force_new_page: When True, a fresh page is created even if the
                current one is still open.

        Raises:
            PlaywrightError: If browser/context/page initialisation fails.
        """
        async with self._lock:
            browser_recreated = False
            context_recreated = False
            if not self.playwright_instance:
                self.playwright_instance = await async_playwright().start()
            if not self.browser or not self.browser.is_connected():
                if self.browser: await self.browser.close()
                # Ultra stealth browser configuration
                self.browser = await self.playwright_instance.chromium.launch(
                    headless=True,
                    args=[
                        # Disable automation indicators
                        '--no-first-run',
                        '--no-default-browser-check',
                        '--disable-dev-shm-usage',
                        '--disable-extensions',
                        '--disable-gpu',
                        '--disable-default-apps',
                        '--disable-translate',
                        '--disable-blink-features=AutomationControlled',
                        '--disable-ipc-flooding-protection',
                        '--disable-renderer-backgrounding',
                        '--disable-backgrounding-occluded-windows',
                        '--disable-client-side-phishing-detection',
                        '--disable-sync',
                        '--disable-features=TranslateUI,BlinkGenPropertyTrees',
                        '--disable-component-extensions-with-background-pages',
                        '--no-sandbox',  # Sometimes needed for headless
                        '--disable-web-security',
                        '--disable-features=VizDisplayCompositor',
                        # Language and locale
                        '--lang=tr-TR',
                        '--accept-lang=tr-TR,tr;q=0.9,en;q=0.8',
                        # Performance optimizations
                        '--memory-pressure-off',
                        '--max_old_space_size=4096',
                    ]
                )
                browser_recreated = True
            if not self.context or browser_recreated:
                if self.context: await self.context.close()
                if not self.browser: raise PlaywrightError("Browser not initialized.")
                # Ultra realistic context configuration
                self.context = await self.browser.new_context(
                    user_agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
                    viewport={'width': 1920, 'height': 1080},
                    screen={'width': 1920, 'height': 1080},
                    device_scale_factor=1.0,
                    is_mobile=False,
                    has_touch=False,
                    # Localization
                    locale='tr-TR',
                    timezone_id='Europe/Istanbul',
                    # Realistic browser features
                    java_script_enabled=True,
                    accept_downloads=True,
                    ignore_https_errors=True,
                    # Color scheme and media
                    color_scheme='light',
                    reduced_motion='no-preference',
                    forced_colors='none',
                    # Additional headers for realism
                    extra_http_headers={
                        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',
                        'Accept-Encoding': 'gzip, deflate, br',
                        'Accept-Language': 'tr-TR,tr;q=0.9,en;q=0.8',
                        'Cache-Control': 'max-age=0',
                        'DNT': '1',
                        'Upgrade-Insecure-Requests': '1',
                        'Sec-Ch-Ua': '"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"',
                        'Sec-Ch-Ua-Mobile': '?0',
                        'Sec-Ch-Ua-Platform': '"Windows"',
                        'Sec-Fetch-Dest': 'document',
                        'Sec-Fetch-Mode': 'navigate',
                        'Sec-Fetch-Site': 'none',
                        'Sec-Fetch-User': '?1',
                    },
                    # Permissions to appear realistic
                    permissions=['geolocation'],
                    geolocation={'latitude': 41.0082, 'longitude': 28.9784},  # Istanbul
                )
                context_recreated = True
            if not self.page or self.page.is_closed() or force_new_page or context_recreated or browser_recreated:
                if self.page and not self.page.is_closed(): await self.page.close()
                if not self.context: raise PlaywrightError("Context is None.")
                self.page = await self.context.new_page()
                if not self.page: raise PlaywrightError("Failed to create new page.")
                self.page.set_default_navigation_timeout(self.request_timeout)
                self.page.set_default_timeout(self.request_timeout)

                # CRITICAL: Anti-detection JavaScript injection
                await self._inject_stealth_scripts()
            if not self.page or self.page.is_closed():
                raise PlaywrightError("Playwright page initialization failed.")
            logger.debug("_ensure_playwright_ready completed.")
161 |
162 | async def close_client_session(self):
163 | async with self._lock:
164 | # ... (öncekiyle aynı)
165 | if self.page and not self.page.is_closed(): await self.page.close(); self.page = None
166 | if self.context: await self.context.close(); self.context = None
167 | if self.browser: await self.browser.close(); self.browser = None
168 | if self.playwright_instance: await self.playwright_instance.stop(); self.playwright_instance = None
169 | logger.info("KikApiClient (Playwright): Resources closed.")
170 |
    async def _inject_stealth_scripts(self):
        """
        Inject comprehensive stealth JavaScript to evade bot detection.
        Overrides navigator properties and other fingerprinting vectors.

        The script is registered with Playwright's add_init_script, so it runs
        before any page script on every navigation of this page. Vectors
        covered: navigator.webdriver/plugins/languages/platform, permissions
        queries, WebGL vendor/renderer strings, canvas and audio fingerprint
        noise, screen color depth, timezone offset, cookie-set logging,
        ChromeDriver `cdc_` globals, and performance timing.
        """
        if not self.page:
            # Init-order guard: there is no page yet to attach the script to.
            logger.warning("Cannot inject stealth scripts: page is None")
            return

        logger.debug("Injecting comprehensive stealth scripts...")

        # NOTE: this is JavaScript executed in the browser, not Python.
        stealth_script = '''
        // Override navigator.webdriver
        Object.defineProperty(navigator, 'webdriver', {
            get: () => undefined,
            configurable: true
        });

        // Override navigator properties to appear more human
        Object.defineProperty(navigator, 'languages', {
            get: () => ['tr-TR', 'tr', 'en-US', 'en'],
            configurable: true
        });

        Object.defineProperty(navigator, 'platform', {
            get: () => 'Win32',
            configurable: true
        });

        Object.defineProperty(navigator, 'vendor', {
            get: () => 'Google Inc.',
            configurable: true
        });

        Object.defineProperty(navigator, 'deviceMemory', {
            get: () => 8,
            configurable: true
        });

        Object.defineProperty(navigator, 'hardwareConcurrency', {
            get: () => 8,
            configurable: true
        });

        Object.defineProperty(navigator, 'maxTouchPoints', {
            get: () => 0,
            configurable: true
        });

        // Override plugins to appear realistic
        Object.defineProperty(navigator, 'plugins', {
            get: () => {
                return [
                    {
                        0: {type: "application/x-google-chrome-pdf", suffixes: "pdf", description: "Portable Document Format", enabledPlugin: Plugin},
                        description: "Portable Document Format",
                        filename: "internal-pdf-viewer",
                        length: 1,
                        name: "Chrome PDF Plugin"
                    },
                    {
                        0: {type: "application/pdf", suffixes: "pdf", description: "", enabledPlugin: Plugin},
                        description: "",
                        filename: "mhjfbmdgcfjbbpaeojofohoefgiehjai",
                        length: 1,
                        name: "Chrome PDF Viewer"
                    }
                ];
            },
            configurable: true
        });

        // Override permissions
        const originalQuery = window.navigator.permissions.query;
        window.navigator.permissions.query = (parameters) => (
            parameters.name === 'notifications' ?
                Promise.resolve({ state: Notification.permission }) :
                originalQuery(parameters)
        );

        // Override WebGL rendering context
        const getParameter = WebGLRenderingContext.prototype.getParameter;
        WebGLRenderingContext.prototype.getParameter = function(parameter) {
            if (parameter === 37445) { // UNMASKED_VENDOR_WEBGL
                return 'Intel Inc.';
            }
            if (parameter === 37446) { // UNMASKED_RENDERER_WEBGL
                return 'Intel(R) Iris(R) Plus Graphics 640';
            }
            return getParameter(parameter);
        };

        // Override canvas fingerprinting
        const toBlob = HTMLCanvasElement.prototype.toBlob;
        const toDataURL = HTMLCanvasElement.prototype.toDataURL;
        const getImageData = CanvasRenderingContext2D.prototype.getImageData;

        const noisify = (canvas, context) => {
            const imageData = context.getImageData(0, 0, canvas.width, canvas.height);
            for (let i = 0; i < imageData.data.length; i += 4) {
                imageData.data[i] += Math.floor(Math.random() * 10) - 5;
                imageData.data[i + 1] += Math.floor(Math.random() * 10) - 5;
                imageData.data[i + 2] += Math.floor(Math.random() * 10) - 5;
            }
            context.putImageData(imageData, 0, 0);
        };

        Object.defineProperty(HTMLCanvasElement.prototype, 'toBlob', {
            value: function(callback, type, encoderOptions) {
                noisify(this, this.getContext('2d'));
                return toBlob.apply(this, arguments);
            }
        });

        Object.defineProperty(HTMLCanvasElement.prototype, 'toDataURL', {
            value: function(type, encoderOptions) {
                noisify(this, this.getContext('2d'));
                return toDataURL.apply(this, arguments);
            }
        });

        // Override AudioContext for audio fingerprinting
        const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
        const originalAnalyser = audioCtx.createAnalyser;
        audioCtx.createAnalyser = function() {
            const analyser = originalAnalyser.apply(this, arguments);
            const getFloatFrequencyData = analyser.getFloatFrequencyData;
            analyser.getFloatFrequencyData = function(array) {
                getFloatFrequencyData.apply(this, arguments);
                for (let i = 0; i < array.length; i++) {
                    array[i] += Math.random() * 0.0001;
                }
            };
            return analyser;
        };

        // Override screen properties
        Object.defineProperty(window.screen, 'colorDepth', {
            get: () => 24,
            configurable: true
        });

        Object.defineProperty(window.screen, 'pixelDepth', {
            get: () => 24,
            configurable: true
        });

        // Override timezone
        Date.prototype.getTimezoneOffset = function() {
            return -180; // UTC+3 (Istanbul)
        };

        // Override document.cookie to prevent tracking
        const originalCookieDescriptor = Object.getOwnPropertyDescriptor(Document.prototype, 'cookie') ||
                                         Object.getOwnPropertyDescriptor(HTMLDocument.prototype, 'cookie');
        if (originalCookieDescriptor && originalCookieDescriptor.configurable) {
            Object.defineProperty(document, 'cookie', {
                get: function() {
                    return originalCookieDescriptor.get.call(this);
                },
                set: function(val) {
                    console.log('Cookie set blocked:', val);
                    return originalCookieDescriptor.set.call(this, val);
                },
                configurable: true
            });
        }

        // Remove automation traces
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Array;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Promise;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Symbol;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_JSON;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Object;
        delete window.cdc_adoQpoasnfa76pfcZLmcfl_Proxy;

        // Add realistic performance timing
        if (window.performance && window.performance.timing) {
            const timing = window.performance.timing;
            const now = Date.now();
            Object.defineProperty(timing, 'navigationStart', { value: now - Math.floor(Math.random() * 1000) + 1000, configurable: false });
            Object.defineProperty(timing, 'loadEventEnd', { value: now - Math.floor(Math.random() * 100) + 100, configurable: false });
        }

        console.log('✓ Stealth scripts injected successfully');
        '''

        try:
            # add_init_script persists for the lifetime of the page object,
            # so one injection covers all subsequent navigations.
            await self.page.add_init_script(stealth_script)
            logger.debug("✅ Stealth scripts injected successfully")
        except Exception as e:
            # Best-effort: a failed injection degrades stealth but should not
            # abort the scraping session.
            logger.warning(f"⚠️ Failed to inject stealth scripts: {e}")
363 |
364 | async def _simulate_human_behavior(self, fast_mode: bool = True):
365 | """
366 | Simulate realistic human behavior patterns to avoid detection.
367 | Includes mouse movements, typing patterns, and natural delays.
368 |
369 | Args:
370 | fast_mode: If True, use minimal timing for speed optimization
371 | """
372 | if not self.page:
373 | logger.warning("Cannot simulate human behavior: page is None")
374 | return
375 |
376 | logger.debug("🤖 Simulating human behavior patterns...")
377 |
378 | try:
379 | if fast_mode:
380 | # ULTRA-FAST MODE: Minimal human behavior
381 | viewport_size = self.page.viewport_size
382 | if viewport_size and random.random() < 0.7: # 70% chance to do movement
383 | width, height = viewport_size['width'], viewport_size['height']
384 |
385 | # Single quick mouse movement
386 | x = random.randint(200, width - 200)
387 | y = random.randint(200, height - 200)
388 | await self.page.mouse.move(x, y)
389 |
390 | # Brief scroll (50% chance)
391 | if random.random() < 0.5:
392 | await self.page.mouse.wheel(0, random.randint(50, 100))
393 |
394 | # Ultra-minimal delay
395 | await asyncio.sleep(random.uniform(0.05, 0.15)) # Reduced from 0.1-0.3
396 |
397 | else:
398 | # FULL MODE: Original comprehensive behavior
399 | viewport_size = self.page.viewport_size
400 | if viewport_size:
401 | width, height = viewport_size['width'], viewport_size['height']
402 |
403 | # Generate 3-5 random mouse movements
404 | movements = random.randint(3, 5)
405 | logger.debug(f" 🖱️ Performing {movements} random mouse movements")
406 |
407 | for i in range(movements):
408 | x = random.randint(100, width - 100)
409 | y = random.randint(100, height - 100)
410 |
411 | # Move mouse with realistic speed (not instant)
412 | await self.page.mouse.move(x, y)
413 | await asyncio.sleep(random.uniform(0.1, 0.3))
414 |
415 | # 2. Scroll simulation
416 | logger.debug(" 📜 Simulating scroll behavior")
417 | scroll_amount = random.randint(100, 300)
418 | await self.page.mouse.wheel(0, scroll_amount)
419 | await asyncio.sleep(random.uniform(0.2, 0.5))
420 |
421 | # Scroll back up
422 | await self.page.mouse.wheel(0, -scroll_amount)
423 | await asyncio.sleep(random.uniform(0.2, 0.4))
424 |
425 | # 3. Random page interaction delays
426 | await asyncio.sleep(random.uniform(0.5, 1.5))
427 |
428 | logger.debug("✅ Human behavior simulation completed")
429 |
430 | except Exception as e:
431 | logger.warning(f"⚠️ Human behavior simulation failed: {e}")
432 |
433 | async def _human_type(self, selector: str, text: str, clear_first: bool = True, fast_mode: bool = True):
434 | """
435 | Type text with human-like patterns and delays.
436 |
437 | Args:
438 | selector: CSS selector for the input element
439 | text: Text to type
440 | clear_first: Whether to clear the field first
441 | fast_mode: If True, use minimal delays for speed optimization
442 | """
443 | if not self.page:
444 | logger.warning("Cannot perform human typing: page is None")
445 | return
446 |
447 | try:
448 | if fast_mode:
449 | # FAST MODE: Direct fill for speed
450 | await self.page.fill(selector, text)
451 | await asyncio.sleep(random.uniform(0.02, 0.05)) # Reduced from 0.05-0.1
452 | else:
453 | # FULL MODE: Character-by-character human typing
454 | # Focus on the element first
455 | await self.page.focus(selector)
456 | await asyncio.sleep(random.uniform(0.1, 0.3))
457 |
458 | # Clear field if requested
459 | if clear_first:
460 | await self.page.keyboard.press('Control+a')
461 | await asyncio.sleep(random.uniform(0.05, 0.15))
462 | await self.page.keyboard.press('Delete')
463 | await asyncio.sleep(random.uniform(0.05, 0.15))
464 |
465 | # Type each character with human-like delays
466 | for char in text:
467 | await self.page.keyboard.type(char)
468 | # Human typing speed: 50-150ms between characters
469 | delay = random.uniform(0.05, 0.15)
470 |
471 | # Occasional longer pauses (thinking)
472 | if random.random() < 0.1: # 10% chance
473 | delay += random.uniform(0.2, 0.8)
474 |
475 | await asyncio.sleep(delay)
476 |
477 | # Brief pause after typing
478 | await asyncio.sleep(random.uniform(0.2, 0.6))
479 |
480 | logger.debug(f"✅ Human-typed '{text}' into {selector}")
481 |
482 | except Exception as e:
483 | logger.warning(f"⚠️ Human typing failed: {e}")
484 |
485 | async def _human_click(self, selector: str, wait_before: bool = True, wait_after: bool = True, fast_mode: bool = True):
486 | """
487 | Perform a human-like click with realistic delays and mouse movement.
488 |
489 | Args:
490 | selector: CSS selector or element to click
491 | wait_before: Whether to wait before clicking
492 | wait_after: Whether to wait after clicking
493 | fast_mode: If True, use minimal delays for speed optimization
494 | """
495 | if not self.page:
496 | logger.warning("Cannot perform human click: page is None")
497 | return
498 |
499 | try:
500 | if fast_mode:
501 | # FAST MODE: Direct click with minimal delay
502 | if wait_before:
503 | await asyncio.sleep(random.uniform(0.02, 0.08)) # Reduced from 0.05-0.15
504 |
505 | await self.page.click(selector)
506 |
507 | if wait_after:
508 | await asyncio.sleep(random.uniform(0.02, 0.08)) # Reduced from 0.05-0.15
509 |
510 | else:
511 | # FULL MODE: Realistic mouse movement and timing
512 | # Wait before clicking (thinking time)
513 | if wait_before:
514 | await asyncio.sleep(random.uniform(0.3, 0.8))
515 |
516 | # Get element bounds for realistic mouse movement
517 | element = await self.page.query_selector(selector)
518 | if element:
519 | box = await element.bounding_box()
520 | if box:
521 | # Move to element with slight randomness
522 | center_x = box['x'] + box['width'] / 2
523 | center_y = box['y'] + box['height'] / 2
524 |
525 | # Add small random offset
526 | offset_x = random.uniform(-10, 10)
527 | offset_y = random.uniform(-5, 5)
528 |
529 | await self.page.mouse.move(center_x + offset_x, center_y + offset_y)
530 | await asyncio.sleep(random.uniform(0.1, 0.3))
531 |
532 | # Perform click
533 | await self.page.mouse.click(center_x + offset_x, center_y + offset_y)
534 |
535 | logger.debug(f"✅ Human-clicked {selector}")
536 | else:
537 | # Fallback to regular click
538 | await self.page.click(selector)
539 | logger.debug(f"✅ Fallback-clicked {selector}")
540 | else:
541 | logger.warning(f"⚠️ Element not found for human click: {selector}")
542 | return
543 |
544 | # Wait after clicking (processing time)
545 | if wait_after:
546 | await asyncio.sleep(random.uniform(0.2, 0.6))
547 |
548 | logger.debug(f"✅ Human-clicked {selector}")
549 |
550 | except Exception as e:
551 | logger.warning(f"⚠️ Human click failed: {e}")
552 |
553 | async def _simulate_page_exploration(self, fast_mode: bool = True):
554 | """
555 | Simulate natural page exploration before performing the main task.
556 | This helps establish a more human-like session.
557 |
558 | Args:
559 | fast_mode: If True, use minimal exploration for speed optimization
560 | """
561 | if not self.page:
562 | return
563 |
564 | logger.debug("🕵️ Simulating page exploration...")
565 |
566 | try:
567 | if fast_mode:
568 | # ULTRA-FAST MODE: Minimal exploration
569 | await asyncio.sleep(random.uniform(0.05, 0.1)) # Reduced from 0.1-0.3
570 |
571 | # Single mouse movement (optional)
572 | try:
573 | elements = await self.page.query_selector_all("input, button")
574 | if elements and random.random() < 0.5: # 50% chance to skip
575 | element = random.choice(elements)
576 | box = await element.bounding_box()
577 | if box:
578 | center_x = box['x'] + box['width'] / 2
579 | center_y = box['y'] + box['height'] / 2
580 | await self.page.mouse.move(center_x, center_y)
581 | except:
582 | pass
583 |
584 | await asyncio.sleep(random.uniform(0.02, 0.05)) # Reduced from 0.05-0.15
585 |
586 | else:
587 | # FULL MODE: Comprehensive exploration
588 | # 1. Brief pause to "read" the page
589 | await asyncio.sleep(random.uniform(1.0, 2.5))
590 |
591 | # 2. Move mouse to various UI elements (like a human would explore)
592 | explore_selectors = [
593 | "h1", "h2", ".navbar", "#header", ".logo",
594 | "input", "button", "a", ".form-group"
595 | ]
596 |
597 | explored = 0
598 | for selector in explore_selectors:
599 | elements = await self.page.query_selector_all(selector)
600 | if elements and explored < 3: # Explore max 3 elements
601 | element = random.choice(elements)
602 | box = await element.bounding_box()
603 | if box:
604 | center_x = box['x'] + box['width'] / 2
605 | center_y = box['y'] + box['height'] / 2
606 |
607 | await self.page.mouse.move(center_x, center_y)
608 | await asyncio.sleep(random.uniform(0.3, 0.8))
609 | explored += 1
610 |
611 | # 3. Small scroll to simulate reading
612 | await self.page.mouse.wheel(0, random.randint(50, 150))
613 | await asyncio.sleep(random.uniform(0.5, 1.2))
614 |
615 | logger.debug("✅ Page exploration completed")
616 |
617 | except Exception as e:
618 | logger.debug(f"⚠️ Page exploration failed: {e}")
619 |
    def _parse_decision_entries_from_soup(self, soup: BeautifulSoup, search_karar_tipi: KikKararTipi) -> List[KikDecisionEntry]:
        """Parse KIK decision rows out of the search-results HTML.

        Looks for the results table by its known ID; if absent, falls back to
        heuristically picking a plausible table. Each data row is parsed with
        several selector fallbacks because the site's ASP.NET markup varies.
        Rows missing the decision number or date are skipped.

        Args:
            soup: Parsed results page.
            search_karar_tipi: Decision type used in the search; copied onto
                each entry because the HTML does not carry it.

        Returns:
            List of successfully parsed KikDecisionEntry objects (may be empty).
        """
        entries: List[KikDecisionEntry] = []
        table = soup.find("table", {"id": self.RESULTS_TABLE_ID})

        logger.debug(f"Looking for table with ID: {self.RESULTS_TABLE_ID}")
        if not table:
            logger.warning(f"Table with ID '{self.RESULTS_TABLE_ID}' not found in HTML")
            # Log available tables for debugging
            all_tables = soup.find_all("table")
            logger.debug(f"Found {len(all_tables)} tables in HTML")
            for idx, tbl in enumerate(all_tables):
                table_id = tbl.get('id', 'no-id')
                table_class = tbl.get('class', 'no-class')
                rows = tbl.find_all('tr')
                logger.debug(f"Table {idx}: id='{table_id}', class='{table_class}', rows={len(rows)}")

                # If this looks like a results table, try to use it
                if (table_id and ('grd' in table_id.lower() or 'kurul' in table_id.lower() or 'sonuc' in table_id.lower())) or \
                   (isinstance(table_class, list) and any('grid' in cls.lower() or 'result' in cls.lower() for cls in table_class)) or \
                   len(rows) > 3:  # Table with multiple rows might be results
                    logger.info(f"Trying to parse table {idx} as potential results table: id='{table_id}'")
                    table = tbl
                    break

            if not table:
                logger.error("No suitable results table found")
                return entries

        rows = table.find_all("tr")
        logger.info(f"Found {len(rows)} rows in results table")

        # Debug: Log first few rows structure
        for i, row in enumerate(rows[:3]):
            cells = row.find_all(["td", "th"])
            cell_texts = [cell.get_text(strip=True)[:30] for cell in cells]
            logger.info(f"Row {i} structure: {len(cells)} cells: {cell_texts}")

        for row_idx, row in enumerate(rows):
            # Skip first row (search bar with colspan=7) and second row (header with 6 cells)
            if row_idx < 2:
                logger.debug(f"Skipping header row {row_idx}")
                continue

            cells = row.find_all("td")
            logger.debug(f"Row {row_idx}: Found {len(cells)} cells")

            # Log cell contents for debugging
            if cells and row_idx < 5:  # Log first few data rows
                for cell_idx, cell in enumerate(cells):
                    cell_text = cell.get_text(strip=True)[:50]  # First 50 chars
                    logger.debug(f"  Cell {cell_idx}: '{cell_text}...'")

            # Be more flexible with cell count - try 6 cells first, then adapt
            if len(cells) >= 5:  # At least 5 cells for minimum required data
                try:
                    # Try to find preview button in first cell or any cell with a link
                    preview_button_tag = None
                    event_target = ""

                    # Look for preview button in first few cells
                    for cell_idx in range(min(3, len(cells))):
                        cell = cells[cell_idx]
                        # Try multiple patterns for preview button (based on actual HTML structure)
                        preview_candidates = [
                            cell.find("a", id="btnOnizle"),  # Exact match
                            cell.find("a", id=re.compile(r"btnOnizle$")),
                            cell.find("a", id=re.compile(r"btn.*Onizle")),
                            cell.find("a", id=re.compile(r".*Onizle.*")),
                            cell.find("a", href=re.compile(r"__doPostBack"))
                        ]

                        for candidate in preview_candidates:
                            if candidate and candidate.has_attr('href'):
                                # The postback target is needed later to fetch the document.
                                match = re.search(r"__doPostBack\('([^']*)','([^']*)'\)", candidate['href'])
                                if match:
                                    event_target = match.group(1)
                                    preview_button_tag = candidate
                                    logger.debug(f"Row {row_idx}: Found event_target '{event_target}' in cell {cell_idx}")
                                    break

                        if preview_button_tag:
                            break

                    if not preview_button_tag:
                        logger.debug(f"Row {row_idx}: No preview button found in any cell")
                        # Log what links we found
                        for cell_idx, cell in enumerate(cells[:3]):
                            links_in_cell = cell.find_all("a")
                            logger.debug(f"  Cell {cell_idx}: {len(links_in_cell)} links")
                            for link in links_in_cell[:2]:
                                logger.debug(f"    Link id='{link.get('id')}', href='{link.get('href', '')[:50]}...'")

                    # Try to find decision data spans with more flexible patterns
                    karar_no_span = None
                    karar_tarihi_span = None
                    idare_span = None
                    basvuru_sahibi_span = None
                    ihale_span = None

                    # Try different span patterns for karar no (usually in cell 1)
                    for cell_idx in range(min(4, len(cells))):
                        if not karar_no_span:
                            cell = cells[cell_idx]
                            candidates = [
                                cell.find("span", id="lblKno"),  # Exact match based on actual HTML
                                cell.find("span", id=re.compile(r"lblKno$")),
                                cell.find("span", id=re.compile(r".*Kno.*")),
                                cell.find("span", id=re.compile(r".*KararNo.*")),
                                cell.find("span", id=re.compile(r".*No.*"))
                            ]
                            for candidate in candidates:
                                if candidate and candidate.get_text(strip=True):
                                    karar_no_span = candidate
                                    logger.debug(f"Row {row_idx}: Found karar_no in cell {cell_idx}")
                                    break

                    # Try different patterns for karar tarihi (usually in cell 2)
                    for cell_idx in range(min(4, len(cells))):
                        if not karar_tarihi_span:
                            cell = cells[cell_idx]
                            candidates = [
                                cell.find("span", id="lblKtar"),  # Exact match based on actual HTML
                                cell.find("span", id=re.compile(r"lblKtar$")),
                                cell.find("span", id=re.compile(r".*Ktar.*")),
                                cell.find("span", id=re.compile(r".*Tarih.*")),
                                cell.find("span", id=re.compile(r".*Date.*"))
                            ]
                            for candidate in candidates:
                                if candidate and candidate.get_text(strip=True):
                                    # Check if it looks like a date
                                    text = candidate.get_text(strip=True)
                                    if re.match(r'\d{1,2}[./]\d{1,2}[./]\d{4}', text):
                                        karar_tarihi_span = candidate
                                        logger.debug(f"Row {row_idx}: Found karar_tarihi in cell {cell_idx}")
                                        break

                    # Find other spans in remaining cells (if we have 6 cells) - using exact IDs
                    if len(cells) >= 6:
                        idare_span = cells[3].find("span", id="lblIdare") or cells[3].find("span")
                        basvuru_sahibi_span = cells[4].find("span", id="lblSikayetci") or cells[4].find("span")
                        ihale_span = cells[5].find("span", id="lblIhale") or cells[5].find("span")
                    elif len(cells) == 5:
                        # Adjust for 5-cell layout
                        idare_span = cells[2].find("span") if cells[2] != cells[1] else None
                        basvuru_sahibi_span = cells[3].find("span") if len(cells) > 3 else None
                        ihale_span = cells[4].find("span") if len(cells) > 4 else None

                    # Log what we found
                    logger.debug(f"Row {row_idx}: karar_no_span={karar_no_span is not None}, "
                                 f"karar_tarihi_span={karar_tarihi_span is not None}, "
                                 f"event_target={bool(event_target)}")

                    # For KIK, we need at least karar_no and karar_tarihi, event_target is helpful but not critical
                    if not (karar_no_span and karar_tarihi_span):
                        logger.debug(f"Row {row_idx}: Missing required fields (karar_no or karar_tarihi), skipping")
                        # Log what spans we found in cells
                        for i, cell in enumerate(cells):
                            spans = cell.find_all("span")
                            if spans:
                                span_info = []
                                for s in spans:
                                    span_id = s.get('id', 'no-id')
                                    span_text = s.get_text(strip=True)[:20]
                                    span_info.append(f"{span_id}:'{span_text}...'")
                                logger.debug(f"  Cell {i} spans: {span_info}")
                        continue

                    # If we don't have event_target, we can still create an entry but mark it specially
                    if not event_target:
                        logger.warning(f"Row {row_idx}: No event_target found, document retrieval won't work")
                        event_target = f"missing_target_row_{row_idx}"  # Placeholder

                    # The decision type is taken from the search parameter,
                    # because it is not present directly in the HTML.
                    try:
                        entry = KikDecisionEntry(
                            preview_event_target=event_target,
                            kararNo=karar_no_span.get_text(strip=True),
                            karar_tipi=search_karar_tipi,  # Attach the decision type that was searched for
                            kararTarihi=karar_tarihi_span.get_text(strip=True),
                            idare=idare_span.get_text(strip=True) if idare_span else None,
                            basvuruSahibi=basvuru_sahibi_span.get_text(strip=True) if basvuru_sahibi_span else None,
                            ihaleKonusu=ihale_span.get_text(strip=True) if ihale_span else None,
                        )
                        entries.append(entry)
                        logger.info(f"Row {row_idx}: Successfully parsed decision: {entry.karar_no_str}")
                    except Exception as e:
                        logger.error(f"Row {row_idx}: Error creating KikDecisionEntry: {e}")
                        continue

                except Exception as e:
                    logger.error(f"Error parsing row {row_idx}: {e}", exc_info=True)
            else:
                logger.warning(f"Row {row_idx}: Expected at least 5 cells but found {len(cells)}, skipping")
                if len(cells) > 0:
                    cell_texts = [cell.get_text(strip=True)[:50] for cell in cells[:3]]
                    logger.debug(f"Row {row_idx} cells preview: {cell_texts}")

        logger.info(f"Parsed {len(entries)} decision entries from {len(rows)} rows")
        return entries
819 |
820 | def _parse_total_records_from_soup(self, soup: BeautifulSoup) -> int:
821 | # ... (öncekiyle aynı) ...
822 | try:
823 | pager_div = soup.find("div", class_="gridToplamSayi")
824 | if pager_div:
825 | match = re.search(r"Toplam Kayıt Sayısı:(\d+)", pager_div.get_text(strip=True))
826 | if match: return int(match.group(1))
827 | except: pass
828 | return 0
829 |
830 | def _parse_current_page_from_soup(self, soup: BeautifulSoup) -> int:
831 | # ... (öncekiyle aynı) ...
832 | try:
833 | pager_div = soup.find("div", class_="sayfalama")
834 | if pager_div:
835 | active_page_span = pager_div.find("span", class_="active")
836 | if active_page_span: return int(active_page_span.get_text(strip=True))
837 | except: pass
838 | return 1
839 |
840 | async def search_decisions(self, search_params: KikSearchRequest) -> KikSearchResult:
841 | await self._ensure_playwright_ready()
842 | page = self.page
843 | search_url = f"{self.BASE_URL}{self.SEARCH_PAGE_PATH}"
844 | try:
845 | if page.url != search_url:
846 | await page.goto(search_url, wait_until="networkidle", timeout=self.request_timeout)
847 |
848 | # Simulate natural page exploration after navigation (FAST MODE)
849 | await self._simulate_page_exploration(fast_mode=True)
850 |
851 | search_button_selector = f"a[id='{self.FIELD_LOCATORS['search_button_id']}']"
852 | await page.wait_for_selector(search_button_selector, state="visible", timeout=self.request_timeout)
853 |
854 | current_karar_tipi_value = search_params.karar_tipi.value
855 | radio_locator_selector = f"{self.FIELD_LOCATORS['karar_tipi_radio_group']}[value='{current_karar_tipi_value}']"
856 | if not await page.locator(radio_locator_selector).is_checked():
857 | js_target_radio = f"ctl00$ContentPlaceHolder1${current_karar_tipi_value}"
858 | logger.info(f"Selecting radio button: {js_target_radio}")
859 | async with page.expect_navigation(wait_until="networkidle", timeout=self.request_timeout):
860 | await page.evaluate(f"javascript:__doPostBack('{js_target_radio}','')")
861 | # Ultra-fast wait for page to stabilize after radio button change
862 | await page.wait_for_timeout(300) # Reduced from 1000ms
863 | logger.info("Radio button selection completed")
864 |
865 | # Helper function for human-like form filling (FAST MODE)
866 | async def human_fill_if_value(selector_key: str, value: Optional[str]):
867 | if value is not None:
868 | selector = self.FIELD_LOCATORS[selector_key]
869 | await self._human_type(selector, value, fast_mode=True)
870 |
871 | # Karar No'yu KİK sitesine göndermeden önce '_' -> '/' dönüşümü yap
872 | karar_no_for_kik_form = None
873 | if search_params.karar_no: # search_params.karar_no Claude'dan '_' ile gelmiş olabilir
874 | karar_no_for_kik_form = search_params.karar_no.replace('_', '/')
875 | logger.info(f"Using karar_no '{karar_no_for_kik_form}' (transformed from '{search_params.karar_no}') for KIK form.")
876 |
877 | # Fill form fields with FAST human-like behavior
878 | logger.info("Filling form fields with fast mode...")
879 |
880 | # Start with FAST mouse behavior simulation
881 | await self._simulate_human_behavior(fast_mode=True)
882 |
883 | await human_fill_if_value('karar_metni', search_params.karar_metni)
884 | await human_fill_if_value('karar_no', karar_no_for_kik_form) # Dönüştürülmüş halini kullan
885 | await human_fill_if_value('karar_tarihi_baslangic', search_params.karar_tarihi_baslangic)
886 | await human_fill_if_value('karar_tarihi_bitis', search_params.karar_tarihi_bitis)
887 | await human_fill_if_value('resmi_gazete_sayisi', search_params.resmi_gazete_sayisi)
888 | await human_fill_if_value('resmi_gazete_tarihi', search_params.resmi_gazete_tarihi)
889 | await human_fill_if_value('basvuru_konusu_ihale', search_params.basvuru_konusu_ihale)
890 | await human_fill_if_value('basvuru_sahibi', search_params.basvuru_sahibi)
891 | await human_fill_if_value('ihaleyi_yapan_idare', search_params.ihaleyi_yapan_idare)
892 |
893 | if search_params.yil:
894 | await page.select_option(self.FIELD_LOCATORS['yil'], value=search_params.yil)
895 | await page.wait_for_timeout(50) # Reduced from 100ms
896 |
897 | logger.info("Form filling completed, preparing for search...")
898 |
899 | # Additional FAST human behavior before search
900 | await self._simulate_human_behavior(fast_mode=True)
901 |
902 | action_is_search_button_click = (search_params.page == 1)
903 | event_target_for_submit: str
904 |
905 | try:
906 | if action_is_search_button_click:
907 | event_target_for_submit = self.FIELD_LOCATORS['search_button_id']
908 | # Use human-like clicking for search button
909 | search_button_selector = f"a[id='{event_target_for_submit}']"
910 | logger.info(f"Performing human-like search button click...")
911 |
912 | try:
913 | # Hide datepicker first to prevent interference
914 | await page.evaluate("$('#ui-datepicker-div').hide()")
915 |
916 | # FAST Human-like click on search button
917 | await self._human_click(search_button_selector, wait_before=True, wait_after=False, fast_mode=True)
918 |
919 | # Wait for navigation
920 | await page.wait_for_load_state("networkidle", timeout=self.request_timeout)
921 | logger.info("Search navigation completed successfully")
922 | except Exception as e:
923 | logger.warning(f"Human click failed, falling back to JavaScript: {e}")
924 | # Hide datepicker and use JavaScript fallback
925 | await page.evaluate("$('#ui-datepicker-div').hide()")
926 | async with page.expect_navigation(wait_until="networkidle", timeout=self.request_timeout):
927 | await page.evaluate(f"javascript:__doPostBack('{event_target_for_submit}','')")
928 | logger.info("Search navigation completed via fallback")
929 | else:
930 | # Pagination - use original method for consistency
931 | page_link_ctl_number = search_params.page + 2
932 | event_target_for_submit = f"ctl00$ContentPlaceHolder1$grdKurulKararSorguSonuc$ctl14$ctl{page_link_ctl_number:02d}"
933 | logger.info(f"Executing pagination with event target: {event_target_for_submit}")
934 |
935 | async with page.expect_navigation(wait_until="networkidle", timeout=self.request_timeout):
936 | await page.evaluate(f"javascript:__doPostBack('{event_target_for_submit}','')")
937 | logger.info("Pagination navigation completed successfully")
938 | except PlaywrightTimeoutError:
939 | logger.warning("Search navigation timed out, but continuing...")
940 | await page.wait_for_timeout(5000) # Longer wait if navigation fails
941 |
942 | # Ultra-fast wait time for results to load
943 | logger.info("Waiting for search results to load...")
944 | await page.wait_for_timeout(500) # Reduced from 1000ms
945 |
946 | results_table_dom_selector = f"table#{self.RESULTS_TABLE_ID}"
947 | try:
948 | # First wait for any tables to appear (more general check)
949 | logger.info("Waiting for any tables to appear...")
950 | await page.wait_for_function("""
951 | () => document.querySelectorAll('table').length > 0
952 | """, timeout=4000) # Reduced from 8000ms
953 | logger.info("At least one table appeared")
954 |
955 | # Then wait for our specific table
956 | await page.wait_for_selector(results_table_dom_selector, timeout=4000, state="attached") # Reduced from 8000ms
957 | logger.debug("Results table attached to DOM")
958 |
959 | # Wait for table to have some content (more than just headers)
960 | await page.wait_for_function(f"""
961 | () => {{
962 | const table = document.querySelector('{results_table_dom_selector}');
963 | return table && table.querySelectorAll('tr').length > 2;
964 | }}
965 | """, timeout=4000) # Reduced from 20000ms
966 | logger.debug("Results table populated with data")
967 |
968 | # Ultra-fast additional wait for any remaining JavaScript
969 | await page.wait_for_timeout(500) # Reduced from 3000ms
970 |
971 | except PlaywrightTimeoutError:
972 | logger.warning(f"Timeout waiting for results table '{results_table_dom_selector}'.")
973 | # Try one more wait for content placeholder
974 | try:
975 | await page.wait_for_selector("#ctl00_ContentPlaceHolder1", timeout=10000)
976 | logger.info("ContentPlaceHolder1 found, checking for tables...")
977 | await page.wait_for_timeout(5000)
978 | except PlaywrightTimeoutError:
979 | logger.warning("ContentPlaceHolder1 also not found - content may not have loaded")
980 |
981 | html_content = await page.content()
982 | soup = BeautifulSoup(html_content, "html.parser")
983 | # ... (hata ve sonuç yok mesajı kontrolü aynı) ...
984 | validation_summary_tag = soup.find("div", id=self.VALIDATION_SUMMARY_SELECTOR.split('[')[0].split(':')[0])
985 | if validation_summary_tag and validation_summary_tag.get_text(strip=True) and \
986 | ("display: none" not in validation_summary_tag.get("style", "").lower() if validation_summary_tag.has_attr("style") else True) and \
987 | validation_summary_tag.get_text(strip=True) != "":
988 | return KikSearchResult(decisions=[], total_records=0, current_page=search_params.page)
989 | message_content_div = soup.find("div", id=self.NO_RESULTS_MESSAGE_SELECTOR.split(':')[0])
990 | if message_content_div and "kayıt bulunamamıştır" in message_content_div.get_text(strip=True).lower():
991 | return KikSearchResult(decisions=[], total_records=0, current_page=1)
992 |
993 | # _parse_decision_entries_from_soup'a arama yapılan karar_tipi'ni gönder
994 | decisions = self._parse_decision_entries_from_soup(soup, search_params.karar_tipi)
995 | total_records = self._parse_total_records_from_soup(soup)
996 | current_page_from_html = self._parse_current_page_from_soup(soup)
997 | return KikSearchResult(decisions=decisions, total_records=total_records, current_page=current_page_from_html)
998 | except Exception as e:
999 | logger.error(f"Error during KIK decision search: {e}", exc_info=True)
1000 | return KikSearchResult(decisions=[], current_page=search_params.page)
1001 |
1002 | def _clean_html_for_markdown(self, html_content: str) -> str:
1003 | # ... (öncekiyle aynı) ...
1004 | if not html_content: return ""
1005 | return html_parser.unescape(html_content)
1006 |
1007 | def _convert_html_to_markdown_internal(self, html_fragment: str) -> Optional[str]:
1008 | # ... (öncekiyle aynı) ...
1009 | if not html_fragment: return None
1010 | cleaned_html = self._clean_html_for_markdown(html_fragment)
1011 | markdown_output = None
1012 | try:
1013 | # Convert HTML string to bytes and create BytesIO stream
1014 | html_bytes = cleaned_html.encode('utf-8')
1015 | html_stream = io.BytesIO(html_bytes)
1016 |
1017 | # Pass BytesIO stream to MarkItDown to avoid temp file creation
1018 | md_converter = MarkItDown(enable_plugins=True, remove_alt_whitespace=True, keep_underline=True)
1019 | markdown_output = md_converter.convert(html_stream).text_content
1020 | if markdown_output: markdown_output = re.sub(r'\n{3,}', '\n\n', markdown_output).strip()
1021 | except Exception as e: logger.error(f"MarkItDown conversion error: {e}", exc_info=True)
1022 | return markdown_output
1023 |
1024 |
1025 | async def get_decision_document_as_markdown(
1026 | self,
1027 | karar_id_b64: str,
1028 | page_number: int = 1
1029 | ) -> KikDocumentMarkdown:
1030 | await self._ensure_playwright_ready()
1031 | # Bu metodun kendi içinde yeni bir 'page' nesnesi ('doc_page_for_content') kullanacağını unutmayın,
1032 | # ana 'self.page' arama sonuçları sayfasında kalır.
1033 | current_main_page = self.page # Ana arama sonuçları sayfasını referans alalım
1034 |
1035 | try:
1036 | decoded_key = base64.b64decode(karar_id_b64.encode('utf-8')).decode('utf-8')
1037 | karar_tipi_value, karar_no_for_search = decoded_key.split('|', 1)
1038 | original_karar_tipi = KikKararTipi(karar_tipi_value)
1039 | logger.info(f"KIK Get Detail: Decoded karar_id '{karar_id_b64}' to Karar Tipi: {original_karar_tipi.value}, Karar No: {karar_no_for_search}. Requested Markdown Page: {page_number}")
1040 | except Exception as e_decode:
1041 | logger.error(f"Invalid karar_id format. Could not decode Base64 or split: {karar_id_b64}. Error: {e_decode}")
1042 | return KikDocumentMarkdown(retrieved_with_karar_id=karar_id_b64, error_message="Invalid karar_id format.", current_page=page_number)
1043 |
1044 | default_error_response_data = {
1045 | "retrieved_with_karar_id": karar_id_b64,
1046 | "retrieved_karar_no": karar_no_for_search,
1047 | "retrieved_karar_tipi": original_karar_tipi,
1048 | "error_message": "An unspecified error occurred.",
1049 | "current_page": page_number, "total_pages": 1, "is_paginated": False
1050 | }
1051 |
1052 | # Ana arama sayfasında olduğumuzdan emin olalım
1053 | if self.SEARCH_PAGE_PATH not in current_main_page.url:
1054 | logger.info(f"Not on search page ({current_main_page.url}). Navigating to {self.SEARCH_PAGE_PATH} before targeted search for document.")
1055 | await current_main_page.goto(f"{self.BASE_URL}{self.SEARCH_PAGE_PATH}", wait_until="networkidle", timeout=self.request_timeout)
1056 | await current_main_page.wait_for_selector(f"a[id='{self.FIELD_LOCATORS['search_button_id']}']", state="visible", timeout=self.request_timeout)
1057 |
1058 | targeted_search_params = KikSearchRequest(
1059 | karar_no=karar_no_for_search,
1060 | karar_tipi=original_karar_tipi,
1061 | page=1
1062 | )
1063 | logger.info(f"Performing targeted search for Karar No: {karar_no_for_search}")
1064 | # search_decisions kendi içinde _ensure_playwright_ready çağırır ve self.page'i kullanır.
1065 | # Bu, current_main_page ile aynı olmalı.
1066 | search_results = await self.search_decisions(targeted_search_params)
1067 |
1068 | if not search_results.decisions:
1069 | default_error_response_data["error_message"] = f"Decision with Karar No '{karar_no_for_search}' (Tipi: {original_karar_tipi.value}) not found by internal search."
1070 | return KikDocumentMarkdown(**default_error_response_data)
1071 |
1072 | decision_to_fetch = None
1073 | for dec_entry in search_results.decisions:
1074 | if dec_entry.karar_no_str == karar_no_for_search and dec_entry.karar_tipi == original_karar_tipi:
1075 | decision_to_fetch = dec_entry
1076 | break
1077 |
1078 | if not decision_to_fetch:
1079 | default_error_response_data["error_message"] = f"Karar No '{karar_no_for_search}' (Tipi: {original_karar_tipi.value}) not present with an exact match in first page of targeted search results."
1080 | return KikDocumentMarkdown(**default_error_response_data)
1081 |
1082 | decision_preview_event_target = decision_to_fetch.preview_event_target
1083 | logger.info(f"Found target decision. Using preview_event_target: {decision_preview_event_target} for Karar No: {decision_to_fetch.karar_no_str}")
1084 |
1085 | iframe_document_url_str = None
1086 | karar_id_param_from_url_on_doc_page = None
1087 | document_html_content = ""
1088 |
1089 | try:
1090 | logger.info(f"Evaluating __doPostBack on main page to show modal for: {decision_preview_event_target}")
1091 | # Bu evaluate, self.page (yani current_main_page) üzerinde çalışır
1092 | await current_main_page.evaluate(f"javascript:__doPostBack('{decision_preview_event_target}','')")
1093 | await current_main_page.wait_for_timeout(1000)
1094 | logger.info(f"Executed __doPostBack for {decision_preview_event_target} on main page.")
1095 |
1096 | iframe_selector = "iframe#iframe_detayPopUp"
1097 | modal_visible_selector = "div#detayPopUp.in"
1098 |
1099 | try:
1100 | logger.info(f"Waiting for modal '{modal_visible_selector}' to be visible and iframe '{iframe_selector}' src to be populated on main page...")
1101 | await current_main_page.wait_for_function(
1102 | f"""
1103 | () => {{
1104 | const modal = document.querySelector('{modal_visible_selector}');
1105 | const iframe = document.querySelector('{iframe_selector}');
1106 | const modalIsTrulyVisible = modal && (window.getComputedStyle(modal).display !== 'none');
1107 | return modalIsTrulyVisible &&
1108 | iframe && iframe.getAttribute('src') &&
1109 | iframe.getAttribute('src').includes('KurulKararGoster.aspx');
1110 | }}
1111 | """,
1112 | timeout=self.request_timeout / 2
1113 | )
1114 | iframe_src_value = await current_main_page.locator(iframe_selector).get_attribute("src")
1115 | logger.info(f"Iframe src populated: {iframe_src_value}")
1116 |
1117 | except PlaywrightTimeoutError:
1118 | logger.warning(f"Timeout waiting for KIK iframe src for {decision_preview_event_target}. Trying to parse from static content after presumed update.")
1119 | html_after_postback = await current_main_page.content()
1120 | # ... (fallback parsing öncekiyle aynı, default_error_response_data set edilir ve return edilir) ...
1121 | soup_after_postback = BeautifulSoup(html_after_postback, "html.parser")
1122 | detay_popup_div = soup_after_postback.find("div", {"id": "detayPopUp", "class": re.compile(r"\bin\b")})
1123 | if not detay_popup_div: detay_popup_div = soup_after_postback.find("div", {"id": "detayPopUp", "style": re.compile(r"display:\s*block", re.I)})
1124 | iframe_tag = detay_popup_div.find("iframe", {"id": "iframe_detayPopUp"}) if detay_popup_div else None
1125 | if iframe_tag and iframe_tag.has_attr("src") and iframe_tag["src"]: iframe_src_value = iframe_tag["src"]
1126 | else:
1127 | default_error_response_data["error_message"]="Timeout or failure finding decision content iframe URL after postback."
1128 | return KikDocumentMarkdown(**default_error_response_data)
1129 |
1130 | if not iframe_src_value or not iframe_src_value.strip():
1131 | default_error_response_data["error_message"]="Extracted iframe URL for decision content is empty."
1132 | return KikDocumentMarkdown(**default_error_response_data)
1133 |
1134 | # iframe_src_value göreceli bir URL ise, ana sayfanın URL'si ile birleştir
1135 | iframe_document_url_str = urllib.parse.urljoin(current_main_page.url, iframe_src_value)
1136 | logger.info(f"Constructed absolute iframe_document_url_str for goto: {iframe_document_url_str}") # Log this absolute URL
1137 | default_error_response_data["source_url"] = iframe_document_url_str
1138 |
1139 | parsed_url = urllib.parse.urlparse(iframe_document_url_str)
1140 | query_params = urllib.parse.parse_qs(parsed_url.query)
1141 | karar_id_param_from_url_on_doc_page = query_params.get("KararId", [None])[0]
1142 | default_error_response_data["karar_id_param_from_url"] = karar_id_param_from_url_on_doc_page
1143 | if not karar_id_param_from_url_on_doc_page:
1144 | default_error_response_data["error_message"]="KararId (KIK internal ID) not found in extracted iframe URL."
1145 | return KikDocumentMarkdown(**default_error_response_data)
1146 |
1147 | logger.info(f"Fetching KIK decision content from iframe URL using a new Playwright page: {iframe_document_url_str}")
1148 |
1149 | doc_page_for_content = await self.context.new_page()
1150 | try:
1151 | # `goto` metoduna MUTLAK URL verilmeli. Loglanan URL'nin mutlak olduğundan emin olalım.
1152 | await doc_page_for_content.goto(iframe_document_url_str, wait_until="domcontentloaded", timeout=self.request_timeout)
1153 | document_html_content = await doc_page_for_content.content()
1154 | except Exception as e_doc_page:
1155 | logger.error(f"Error navigating or getting content from doc_page ({iframe_document_url_str}): {e_doc_page}")
1156 | if doc_page_for_content and not doc_page_for_content.is_closed(): await doc_page_for_content.close()
1157 | default_error_response_data["error_message"]=f"Failed to load decision detail page: {e_doc_page}"
1158 | return KikDocumentMarkdown(**default_error_response_data)
1159 | finally:
1160 | if doc_page_for_content and not doc_page_for_content.is_closed():
1161 | await doc_page_for_content.close()
1162 |
1163 | soup_decision_detail = BeautifulSoup(document_html_content, "html.parser")
1164 | karar_content_span = soup_decision_detail.find("span", {"id": "ctl00_ContentPlaceHolder1_lblKarar"})
1165 | actual_decision_html = karar_content_span.decode_contents() if karar_content_span else document_html_content
1166 | full_markdown_content = self._convert_html_to_markdown_internal(actual_decision_html)
1167 |
1168 | if not full_markdown_content:
1169 | default_error_response_data["error_message"]="Markdown conversion failed or returned empty content."
1170 | try:
1171 | if await current_main_page.locator(self.MODAL_CLOSE_BUTTON_SELECTOR).is_visible(timeout=1000):
1172 | await current_main_page.locator(self.MODAL_CLOSE_BUTTON_SELECTOR).click()
1173 | except: pass
1174 | return KikDocumentMarkdown(**default_error_response_data)
1175 |
1176 | content_length = len(full_markdown_content); total_pages = math.ceil(content_length / self.DOCUMENT_MARKDOWN_CHUNK_SIZE) or 1
1177 | current_page_clamped = max(1, min(page_number, total_pages))
1178 | start_index = (current_page_clamped - 1) * self.DOCUMENT_MARKDOWN_CHUNK_SIZE
1179 | markdown_chunk = full_markdown_content[start_index : start_index + self.DOCUMENT_MARKDOWN_CHUNK_SIZE]
1180 |
1181 | try:
1182 | if await current_main_page.locator(self.MODAL_CLOSE_BUTTON_SELECTOR).is_visible(timeout=2000):
1183 | await current_main_page.locator(self.MODAL_CLOSE_BUTTON_SELECTOR).click()
1184 | await current_main_page.wait_for_selector(f"div#detayPopUp:not(.in)", timeout=5000)
1185 | except: pass
1186 |
1187 | return KikDocumentMarkdown(
1188 | retrieved_with_karar_id=karar_id_b64,
1189 | retrieved_karar_no=karar_no_for_search,
1190 | retrieved_karar_tipi=original_karar_tipi,
1191 | kararIdParam=karar_id_param_from_url_on_doc_page,
1192 | markdown_chunk=markdown_chunk, source_url=iframe_document_url_str,
1193 | current_page=current_page_clamped, total_pages=total_pages,
1194 | is_paginated=(total_pages > 1), full_content_char_count=content_length
1195 | )
1196 | except Exception as e:
1197 | logger.error(f"Error in get_decision_document_as_markdown for Karar ID {karar_id_b64}: {e}", exc_info=True)
1198 | default_error_response_data["error_message"] = f"General error: {str(e)}"
1199 | return KikDocumentMarkdown(**default_error_response_data)
1200 |
```