HEX
Server: nginx/1.18.0
System: Linux test-ipsremont 5.4.0-214-generic #234-Ubuntu SMP Fri Mar 14 23:50:27 UTC 2025 x86_64
User: ips (1000)
PHP: 8.0.30
Disabled: pcntl_alarm,pcntl_fork,pcntl_waitpid,pcntl_wait,pcntl_wifexited,pcntl_wifstopped,pcntl_wifsignaled,pcntl_wifcontinued,pcntl_wexitstatus,pcntl_wtermsig,pcntl_wstopsig,pcntl_signal,pcntl_signal_get_handler,pcntl_signal_dispatch,pcntl_get_last_error,pcntl_strerror,pcntl_sigprocmask,pcntl_sigwaitinfo,pcntl_sigtimedwait,pcntl_exec,pcntl_getpriority,pcntl_setpriority,pcntl_async_signals,pcntl_unshare,
Upload Files
File: /var/www/html/laravel/app/Helpers/helpers.php
<?php

use App\Models\Notam;

if (!function_exists('callGptProxi')) {
    /**
     * Forward a chat/completion request to the remote GPT proxy (or, when
     * model === 'local', to the local LLM n8n webhook via the proxy) and
     * record the per-model processing time on the related Notam.
     *
     * @param array  $data          Payload: key, input, model, prompt, messages,
     *                              temperature, max_tokens, object_id (Notam id), ...
     *                              Extra keys are forwarded as-is.
     * @param string $localEndpoint n8n webhook path used when model === 'local'.
     * @param string $type          When 'notam' and object_id is set, execution
     *                              time is persisted on that Notam.
     * @return mixed Decoded JSON response (object/array/scalar), or a
     *               JSON-encoded error string on transport failure.
     */
    function callGptProxi($data, $localEndpoint = 'notamNew', $type = 'notam')
    {
        $url = 'http://158.160.85.61/proxy.php?url=gpt.php';

        // Start of wall-clock measurement for processing-time bookkeeping.
        $initTime = microtime(true);

        $requestData = [
            'key' => $data['key'] ?? '',
            'input' => $data['input'] ?? '',
            'model' => $data['model'] ?? '',
            'prompt' => $data['prompt'] ?? ' ',
            'messages' => $data['messages'] ?? [],
            'temperature' => (float) ($data['temperature'] ?? 0.7),
            // max_tokens is a token count — an integer, not a float.
            'max_tokens' => (int) ($data['max_tokens'] ?? 4096),
            'host' => env('APP_URL'),
            'request_date' => date('Y-m-d H:i:s'),
        ];

        // Keep any extra caller-supplied keys; the normalized values above win.
        $requestData = array_merge($data, $requestData);

        if ($requestData['model'] === 'local') {
            // Forward to the local model via the n8n webhook; no API key needed.
            unset($requestData['key']);
            $requestData['url'] = env('N8N_PROTOCOL') . '://' . env('N8N_LOCAL_LLM_DOMAIN_NAME') . ':' . env('N8N_PORT') . '/webhook/' . $localEndpoint;
            $url = 'http://158.160.85.61/proxy.php?url=localLlm.php';
        }

        $curl = curl_init();

        curl_setopt($curl, CURLOPT_TIMEOUT, 300);
        curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($curl, CURLOPT_URL, $url);
        curl_setopt($curl, CURLOPT_HTTPHEADER, ['Content-Type:application/json']);
        curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'POST');
        curl_setopt($curl, CURLOPT_POSTFIELDS, json_encode($requestData));
        $result = curl_exec($curl);

        $errNo = curl_errno($curl);
        $errors = curl_error($curl);

        // Timeout: close the handle before the early return (it leaked before).
        if ($errNo === CURLE_OPERATION_TIMEDOUT) {
            curl_close($curl);
            return json_encode(['error' => 'Request timed out']);
        }

        if (!$result) {
            // Transport failure or empty body — surface it as a JSON error string.
            $result = json_encode(['error' => $errors ?: 'Empty result']);
        } else {
            $result = json_decode($result);
            // Some endpoints wrap the payload in a one-element JSON array — unwrap.
            if (is_array($result) && count($result) === 1) {
                $result = $result[0];
            }
        }

        curl_close($curl);

        $executionTime = microtime(true) - $initTime;
        if ($type === 'notam' && !empty($data['object_id'])) {
            $notam = Notam::find($data['object_id']);
            if ($notam) {
                if ($requestData['model'] === 'local') {
                    $notam->local_processing_time = $executionTime;
                } elseif (str_contains($requestData['model'], '4.1-mini')) {
                    $notam->gpt41_processing_time = $executionTime;
                } else {
                    $notam->gpt_processing_time = $executionTime;
                }
                // saveQuietly(): persist without firing Eloquent model events.
                $notam->saveQuietly();
            } else {
                echo "Notam with ID {$data['object_id']} not found to save processing time.\n";
            }
        }

        return $result;
    }
}

if (!function_exists('callGptProxiImage')) {
    /**
     * Forward an image-processing request to the remote GPT image proxy.
     *
     * @param array $data Payload: key, image, model, prompt, temperature, max_tokens.
     * @return mixed Decoded JSON response (object/array/scalar), or a
     *               JSON-encoded error string on transport failure.
     */
    function callGptProxiImage($data)
    {
        $url = 'http://158.160.85.61/proxy.php?url=gpt-image-processing.php';

        $requestData = [
            'key' => $data['key'] ?? '',
            // Null-safe: the old `$data['image'] ? ...` warned when the key was absent.
            'image' => !empty($data['image']) ? $data['image'] : null,
            'model' => $data['model'] ?? '',
            'prompt' => $data['prompt'] ?? ' ',
            'temperature' => (float) ($data['temperature'] ?? 0.7),
            // max_tokens is a token count — an integer, not a float.
            'max_tokens' => (int) ($data['max_tokens'] ?? 4096),
        ];

        $curl = curl_init();

        curl_setopt($curl, CURLOPT_TIMEOUT, 300);
        curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($curl, CURLOPT_URL, $url);
        curl_setopt($curl, CURLOPT_HTTPHEADER, ['Content-Type:application/json']);
        curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'POST');
        curl_setopt($curl, CURLOPT_POSTFIELDS, json_encode($requestData));
        $result = curl_exec($curl);

        $errNo = curl_errno($curl);
        $errors = curl_error($curl);

        // Timeout: close the handle before the early return (it leaked before).
        if ($errNo === CURLE_OPERATION_TIMEDOUT) {
            curl_close($curl);
            return json_encode(['error' => 'Request timed out']);
        }

        if (!$result) {
            // Transport failure or empty body — surface it as a JSON error string.
            $result = json_encode(['error' => $errors ?: 'Empty result']);
        } else {
            $result = json_decode($result);
            // Some endpoints wrap the payload in a one-element JSON array — unwrap.
            if (is_array($result) && count($result) === 1) {
                $result = $result[0];
            }
        }

        curl_close($curl);

        return $result;
    }
}

if (!function_exists('calculateGptCost')) {
    /**
     * Compute the USD cost of an LLM call from its token usage.
     *
     * Accepts either Chat-Completions-style fields (model, prompt_tokens,
     * completion_tokens) or Responses-API-style fields (model_name,
     * input_tokens, output_tokens), which are normalized first.
     *
     * @param object $object Usage object carrying model name and token counts.
     * @return float|int Total cost in USD, or 0 when the model is unknown or
     *                   required fields are missing.
     */
    function calculateGptCost($object)
    {
        // Cost per 1 million tokens (USD), keyed by lowercase model name.
        $allCosts = [
            'gpt-5.2' => ['input' => 1.75, 'output' => 14.00],
            'gpt-5.1' => ['input' => 1.25, 'output' => 10.00],
            'gpt-5' => ['input' => 1.25, 'output' => 10.00],
            'gpt-5-mini' => ['input' => 0.25, 'output' => 2.00],
            'gpt-5-nano' => ['input' => 0.05, 'output' => 0.40],
            'gpt-5.2-chat-latest' => ['input' => 1.75, 'output' => 14.00],
            'gpt-5.1-chat-latest' => ['input' => 1.25, 'output' => 10.00],
            'gpt-5-chat-latest' => ['input' => 1.25, 'output' => 10.00],
            'gpt-5.1-codex-max' => ['input' => 1.25, 'output' => 10.00],
            'gpt-5.1-codex' => ['input' => 1.25, 'output' => 10.00],
            'gpt-5-codex' => ['input' => 1.25, 'output' => 10.00],
            'gpt-5.2-pro' => ['input' => 21.00, 'output' => 168.00],
            'gpt-5-pro' => ['input' => 15.00, 'output' => 120.00],
            'gpt-4.1' => ['input' => 2.00, 'output' => 8.00],
            'gpt-4.1-mini' => ['input' => 0.40, 'output' => 1.60],
            'gpt-4.1-nano' => ['input' => 0.10, 'output' => 0.40],
            'gpt-4o' => ['input' => 2.50, 'output' => 10.00],
            'gpt-4o-2024-05-13' => ['input' => 5.00, 'output' => 15.00],
            'gpt-4o-mini' => ['input' => 0.15, 'output' => 0.60],
        ];

        // Normalize Responses-API-style field names to the ones used below.
        if (!property_exists($object, 'prompt_tokens') && property_exists($object, 'input_tokens')) {
            $object->prompt_tokens = $object->input_tokens;
            $object->completion_tokens = $object->output_tokens;
        }

        if (!property_exists($object, 'model') && property_exists($object, 'model_name')) {
            $object->model = $object->model_name;
        }

        // Guard clauses: the old code read possibly-undefined properties directly,
        // which emits warnings on PHP 8; isset()/empty() are warning-free.
        if (empty($object->model) || !isset($object->prompt_tokens, $object->completion_tokens)) {
            return 0;
        }

        $modelKey = strtolower($object->model);
        if (!isset($allCosts[$modelKey])) {
            // Unknown model — no pricing data available.
            return 0;
        }

        $inputCost = round(($object->prompt_tokens / 1000000) * $allCosts[$modelKey]['input'], 6);
        $outputCost = round(($object->completion_tokens / 1000000) * $allCosts[$modelKey]['output'], 6);

        return $inputCost + $outputCost;
    }
}