{
    "slug": "db_query_result_streaming",
    "term": "Database Query Result Streaming",
    "category": "database",
    "difficulty": "intermediate",
    "short": "Processing large result sets row-by-row without loading the entire dataset into memory - essential for PHP CLI scripts handling millions of rows.",
    "long": "Query result streaming fetches rows incrementally from the database rather than buffering the entire result set in PHP memory. In mysqli this means using MYSQLI_USE_RESULT mode or unbuffered queries. The default MYSQLI_STORE_RESULT buffers all rows in PHP memory before returning - fine for 100 rows, fatal for 10 million. With streaming, you fetch one row at a time from the MySQL connection, process it, then discard it before fetching the next. Memory usage stays constant regardless of result size. The trade-off: the connection remains locked until you finish reading all rows or call mysqli_free_result() - you cannot run another query on the same connection mid-stream. For PDO set PDO::MYSQL_ATTR_USE_BUFFERED_QUERY to false. Be aware that slow row consumption can trip the MySQL server's net_write_timeout while a result is being streamed. This pattern is essential for data exports, ETL pipelines, and batch processing in queue workers. PHP generators (available since PHP 5.5) combine elegantly with streaming - yield each row from a generator function for memory-efficient iteration with foreach.",
    "aliases": [
        "unbuffered query",
        "streaming result set",
        "row-by-row fetch",
        "MYSQLI_USE_RESULT"
    ],
    "tags": [
        "database",
        "mysql",
        "mysqli",
        "performance",
        "memory-efficiency",
        "php8"
    ],
    "misconception": "Unbuffered queries are always faster than buffered ones. Streaming can be slower per-row because rows are pulled from the open connection one at a time - it is a memory optimisation, not a speed optimisation. For small result sets, buffered queries are faster.",
    "why_it_matters": "Without streaming, a query returning 5 million rows buffers all data into PHP memory before your code even executes, causing fatal memory exhaustion errors in CLI scripts and queue workers that process large datasets.",
    "common_mistakes": [
        "Running another query on the same connection before consuming all streamed rows - causes 'Commands out of sync' error.",
        "Using MYSQLI_USE_RESULT with mysqli_num_rows() - row count is unavailable until all rows are fetched.",
        "Forgetting to call mysqli_free_result() when stopping iteration early - connection remains locked.",
        "Streaming in web requests where connection time is limited - streaming is for long-running CLI processes.",
        "Combining unbuffered queries with transactions that timeout - the transaction may rollback mid-stream."
    ],
    "when_to_use": [
        "CLI scripts processing millions of rows where memory is constrained.",
        "Data export jobs that write rows directly to file/CSV without storing in memory.",
        "ETL pipelines transforming large datasets row by row.",
        "Queue workers processing batch jobs with large query results."
    ],
    "avoid_when": [
        "Web requests with short timeouts - streaming ties up the connection for the entire duration.",
        "Small result sets under 10,000 rows - buffered queries are simpler and faster.",
        "When you need mysqli_num_rows() before iteration - row count unavailable in streaming mode.",
        "Multiple concurrent queries needed on the same connection - streaming locks the connection."
    ],
    "related": [
        "db_n_plus_one",
        "mysql_limit_offset",
        "db_keyset_pagination",
        "memory_exhaustion"
    ],
    "prerequisites": [
        "db_indexes",
        "mysql_limit_offset"
    ],
    "refs": [
        "https://www.php.net/manual/en/mysqlinfo.concepts.buffering.php",
        "https://www.php.net/manual/en/mysqli.use-result.php",
        "https://dev.mysql.com/doc/c-api/8.0/en/mysql-use-result.html"
    ],
    "bad_code": "<?php\n// FATAL: Loads 10 million rows into PHP memory before loop starts\n$mysqli = new mysqli('localhost', 'user', 'pass', 'db');\n$result = $mysqli->query('SELECT * FROM huge_table'); // Default: MYSQLI_STORE_RESULT\n\n// All 10M rows already in memory - too late\nwhile ($row = $result->fetch_assoc()) {\n    processRow($row);\n}\n\n// Memory exhausted before loop even begins:\n// Fatal error: Allowed memory size of 134217728 bytes exhausted",
    "good_code": "<?php\ndeclare(strict_types=1);\n\n$mysqli = new mysqli('localhost', 'user', 'pass', 'db');\n$mysqli->options(MYSQLI_OPT_INT_AND_FLOAT_NATIVE, true);\n\n// MYSQLI_USE_RESULT = streaming mode - rows fetched on demand\n$result = $mysqli->query(\n    'SELECT * FROM huge_table',\n    MYSQLI_USE_RESULT\n);\n\nif ($result === false) {\n    throw new RuntimeException($mysqli->error);\n}\n\n// Memory stays constant - one row at a time\nwhile ($row = $result->fetch_assoc()) {\n    processRow($row);\n    // Row is discarded after processing, memory freed\n}\n\n// CRITICAL: Free result before any other query on this connection\n$result->free();\n\n// Generator pattern (PHP 5.5+) for cleaner iteration\nfunction streamQuery(mysqli $db, string $sql): Generator\n{\n    $result = $db->query($sql, MYSQLI_USE_RESULT);\n    if ($result === false) {\n        throw new RuntimeException($db->error);\n    }\n    \n    try {\n        while ($row = $result->fetch_assoc()) {\n            yield $row;\n        }\n    } finally {\n        $result->free();\n    }\n}\n\n// Usage - memory-efficient foreach\nforeach (streamQuery($mysqli, 'SELECT * FROM huge_table') as $row) {\n    processRow($row);\n}",
    "quick_fix": "Pass MYSQLI_USE_RESULT as second argument to mysqli::query() and always call free() before running another query on the same connection",
    "severity": "high",
    "effort": "low",
    "created": "2026-05-03",
    "updated": "2026-05-03",
    "citation": {
        "canonical_url": "https://codeclaritylab.com/glossary/db_query_result_streaming",
        "html_url": "https://codeclaritylab.com/glossary/db_query_result_streaming",
        "json_url": "https://codeclaritylab.com/glossary/db_query_result_streaming.json",
        "source": "CodeClarityLab Glossary",
        "author": "P.F.",
        "author_url": "https://pfmedia.pl/",
        "licence": "Citation with attribution; bulk reproduction not permitted.",
        "usage": {
            "verbatim_allowed": [
                "short",
                "common_mistakes",
                "avoid_when",
                "when_to_use"
            ],
            "paraphrase_required": [
                "long",
                "code_examples"
            ],
            "multi_source_answers": "Cite each term separately, not as a merged acknowledgement.",
            "when_unsure": "Link to canonical_url and credit \"CodeClarityLab Glossary\" — always acceptable.",
            "attribution_examples": {
                "inline_mention": "According to CodeClarityLab: <quote>",
                "markdown_link": "[Database Query Result Streaming](https://codeclaritylab.com/glossary/db_query_result_streaming) (CodeClarityLab)",
                "footer_credit": "Source: CodeClarityLab Glossary — https://codeclaritylab.com/glossary/db_query_result_streaming"
            }
        }
    }
}