graph LR
    %% Primary Sections
    Types["fa:fa-cubes Data Types"]
    Features["fa:fa-star Features"]
    UseCases["fa:fa-lightbulb Use Cases"]

    style Types fill:#2E86C1,stroke:#333,stroke-width:2px
    style Features fill:#27AE60,stroke:#333,stroke-width:2px
    style UseCases fill:#8E44AD,stroke:#333,stroke-width:2px

    %% Data Types
    Types --> |"Simple values"| Strings["fa:fa-font Strings"]
    Types --> |"Ordered lists"| Lists["fa:fa-list Lists"]
    Types --> |"Unique members"| Sets["fa:fa-object-group Sets"]
    Types --> |"Scored members"| SortedSets["fa:fa-sort Sorted Sets"]
    Types --> |"Field-value pairs"| Hashes["fa:fa-hashtag Hashes"]

    %% Features
    Features --> |"Auto expiration"| Expiration["fa:fa-clock TTL"]
    Features --> |"Data durability"| Persistence["fa:fa-save Persistence"]
    Features --> |"Thread-safe ops"| Atomic["fa:fa-atom Atomic Operations"]
    Features --> |"Messaging"| PubSub["fa:fa-broadcast-tower Pub/Sub"]

    %% Use Cases
    UseCases --> |"Fast access"| Cache["fa:fa-bolt Caching"]
    UseCases --> |"Real-time stats"| CounterUse["fa:fa-calculator Counters"]
    UseCases --> |"User data"| Session["fa:fa-user-clock Sessions"]
    UseCases --> |"Task handling"| Queue["fa:fa-tasks Queues"]

        
flowchart LR
    subgraph lazyCaching["🐢 Lazy Caching - Cache-aside"]
        style lazyCaching fill:#e6f7ff,stroke:#69c0ff
             subgraph App[" "]
                app["📱 Application"]
                db["🗄️ Database"]
             end

        subgraph cache2[" "]
            cache["💾 Redis Cache"]
        end


        app --> |"1 Check cache"| cache
        cache --> |"2 Cache miss"| app
        app --> |"3 Query database"| db
        db --> |"4 Return data"| app
        app --> |"5 Update cache"| cache
    end
            
flowchart TD
    subgraph writeThroughCaching["✍️ Write-through Caching"]
        style writeThroughCaching fill:#f6ffed,stroke:#95de64
        app["📱 Application"]
        subgraph datalayer[" "]
         cache["💾 Redis Cache"]
         db["🗄️ Database"]
        end



        app --> |"1 Write data"| cache
        app --> |"1 Write data"| db
        cache --> |"2 Acknowledge"| app
        db --> |"2 Acknowledge"| app
    end
            
graph LR
    subgraph pubSub["📡 Redis Pub/Sub System"]
        publisher["👤 Publisher"]
        subscriber1["👥 Subscriber 1"]
        subscriber2["👥 Subscriber 2"]
        channel1["📢 Channel 1"]
        channel2["📢 Channel 2"]
    end

    publisher -->|"Publish"| channel1
    publisher -->|"Publish"| channel2
    channel1 -->|"Broadcast"| subscriber1
    channel1 -->|"Broadcast"| subscriber2
    channel2 -->|"Broadcast"| subscriber1

    style pubSub fill:#f5f5f5,stroke:#333,stroke-width:2px
    style publisher fill:#bbdefb,stroke:#1976d2,stroke-width:2px
    style subscriber1 fill:#c8e6c9,stroke:#388e3c,stroke-width:2px
    style subscriber2 fill:#c8e6c9,stroke:#388e3c,stroke-width:2px
    style channel1 fill:#ffe0b2,stroke:#f57c00,stroke-width:2px
    style channel2 fill:#ffe0b2,stroke:#f57c00,stroke-width:2px

            
                graph LR
    subgraph transactionFlow["fa:fa-exchange-alt Redis Transaction Flow"]
        watch["fa:fa-eye WATCH key(s)"] -->|"Optional"| startTransaction
        startTransaction["fa:fa-play-circle MULTI"] --> queueOperations
        subgraph queueOperations["fa:fa-tasks Queue Operations"]
            operation1["fa:fa-terminal Operation 1"] --> operation2["fa:fa-terminal Operation 2"]
            operation2 --> operation3["fa:fa-terminal ... Operation N"]
        end
        queueOperations -->|"Execute"| executeTransaction["fa:fa-play EXEC"]
        queueOperations -->|"Cancel"| discardTransaction["fa:fa-ban DISCARD"]
        executeTransaction -->|"Success"| logSuccess["fa:fa-check-circle Transaction Complete"]
        executeTransaction -->|"Error"| logError["fa:fa-exclamation-circle Error Logged"]
        executeTransaction -->|"WATCH triggered"| watchTriggered["fa:fa-times-circle Transaction Aborted"]
    end

    classDef default stroke:#333,stroke-width:2px;
    classDef watch fill:#e1bee7,stroke:#8e24aa,stroke-width:2px;
    classDef multi fill:#bbdefb,stroke:#1976d2,stroke-width:2px;
    classDef queue fill:#c8e6c9,stroke:#388e3c,stroke-width:2px;
    classDef exec fill:#fff9c4,stroke:#fbc02d,stroke-width:2px;
    classDef discard fill:#ffcdd2,stroke:#d32f2f,stroke-width:2px;
    classDef success fill:#dcedc8,stroke:#689f38,stroke-width:2px;
    classDef failure fill:#ffcdd2,stroke:#d32f2f,stroke-width:2px;

    class watch watch;
    class startTransaction multi;
    class operation1,operation2,operation3 queue;
    class executeTransaction exec;
    class discardTransaction discard;
    class logSuccess success;
    class logError,watchTriggered failure;
            
             `
## Lazy Cache
Lazy caching checks the cache first and only queries the database on a cache miss.

```javascript
app.get('/user/:id', async (req, res) => {
  const { id } = req.params;
  const cacheKey = \`user:\${id}\`;

  try {
    let userData = await redis.get(cacheKey);

    if (!userData) {
      const result = await pool.query('SELECT * FROM users WHERE id = $1', [id]);
      userData = result.rows[0];

      if (userData) {
        await redis.setex(cacheKey, 3600, JSON.stringify(userData));
      }
    } else {
      userData = JSON.parse(userData);
    }

    res.json(userData || { error: 'User not found' });
  } catch (error) {
    console.error('Error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
});
```

Pros:
- Reduced database load
- Faster response times for cached data
- Only caches data that is actually requested

Cons:
- Initial requests for uncached data are slower
- Potential for stale data if not properly invalidated
- Complexity in managing cache consistency

## Write-through
Write-through caching updates both the cache and the database simultaneously.

```javascript
app.post('/user', async (req, res) => {
  const { userData } = req.body;
  const userId = generateUniqueId();
  const cacheKey = \`user:\${userId}\`;

  try {
    await Promise.all([
      redis.setex(cacheKey, 3600, JSON.stringify(userData)),
      pool.query('INSERT INTO users (id, data) VALUES ($1, $2)', [userId, userData])
    ]);

    res.json({ message: 'User created successfully', userId });
  } catch (error) {
    console.error('Error:', error);
    res.status(500).json({ error: 'Internal server error' });
  }
});
```

Pros:
- Cache is always up-to-date with the database
- Simplified read operations

Cons:
- Increased write latency
- Higher resource usage for writes
- May cache infrequently accessed data

## Lua Scripting
Redis supports Lua scripting for complex operations that require atomic execution.

```javascript
const reserveStockScript = `
  local stock = tonumber(redis.call('GET', KEYS[1]) or 0)
  local reserved = tonumber(redis.call('GET', KEYS[2]) or 0)

  if stock - reserved <= 0 then
    return {err="Out of stock"}
  end

  redis.call('INCR', KEYS[2])
  return 1
`;

async function reserveStock(userId, productId) {
  const result = await redis.eval(
    reserveStockScript,
    2,
    `product:${productId}:stock`,
    `product:${productId}:stock_reserved`
  );

  if (result && result.err) {
    throw new Error(result.err);
  }

  return true; // Successfully reserved
}
```
`;