Skip to content

Supabase - Open Source Firebase Alternative

Supabase stands as the leading open-source alternative to Firebase, built on top of PostgreSQL and designed to provide developers with a complete Backend-as-a-Service (BaaS) platform. Founded in 2020, Supabase combines the power and flexibility of PostgreSQL with modern development tools, real-time subscriptions, built-in authentication, and auto-generated APIs. Unlike Firebase's NoSQL approach, Supabase leverages the full power of SQL, offering developers familiar relational database concepts while maintaining the ease of use expected from modern cloud platforms. This PostgreSQL foundation provides advanced features like complex queries, joins, views, functions, and triggers, making it an excellent choice for applications requiring sophisticated data relationships and operations.

Getting Started with Supabase

Project Setup and Configuration

bash
# Install Supabase CLI
npm install -g supabase

# Login to Supabase
supabase login

# Initialize local project
supabase init

# Start local development
supabase start

# Install Supabase JavaScript client
npm install @supabase/supabase-js

// Initialize Supabase client
// Initialize Supabase client
import { createClient } from '@supabase/supabase-js'

// Read project credentials from the environment instead of hard-coding them.
// (The original snippet declared `supabaseUrl`/`supabase` twice with `const`
// in the same module scope, which is a SyntaxError if pasted as one file.)
// The NEXT_PUBLIC_ prefix exposes these to the browser bundle, which is safe
// ONLY for the anon (public) key — never the service-role key.
const supabaseUrl = process.env.NEXT_PUBLIC_SUPABASE_URL
const supabaseAnonKey = process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY

if (!supabaseUrl || !supabaseAnonKey) {
  throw new Error('Missing NEXT_PUBLIC_SUPABASE_URL or NEXT_PUBLIC_SUPABASE_ANON_KEY')
}

const supabase = createClient(supabaseUrl, supabaseAnonKey, {
  auth: {
    autoRefreshToken: true,   // refresh the JWT before it expires
    persistSession: true,     // keep the session in storage across reloads
    detectSessionInUrl: true  // pick up OAuth/magic-link redirect fragments
  },
  realtime: {
    params: {
      eventsPerSecond: 10     // client-side throttle for realtime messages
    }
  }
})

export default supabase

Database Schema and Tables

sql
-- Create tables using SQL in Supabase Dashboard or CLI

-- Users table (extends auth.users)
-- One row per authenticated user. The PK doubles as the FK to auth.users,
-- so deleting the auth user cascades and removes the profile.
CREATE TABLE public.profiles (
  id UUID REFERENCES auth.users(id) ON DELETE CASCADE,
  username TEXT UNIQUE,
  full_name TEXT,
  avatar_url TEXT,
  bio TEXT,
  website TEXT,
  created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
  
  PRIMARY KEY (id),
  CONSTRAINT username_length CHECK (char_length(username) >= 3)
);

-- Posts table
-- Deleting a profile cascades to its posts (and from there to comments).
CREATE TABLE public.posts (
  id UUID DEFAULT gen_random_uuid() PRIMARY KEY,
  title TEXT NOT NULL,
  content TEXT,
  excerpt TEXT,
  slug TEXT UNIQUE,
  author_id UUID REFERENCES public.profiles(id) ON DELETE CASCADE,
  published BOOLEAN DEFAULT FALSE,
  featured BOOLEAN DEFAULT FALSE,
  tags TEXT[],
  metadata JSONB,
  created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
  published_at TIMESTAMP WITH TIME ZONE,
  
  CONSTRAINT title_length CHECK (char_length(title) >= 1)
);

-- Comments table
-- parent_id self-reference enables threaded replies; deleting a parent
-- comment cascades to the whole subtree.
CREATE TABLE public.comments (
  id UUID DEFAULT gen_random_uuid() PRIMARY KEY,
  post_id UUID REFERENCES public.posts(id) ON DELETE CASCADE,
  author_id UUID REFERENCES public.profiles(id) ON DELETE CASCADE,
  content TEXT NOT NULL,
  parent_id UUID REFERENCES public.comments(id) ON DELETE CASCADE,
  created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
  updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
  
  CONSTRAINT content_length CHECK (char_length(content) >= 1)
);

-- Create indexes for better performance
CREATE INDEX idx_posts_author_id ON public.posts(author_id);
-- Composite index matches the common "published, newest first" listing query.
CREATE INDEX idx_posts_published ON public.posts(published, created_at DESC);
-- GIN index supports array-overlap/containment searches on tags.
CREATE INDEX idx_posts_tags ON public.posts USING GIN(tags);
CREATE INDEX idx_comments_post_id ON public.comments(post_id, created_at);
CREATE INDEX idx_comments_author_id ON public.comments(author_id);

-- Create updated_at trigger function
-- Stamps updated_at on every row UPDATE; shared by all three tables below.
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
  NEW.updated_at = NOW();
  RETURN NEW;
END;
$$ language 'plpgsql';

-- Apply trigger to tables
CREATE TRIGGER update_profiles_updated_at 
  BEFORE UPDATE ON public.profiles 
  FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER update_posts_updated_at 
  BEFORE UPDATE ON public.posts 
  FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CREATE TRIGGER update_comments_updated_at 
  BEFORE UPDATE ON public.comments 
  FOR EACH ROW EXECUTE FUNCTION update_updated_at_column();

CRUD Operations with Supabase

Create Operations

javascript
// Insert single record
/**
 * Insert one row into `posts` and return the created record.
 * @param {Object} postData - title, content, excerpt, slug, authorId, tags, metadata
 * @returns {Promise<Object>} the inserted row
 * @throws rethrows any Supabase error after logging it
 */
const createPost = async (postData) => {
  try {
    const row = {
      title: postData.title,
      content: postData.content,
      excerpt: postData.excerpt,
      slug: postData.slug,
      author_id: postData.authorId,
      tags: postData.tags || [],
      metadata: postData.metadata || {}
    }
    const res = await supabase.from('posts').insert(row).select().single()
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error creating post:', err)
    throw err
  }
}

// Insert multiple records
/** Bulk-insert rows into `posts`; returns all created records. */
const createMultiplePosts = async (postsData) => {
  try {
    const res = await supabase.from('posts').insert(postsData).select()
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error creating posts:', err)
    throw err
  }
}

// Upsert (insert or update)
/** Insert-or-update a profile row keyed on `id`; returns the saved row. */
const upsertProfile = async (profileData) => {
  try {
    const record = {
      id: profileData.id,
      username: profileData.username,
      full_name: profileData.fullName,
      bio: profileData.bio,
      avatar_url: profileData.avatarUrl
    }
    const { data: saved, error: failure } = await supabase
      .from('profiles')
      .upsert(record)
      .select()
      .single()
    if (failure) throw failure
    return saved
  } catch (err) {
    console.error('Error upserting profile:', err)
    throw err
  }
}

// Insert with conflict resolution
/**
 * Insert a post, translating the Postgres unique-violation error code
 * (23505, e.g. a duplicate slug) into a friendlier Error for callers.
 */
const createUniquePost = async (postData) => {
  try {
    const res = await supabase.from('posts').insert(postData).select().single()
    if (res.error) {
      if (res.error.code === '23505') { // Unique constraint violation
        throw new Error('A post with this slug already exists')
      }
      throw res.error
    }
    return res.data
  } catch (err) {
    console.error('Error creating unique post:', err)
    throw err
  }
}

Read Operations

javascript
// Select all records
/** Fetch every row from `posts` (no filter, no ordering). */
const getAllPosts = async () => {
  try {
    const { data: rows, error: failure } = await supabase.from('posts').select('*')
    if (failure) throw failure
    return rows
  } catch (err) {
    console.error('Error fetching posts:', err)
    throw err
  }
}

// Select specific columns
/** Fetch a lightweight projection of posts: id, title and creation time. */
const getPostTitles = async () => {
  try {
    const res = await supabase.from('posts').select('id, title, created_at')
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error fetching post titles:', err)
    throw err
  }
}

// Select with joins
/** Fetch posts joined (via the author_id FK) with each author's public profile fields. */
const getPostsWithAuthors = async () => {
  try {
    const res = await supabase
      .from('posts')
      .select(`
        id,
        title,
        content,
        created_at,
        profiles:author_id (
          username,
          full_name,
          avatar_url
        )
      `)
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error fetching posts with authors:', err)
    throw err
  }
}

// Select with filtering
/** Fetch all published posts, newest first. */
const getPublishedPosts = async () => {
  try {
    const { data: rows, error: failure } = await supabase
      .from('posts')
      .select('*')
      .eq('published', true)
      .order('created_at', { ascending: false })
    if (failure) throw failure
    return rows
  } catch (err) {
    console.error('Error fetching published posts:', err)
    throw err
  }
}

// Complex filtering
/**
 * Fetch posts matching an optional set of filters.
 *
 * Fix over the original: `filters.search` was interpolated straight into a
 * PostgREST `.or()` filter string, where `,` separates conditions and
 * `(`/`)` group them — a search term containing those characters produced a
 * malformed filter (and let callers inject extra conditions). Reserved
 * characters are now stripped before interpolation.
 *
 * @param {Object} filters - published, authorId, tags, search, dateFrom,
 *   dateTo, sortBy, sortOrder ('asc' | anything else = desc), limit, offset
 * @returns {Promise<Array>} matching rows
 */
const getFilteredPosts = async (filters) => {
  try {
    let query = supabase
      .from('posts')
      .select(`
        id,
        title,
        excerpt,
        created_at,
        tags,
        profiles:author_id (
          username,
          full_name
        )
      `)

    // Apply filters conditionally
    if (filters.published !== undefined) {
      query = query.eq('published', filters.published)
    }

    if (filters.authorId) {
      query = query.eq('author_id', filters.authorId)
    }

    if (filters.tags && filters.tags.length > 0) {
      query = query.overlaps('tags', filters.tags)
    }

    if (filters.search) {
      // Strip PostgREST filter metacharacters so the term cannot break or
      // extend the or() expression.
      const term = String(filters.search).replace(/[,()]/g, ' ')
      query = query.or(`title.ilike.%${term}%,content.ilike.%${term}%`)
    }

    if (filters.dateFrom) {
      query = query.gte('created_at', filters.dateFrom)
    }

    if (filters.dateTo) {
      query = query.lte('created_at', filters.dateTo)
    }

    // Apply sorting (created_at is indexed; only 'asc' sorts ascending)
    query = query.order(filters.sortBy || 'created_at', {
      ascending: filters.sortOrder === 'asc'
    })

    // Apply pagination; range() overrides limit() when both are present
    if (filters.limit) {
      query = query.limit(filters.limit)
    }

    if (filters.offset) {
      query = query.range(filters.offset, filters.offset + (filters.limit || 10) - 1)
    }

    const { data, error } = await query

    if (error) throw error
    return data
  } catch (error) {
    console.error('Error fetching filtered posts:', error)
    throw error
  }
}

// Pagination with count
/**
 * Fetch one page of published posts plus the exact total row count.
 *
 * Fixes over the original: `page` is clamped to >= 1 (page 0 or negative
 * produced a negative range) and `count` is defaulted to 0 before the
 * division — supabase-js types it as possibly null.
 *
 * @param {number} page - 1-based page number
 * @param {number} pageSize - rows per page
 * @returns {Promise<{data: Array, count: number, page: number, pageSize: number, totalPages: number}>}
 */
const getPaginatedPosts = async (page = 1, pageSize = 10) => {
  try {
    const safePage = Math.max(1, page)
    const from = (safePage - 1) * pageSize
    const to = from + pageSize - 1

    const { data, error, count } = await supabase
      .from('posts')
      .select('*', { count: 'exact' })
      .eq('published', true)
      .order('created_at', { ascending: false })
      .range(from, to)

    if (error) throw error

    const total = count ?? 0
    return {
      data,
      count: total,
      page: safePage,
      pageSize,
      totalPages: Math.ceil(total / pageSize)
    }
  } catch (error) {
    console.error('Error fetching paginated posts:', error)
    throw error
  }
}

// Single record by ID
/**
 * Fetch a single post by id together with its author profile and all
 * comments (each comment joined with its own author profile).
 * Throws if zero or multiple rows match (.single()).
 */
const getPostById = async (id) => {
  try {
    const res = await supabase
      .from('posts')
      .select(`
        *,
        profiles:author_id (
          username,
          full_name,
          avatar_url
        ),
        comments (
          id,
          content,
          created_at,
          profiles:author_id (
            username,
            full_name,
            avatar_url
          )
        )
      `)
      .eq('id', id)
      .single()
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error fetching post:', err)
    throw err
  }
}

Update Operations

javascript
// Update single record
/** Apply a partial update to one post by id; returns the updated row. */
const updatePost = async (id, updates) => {
  try {
    const res = await supabase
      .from('posts')
      .update(updates)
      .eq('id', id)
      .select()
      .single()
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error updating post:', err)
    throw err
  }
}

// Update multiple records
/** Mark every post in `postIds` as published, stamping published_at with the current time. */
const publishMultiplePosts = async (postIds) => {
  try {
    const patch = {
      published: true,
      published_at: new Date().toISOString()
    }
    const { data: rows, error: failure } = await supabase
      .from('posts')
      .update(patch)
      .in('id', postIds)
      .select()
    if (failure) throw failure
    return rows
  } catch (err) {
    console.error('Error publishing posts:', err)
    throw err
  }
}

// Conditional update
// Increment the `views` counter stored inside posts.metadata (JSONB).
// NOTE(review): this is a non-atomic read-modify-write — two concurrent
// callers can read the same count and one increment is lost. Making it safe
// requires a server-side RPC (e.g. jsonb_set in a SQL function); confirm
// whether occasional lost updates are acceptable for a view counter.
const incrementPostViews = async (id) => {
  try {
    // First get current views count
    const { data: currentPost, error: fetchError } = await supabase
      .from('posts')
      .select('metadata')
      .eq('id', id)
      .single()

    if (fetchError) throw fetchError

    // metadata may be null for posts created without it; default to 0 views.
    const currentViews = currentPost.metadata?.views || 0

    // Write back the whole metadata object with views bumped by one.
    const { data, error } = await supabase
      .from('posts')
      .update({ 
        metadata: { 
          ...currentPost.metadata, 
          views: currentViews + 1 
        } 
      })
      .eq('id', id)
      .select()
      .single()

    if (error) throw error
    return data
  } catch (error) {
    console.error('Error incrementing post views:', error)
    throw error
  }
}

// Update with RLS (Row Level Security) considerations
/**
 * Apply a partial update to one profile row. With RLS enabled, the
 * "Users can update own profile" policy means this only succeeds when
 * the signed-in user's auth.uid() equals `userId`.
 */
const updateUserProfile = async (userId, updates) => {
  try {
    const res = await supabase
      .from('profiles')
      .update(updates)
      .eq('id', userId)
      .select()
      .single()
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error updating profile:', err)
    throw err
  }
}

Delete Operations

javascript
// Delete single record
/** Hard-delete one post by id; resolves to true on success. */
const deletePost = async (id) => {
  try {
    const res = await supabase.from('posts').delete().eq('id', id)
    if (res.error) throw res.error
    return true
  } catch (err) {
    console.error('Error deleting post:', err)
    throw err
  }
}

// Delete multiple records
/** Hard-delete every post whose id appears in `postIds`; resolves to true on success. */
const deleteMultiplePosts = async (postIds) => {
  try {
    const res = await supabase.from('posts').delete().in('id', postIds)
    if (res.error) throw res.error
    return true
  } catch (err) {
    console.error('Error deleting posts:', err)
    throw err
  }
}

// Soft delete (mark as deleted instead of removing)
/**
 * Soft-delete: stamp deleted_at and unpublish instead of removing the row.
 * NOTE(review): the posts schema shown earlier in this document has no
 * deleted_at column — add it before using this helper.
 */
const softDeletePost = async (id) => {
  try {
    const patch = {
      deleted_at: new Date().toISOString(),
      published: false
    }
    const { data: row, error: failure } = await supabase
      .from('posts')
      .update(patch)
      .eq('id', id)
      .select()
      .single()
    if (failure) throw failure
    return row
  } catch (err) {
    console.error('Error soft deleting post:', err)
    throw err
  }
}

// Conditional delete
/** Hard-delete unpublished posts older than `daysOld` days; resolves to true on success. */
const deleteOldDrafts = async (daysOld = 30) => {
  try {
    const cutoffDate = new Date()
    cutoffDate.setDate(cutoffDate.getDate() - daysOld)

    const res = await supabase
      .from('posts')
      .delete()
      .eq('published', false)
      .lt('created_at', cutoffDate.toISOString())
    if (res.error) throw res.error
    return true
  } catch (err) {
    console.error('Error deleting old drafts:', err)
    throw err
  }
}

Real-time Subscriptions

Setting Up Real-time Listeners

javascript
// Subscribe to table changes
/**
 * Subscribe to every change (INSERT/UPDATE/DELETE) on public.posts.
 * Returns a cleanup function that removes the channel.
 */
const subscribeToPostChanges = (callback) => {
  const channel = supabase
    .channel('posts-channel')
    .on(
      'postgres_changes',
      { event: '*', schema: 'public', table: 'posts' },
      (payload) => {
        console.log('Change received!', payload)
        callback(payload)
      }
    )
    .subscribe()

  return () => {
    supabase.removeChannel(channel)
  }
}

// Subscribe to specific events
/**
 * Subscribe only to INSERTs of already-published posts; the callback
 * receives the new row. Returns a cleanup function.
 */
const subscribeToNewPosts = (callback) => {
  const channel = supabase
    .channel('new-posts-channel')
    .on(
      'postgres_changes',
      {
        event: 'INSERT',
        schema: 'public',
        table: 'posts',
        filter: 'published=eq.true'
      },
      (payload) => callback(payload.new)
    )
    .subscribe()

  return () => {
    supabase.removeChannel(channel)
  }
}

// Subscribe to user-specific changes
/**
 * Subscribe to all changes on one author's posts (server-side filtered).
 * Returns a cleanup function.
 */
const subscribeToUserPosts = (userId, callback) => {
  const channel = supabase
    .channel(`user-posts-${userId}`)
    .on(
      'postgres_changes',
      {
        event: '*',
        schema: 'public',
        table: 'posts',
        filter: `author_id=eq.${userId}`
      },
      (payload) => callback(payload)
    )
    .subscribe()

  return () => {
    supabase.removeChannel(channel)
  }
}

// React hook for real-time data
/**
 * React hook: fetch posts matching `filters`, then keep the list in sync
 * with INSERT/UPDATE/DELETE events on the `posts` table.
 *
 * Fix over the original: the effect read `filters` but depended on `[]`,
 * so changed filters never triggered a refetch (stale closure). The effect
 * is keyed on a serialized copy, because depending on the object itself
 * would re-subscribe on every render (fresh literal each time).
 */
const useRealtimePosts = (filters = {}) => {
  const [posts, setPosts] = useState([])
  const [loading, setLoading] = useState(true)
  const [error, setError] = useState(null)

  // Stable primitive key for the dependency array.
  const filtersKey = JSON.stringify(filters)

  useEffect(() => {
    let cancelled = false

    // Initial fetch; ignore the result if the effect was cleaned up first.
    const fetchPosts = async () => {
      try {
        const data = await getFilteredPosts(filters)
        if (cancelled) return
        setPosts(data)
        setLoading(false)
      } catch (err) {
        if (cancelled) return
        setError(err)
        setLoading(false)
      }
    }

    fetchPosts()

    // Set up real-time subscription
    const subscription = supabase
      .channel('posts-realtime')
      .on('postgres_changes',
        { event: '*', schema: 'public', table: 'posts' },
        (payload) => {
          if (payload.eventType === 'INSERT') {
            // NOTE(review): inserted rows are not checked against `filters`;
            // confirm whether non-matching rows should be excluded here.
            setPosts(current => [payload.new, ...current])
          } else if (payload.eventType === 'UPDATE') {
            setPosts(current =>
              current.map(post =>
                post.id === payload.new.id ? payload.new : post
              )
            )
          } else if (payload.eventType === 'DELETE') {
            setPosts(current =>
              current.filter(post => post.id !== payload.old.id)
            )
          }
        }
      )
      .subscribe()

    return () => {
      cancelled = true
      supabase.removeChannel(subscription)
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [filtersKey])

  return { posts, loading, error }
}

// Presence tracking
/**
 * React hook: track who is online in a realtime presence channel.
 *
 * Fix over the original: `supabase.auth.user()` is the removed supabase-js
 * v1 API — the rest of this document uses v2 (`signInWithPassword`,
 * `getUser`), where the current user is fetched with `await
 * supabase.auth.getUser()`.
 *
 * @param {string} roomId - presence channel name
 * @returns {Array} presence entries for everyone currently in the room
 */
const usePresence = (roomId) => {
  const [onlineUsers, setOnlineUsers] = useState([])

  useEffect(() => {
    const channel = supabase.channel(roomId)

    channel
      .on('presence', { event: 'sync' }, () => {
        // presenceState() maps presence keys to arrays of entries; flatten.
        const newState = channel.presenceState()
        const users = Object.values(newState).flat()
        setOnlineUsers(users)
      })
      .on('presence', { event: 'join' }, ({ key, newPresences }) => {
        console.log('User joined:', newPresences)
      })
      .on('presence', { event: 'leave' }, ({ key, leftPresences }) => {
        console.log('User left:', leftPresences)
      })
      .subscribe(async (status) => {
        if (status === 'SUBSCRIBED') {
          // supabase-js v2: fetch the signed-in user asynchronously.
          const { data: { user } } = await supabase.auth.getUser()
          if (user) {
            await channel.track({
              user_id: user.id,
              username: user.user_metadata?.username || user.email,
              online_at: new Date().toISOString()
            })
          }
        }
      })

    return () => {
      supabase.removeChannel(channel)
    }
  }, [roomId])

  return onlineUsers
}

Authentication with Supabase

User Authentication

javascript
// Sign up with email and password
/**
 * Register a new user with email/password, storing `metadata` as user
 * metadata, then create the matching `profiles` row (via createProfile,
 * defined elsewhere in this document's examples).
 */
const signUp = async (email, password, metadata = {}) => {
  try {
    const res = await supabase.auth.signUp({
      email,
      password,
      options: {
        data: metadata
      }
    })
    if (res.error) throw res.error

    // Create profile after successful signup
    if (res.data.user) {
      await createProfile(res.data.user.id, {
        username: metadata.username,
        full_name: metadata.fullName
      })
    }

    return res.data
  } catch (err) {
    console.error('Error signing up:', err)
    throw err
  }
}

// Sign in with email and password
/** Sign in with email/password; returns the session+user payload. */
const signIn = async (email, password) => {
  try {
    const res = await supabase.auth.signInWithPassword({ email, password })
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error signing in:', err)
    throw err
  }
}

// Sign in with OAuth providers
/** Start the Google OAuth flow, returning to /auth/callback afterwards. */
const signInWithGoogle = async () => {
  try {
    const res = await supabase.auth.signInWithOAuth({
      provider: 'google',
      options: {
        redirectTo: `${window.location.origin}/auth/callback`
      }
    })
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error signing in with Google:', err)
    throw err
  }
}

// Sign in with GitHub
/** Start the GitHub OAuth flow, returning to /auth/callback afterwards. */
const signInWithGitHub = async () => {
  try {
    const res = await supabase.auth.signInWithOAuth({
      provider: 'github',
      options: {
        redirectTo: `${window.location.origin}/auth/callback`
      }
    })
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error signing in with GitHub:', err)
    throw err
  }
}

// Sign out
/** Sign the current user out; logs and rethrows on failure. */
const signOut = async () => {
  try {
    const res = await supabase.auth.signOut()
    if (res.error) throw res.error
  } catch (err) {
    console.error('Error signing out:', err)
    throw err
  }
}

// Get current user
/** Return the currently signed-in user (or null when signed out). */
const getCurrentUser = async () => {
  try {
    const res = await supabase.auth.getUser()
    if (res.error) throw res.error
    return res.data.user
  } catch (err) {
    console.error('Error getting current user:', err)
    throw err
  }
}

// Auth state change listener
/** React hook exposing the current Supabase user and a loading flag. */
const useAuth = () => {
  const [user, setUser] = useState(null)
  const [loading, setLoading] = useState(true)

  useEffect(() => {
    // Seed state from any persisted session.
    const loadSession = async () => {
      const { data: { session } } = await supabase.auth.getSession()
      setUser(session?.user ?? null)
      setLoading(false)
    }

    loadSession()

    // Follow subsequent sign-in / sign-out / token-refresh events.
    const { data: { subscription } } = supabase.auth.onAuthStateChange(
      async (event, session) => {
        setUser(session?.user ?? null)
        setLoading(false)
      }
    )

    return () => subscription.unsubscribe()
  }, [])

  return { user, loading }
}

// Password reset
/** Email a password-reset link that lands on /auth/reset-password. */
const resetPassword = async (email) => {
  try {
    const res = await supabase.auth.resetPasswordForEmail(email, {
      redirectTo: `${window.location.origin}/auth/reset-password`
    })
    if (res.error) throw res.error
  } catch (err) {
    console.error('Error resetting password:', err)
    throw err
  }
}

// Update password
/** Change the signed-in user's password. */
const updatePassword = async (newPassword) => {
  try {
    const res = await supabase.auth.updateUser({ password: newPassword })
    if (res.error) throw res.error
  } catch (err) {
    console.error('Error updating password:', err)
    throw err
  }
}

// Update user metadata
/** Merge `metadata` into the signed-in user's user_metadata. */
const updateUserMetadata = async (metadata) => {
  try {
    const res = await supabase.auth.updateUser({ data: metadata })
    if (res.error) throw res.error
  } catch (err) {
    console.error('Error updating user metadata:', err)
    throw err
  }
}

Row Level Security (RLS)

sql
-- Enable RLS on tables
-- Once enabled, every query against these tables is denied unless a policy
-- below explicitly allows it.
ALTER TABLE public.profiles ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.posts ENABLE ROW LEVEL SECURITY;
ALTER TABLE public.comments ENABLE ROW LEVEL SECURITY;

-- Profiles policies
-- Users can view all profiles
CREATE POLICY "Profiles are viewable by everyone" 
ON public.profiles FOR SELECT 
USING (true);

-- Users can update their own profile
CREATE POLICY "Users can update own profile" 
ON public.profiles FOR UPDATE 
USING (auth.uid() = id);

-- Users can insert their own profile
CREATE POLICY "Users can insert own profile" 
ON public.profiles FOR INSERT 
WITH CHECK (auth.uid() = id);

-- Posts policies
-- Published posts are viewable by everyone; authors can also see their drafts
CREATE POLICY "Published posts are viewable by everyone" 
ON public.posts FOR SELECT 
USING (published = true OR auth.uid() = author_id);

-- Users can create their own posts (cannot forge another author_id)
CREATE POLICY "Users can create own posts" 
ON public.posts FOR INSERT 
WITH CHECK (auth.uid() = author_id);

-- Users can update their own posts
CREATE POLICY "Users can update own posts" 
ON public.posts FOR UPDATE 
USING (auth.uid() = author_id);

-- Users can delete their own posts
CREATE POLICY "Users can delete own posts" 
ON public.posts FOR DELETE 
USING (auth.uid() = author_id);

-- Comments policies
-- Comments are viewable by everyone
CREATE POLICY "Comments are viewable by everyone" 
ON public.comments FOR SELECT 
USING (true);

-- Authenticated users can create comments
CREATE POLICY "Authenticated users can create comments" 
ON public.comments FOR INSERT 
WITH CHECK (auth.role() = 'authenticated');

-- Users can update their own comments
CREATE POLICY "Users can update own comments" 
ON public.comments FOR UPDATE 
USING (auth.uid() = author_id);

-- Users can delete their own comments
CREATE POLICY "Users can delete own comments" 
ON public.comments FOR DELETE 
USING (auth.uid() = author_id);

-- Advanced RLS with functions
-- Reads the 'role' claim from the caller's JWT; SECURITY DEFINER runs it
-- with the function owner's privileges.
CREATE OR REPLACE FUNCTION is_admin()
RETURNS BOOLEAN AS $$
BEGIN
  RETURN (
    SELECT (auth.jwt() ->> 'role') = 'admin'
  );
END;
$$ LANGUAGE plpgsql SECURITY DEFINER;

-- Admin can do everything
-- NOTE(review): no FOR clause means FOR ALL, and with no WITH CHECK the
-- USING expression is reused for writes — confirm that is the intent.
CREATE POLICY "Admins can do everything" 
ON public.posts 
USING (is_admin());

File Storage with Supabase

Storage Operations

javascript
// Upload file
/**
 * Upload a file to a storage bucket at `path` (1h cache, no overwrite).
 * Returns the storage metadata for the uploaded object.
 */
const uploadFile = async (bucket, path, file) => {
  try {
    const res = await supabase.storage
      .from(bucket)
      .upload(path, file, {
        cacheControl: '3600',
        upsert: false
      })
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error uploading file:', err)
    throw err
  }
}

// Upload with progress tracking
// Upload via raw XMLHttpRequest so upload progress can be reported
// (progress callbacks are not exposed by the supabase-js storage client).
// NOTE(review): relies on module-level `supabaseUrl`/`supabaseKey` globals,
// and sends the anon key as the bearer token — uploads to an RLS-protected
// bucket need the signed-in user's access token instead; confirm against
// the bucket's storage policies. Also verify the endpoint/FormData shape
// against the current Storage API before relying on this.
const uploadFileWithProgress = (bucket, path, file, onProgress) => {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest()
    
    // Report percentage completion to the caller as bytes go out.
    xhr.upload.addEventListener('progress', (event) => {
      if (event.lengthComputable) {
        const progress = (event.loaded / event.total) * 100
        onProgress(progress)
      }
    })

    // Only HTTP 200 counts as success; anything else rejects.
    xhr.addEventListener('load', () => {
      if (xhr.status === 200) {
        resolve(JSON.parse(xhr.responseText))
      } else {
        reject(new Error('Upload failed'))
      }
    })

    xhr.addEventListener('error', () => {
      reject(new Error('Upload failed'))
    })

    const formData = new FormData()
    formData.append('file', file)

    // Storage object endpoint: /storage/v1/object/<bucket>/<path>
    xhr.open('POST', `${supabaseUrl}/storage/v1/object/${bucket}/${path}`)
    xhr.setRequestHeader('Authorization', `Bearer ${supabaseKey}`)
    xhr.send(formData)
  })
}

// Download file
/** Download an object from a bucket; resolves to a Blob. */
const downloadFile = async (bucket, path) => {
  try {
    const res = await supabase.storage.from(bucket).download(path)
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error downloading file:', err)
    throw err
  }
}

// Get public URL
/** Build the public URL for an object (bucket must be public); synchronous. */
const getPublicUrl = (bucket, path) => {
  const result = supabase.storage.from(bucket).getPublicUrl(path)
  return result.data.publicUrl
}

// Create signed URL
/** Create a time-limited signed URL for a private object (default 1 hour). */
const createSignedUrl = async (bucket, path, expiresIn = 3600) => {
  try {
    const res = await supabase.storage.from(bucket).createSignedUrl(path, expiresIn)
    if (res.error) throw res.error
    return res.data.signedUrl
  } catch (err) {
    console.error('Error creating signed URL:', err)
    throw err
  }
}

// List files
/** List up to 100 objects in a bucket folder, sorted by name ascending. */
const listFiles = async (bucket, folder = '') => {
  try {
    const options = {
      limit: 100,
      offset: 0,
      sortBy: { column: 'name', order: 'asc' }
    }
    const { data: entries, error: failure } = await supabase.storage
      .from(bucket)
      .list(folder, options)
    if (failure) throw failure
    return entries
  } catch (err) {
    console.error('Error listing files:', err)
    throw err
  }
}

// Delete file
/** Remove one or more objects; `paths` is an array of object paths. */
const deleteFile = async (bucket, paths) => {
  try {
    const res = await supabase.storage.from(bucket).remove(paths)
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error deleting file:', err)
    throw err
  }
}

// Move file
/** Move (rename) an object within a bucket. */
const moveFile = async (bucket, fromPath, toPath) => {
  try {
    const res = await supabase.storage.from(bucket).move(fromPath, toPath)
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error moving file:', err)
    throw err
  }
}

// Copy file
/** Copy an object within a bucket, leaving the original in place. */
const copyFile = async (bucket, fromPath, toPath) => {
  try {
    const res = await supabase.storage.from(bucket).copy(fromPath, toPath)
    if (res.error) throw res.error
    return res.data
  } catch (err) {
    console.error('Error copying file:', err)
    throw err
  }
}

// Upload avatar example
/**
 * Upload a user's avatar to the `avatars` bucket (overwriting any previous
 * one via upsert), then store the public URL on the profile row.
 * Returns the public URL.
 */
const uploadAvatar = async (userId, file) => {
  try {
    const extension = file.name.split('.').pop()
    const objectPath = `avatars/${userId}.${extension}`

    // Upload, replacing any existing avatar for this user.
    const upload = await supabase.storage
      .from('avatars')
      .upload(objectPath, file, { upsert: true })
    if (upload.error) throw upload.error

    const publicUrl = getPublicUrl('avatars', objectPath)

    // Point the profile at the new avatar.
    const update = await supabase
      .from('profiles')
      .update({ avatar_url: publicUrl })
      .eq('id', userId)
    if (update.error) throw update.error

    return publicUrl
  } catch (err) {
    console.error('Error uploading avatar:', err)
    throw err
  }
}

Advanced Features and Functions

Database Functions

sql
-- Create a function to get post statistics
-- Returns comments/likes/views counters for one post as a JSON object.
-- Fix: the parameter is named post_id, which collides with the
-- comments.post_id column — PL/pgSQL's variable substitution would raise
-- "column reference \"post_id\" is ambiguous" at runtime. Columns are now
-- table-qualified (aliases c / p) so only the $1 parameter is unqualified.
CREATE OR REPLACE FUNCTION get_post_stats(post_id UUID)
RETURNS JSON AS $$
DECLARE
  result JSON;
BEGIN
  SELECT json_build_object(
    'comments_count', (
      SELECT COUNT(*) FROM comments c WHERE c.post_id = $1
    ),
    'likes_count', (
      SELECT COALESCE((p.metadata->>'likes')::int, 0) 
      FROM posts p WHERE p.id = $1
    ),
    'views_count', (
      SELECT COALESCE((p.metadata->>'views')::int, 0) 
      FROM posts p WHERE p.id = $1
    )
  ) INTO result;
  
  RETURN result;
END;
$$ LANGUAGE plpgsql;

-- Create a function to search posts
-- Full-text search over title + content of published posts, with a
-- relevance rank from ts_rank. Joined with profiles for the author name.
-- NOTE(review): `rank` is both an OUT column and the select-list alias;
-- PL/pgSQL may report "column reference \"rank\" is ambiguous" in the
-- ORDER BY — verify, and order by the ts_rank expression (or ordinal 6)
-- if it does.
CREATE OR REPLACE FUNCTION search_posts(search_term TEXT)
RETURNS TABLE(
  id UUID,
  title TEXT,
  excerpt TEXT,
  author_name TEXT,
  created_at TIMESTAMPTZ,
  rank REAL
) AS $$
BEGIN
  RETURN QUERY
  SELECT 
    p.id,
    p.title,
    p.excerpt,
    pr.full_name as author_name,
    p.created_at,
    -- Rank the document (title + content) against the plain-text query.
    ts_rank(
      to_tsvector('english', p.title || ' ' || COALESCE(p.content, '')),
      plainto_tsquery('english', search_term)
    ) as rank
  FROM posts p
  JOIN profiles pr ON p.author_id = pr.id
  WHERE 
    p.published = true
    AND (
      to_tsvector('english', p.title || ' ' || COALESCE(p.content, '')) 
      @@ plainto_tsquery('english', search_term)
    )
  ORDER BY rank DESC;
END;
$$ LANGUAGE plpgsql;

-- Create a function to get trending posts
-- Scores recent published posts by engagement (views*0.1 + likes*1 +
-- comments*2) divided by the post's age in seconds, scaled to a per-day
-- rate (* 86400).
-- NOTE(review): a post created in the same instant as NOW() makes the
-- divisor ~0, inflating (or erroring) the score — consider adding a
-- minimum age to the denominator. Also `ORDER BY score` names an OUT
-- column; verify PL/pgSQL does not flag it as ambiguous.
CREATE OR REPLACE FUNCTION get_trending_posts(days_back INTEGER DEFAULT 7)
RETURNS TABLE(
  id UUID,
  title TEXT,
  excerpt TEXT,
  author_name TEXT,
  created_at TIMESTAMPTZ,
  score NUMERIC
) AS $$
BEGIN
  RETURN QUERY
  SELECT 
    p.id,
    p.title,
    p.excerpt,
    pr.full_name as author_name,
    p.created_at,
    (
      COALESCE((p.metadata->>'views')::int, 0) * 0.1 +
      COALESCE((p.metadata->>'likes')::int, 0) * 1.0 +
      (SELECT COUNT(*) FROM comments c WHERE c.post_id = p.id) * 2.0
    ) / EXTRACT(EPOCH FROM (NOW() - p.created_at)) * 86400 as score
  FROM posts p
  JOIN profiles pr ON p.author_id = pr.id
  WHERE 
    p.published = true
    AND p.created_at > NOW() - INTERVAL '1 day' * days_back
  ORDER BY score DESC;
END;
$$ LANGUAGE plpgsql;

Edge Functions

typescript
// Create edge function (Deno)
// supabase/functions/send-email/index.ts

import { serve } from "https://deno.land/std@0.168.0/http/server.ts"
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'

// CORS headers returned on every response so browsers can call this
// function cross-origin.
const corsHeaders = {
  'Access-Control-Allow-Origin': '*',
  'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
}

// Edge function: relay {to, subject, html} from the request body to
// SendGrid's v3 mail API. Responds 200 {success:true} or 400 {error}.
serve(async (req) => {
  // CORS preflight.
  if (req.method === 'OPTIONS') {
    return new Response('ok', { headers: corsHeaders })
  }

  try {
    const { to, subject, html } = await req.json()

    // Initialize Supabase client
    // NOTE(review): this client is created with the service-role key but
    // never used below — remove it, or use it (e.g. to log sent emails).
    const supabase = createClient(
      Deno.env.get('SUPABASE_URL') ?? '',
      Deno.env.get('SUPABASE_SERVICE_ROLE_KEY') ?? ''
    )

    // Send email using your preferred service
    // This is a placeholder - integrate with SendGrid, Resend, etc.
    const emailResponse = await fetch('https://api.sendgrid.com/v3/mail/send', {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${Deno.env.get('SENDGRID_API_KEY')}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        personalizations: [{ to: [{ email: to }] }],
        from: { email: 'noreply@yourapp.com' },
        subject,
        content: [{ type: 'text/html', value: html }]
      })
    })

    if (!emailResponse.ok) {
      throw new Error('Failed to send email')
    }

    return new Response(
      JSON.stringify({ success: true }),
      { headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
    )
  } catch (error) {
    // Any failure (bad JSON, SendGrid error) becomes a 400 with the message.
    return new Response(
      JSON.stringify({ error: error.message }),
      { 
        status: 400,
        headers: { ...corsHeaders, 'Content-Type': 'application/json' }
      }
    )
  }
})

// Call edge function from client
/**
 * Invoke the `send-email` edge function with the given payload.
 * Resolves with the function's response data; logs and rethrows any
 * invocation error so callers can handle it themselves.
 */
const sendEmail = async (emailData) => {
  try {
    const response = await supabase.functions.invoke('send-email', {
      body: emailData
    })
    if (response.error) throw response.error
    return response.data
  } catch (error) {
    console.error('Error sending email:', error)
    throw error
  }
}

Performance Optimization

javascript
// Connection pooling configuration
// Single client built at module load; supabaseUrl/supabaseKey are expected
// to come from environment configuration — TODO confirm against setup code.
const supabase = createClient(supabaseUrl, supabaseKey, {
  db: {
    schema: 'public', // query against the default public schema
  },
  auth: {
    autoRefreshToken: true,   // refresh JWTs automatically before expiry
    persistSession: true,     // keep the session across page reloads
    detectSessionInUrl: true  // pick up OAuth/magic-link tokens from the URL
  },
  global: {
    // Custom header attached to every request; useful for tracing requests
    // from this app in logs/analytics.
    headers: { 'x-my-custom-header': 'my-app-name' },
  },
})

// Batch operations for better performance
/**
 * Insert posts in fixed-size chunks so a single oversized payload never
 * hits the API. A failed chunk aborts the whole operation.
 *
 * @param {Array<Object>} posts - rows to insert into `posts`
 * @param {number} [batchSize=100] - rows per insert request (was hard-coded;
 *   now a parameter with the same default, so existing callers are unaffected)
 * @returns {Promise<Array>} every inserted row, in insertion order
 * @throws rethrows the first Supabase error after logging the batch number
 */
const batchInsertPosts = async (posts, batchSize = 100) => {
  const results = []

  for (let i = 0; i < posts.length; i += batchSize) {
    // slice() handles the final, possibly short, chunk for free
    const batch = posts.slice(i, i + batchSize)

    try {
      const { data, error } = await supabase
        .from('posts')
        .insert(batch)
        .select()

      if (error) throw error
      results.push(...data)
    } catch (error) {
      console.error(`Error in batch ${i / batchSize + 1}:`, error)
      throw error
    }
  }

  return results
}

// Optimized queries with proper indexing
/**
 * Fetch published posts with author info, shaped for list views.
 * Selects a narrow column list and caps the page size to keep payloads small.
 *
 * @param {Object} [filters] - optional; previously required (calling with no
 *   argument crashed on `filters.published`). Supported keys: published,
 *   authorId, dateFrom (timestamp lower bound), limit (default 20).
 * @returns {Promise<Array>} matching post rows
 * @throws rethrows any Supabase query error after logging it
 */
const getOptimizedPosts = async (filters = {}) => {
  try {
    // Use indexes effectively
    let query = supabase
      .from('posts')
      .select(`
        id,
        title,
        excerpt,
        created_at,
        profiles!inner(username, full_name)
      `)

    // Filter by indexed columns first
    if (filters.published !== undefined) {
      query = query.eq('published', filters.published)
    }

    if (filters.authorId) {
      query = query.eq('author_id', filters.authorId)
    }

    // Use range queries on indexed timestamp columns
    if (filters.dateFrom) {
      query = query.gte('created_at', filters.dateFrom)
    }

    // Order by indexed columns
    query = query.order('created_at', { ascending: false })

    // Limit results to avoid large payloads. `??` keeps an explicit limit
    // of 0 distinct from "not provided" (`||` silently turned 0 into 20).
    query = query.limit(filters.limit ?? 20)

    const { data, error } = await query

    if (error) throw error
    return data
  } catch (error) {
    console.error('Error fetching optimized posts:', error)
    throw error
  }
}

// Caching strategy
// Process-wide in-memory cache shared by every caller of getCachedData.
const cache = new Map()

/**
 * Return the cached value for `key` while it is younger than `ttl` ms;
 * otherwise invoke `fetchFunction` and cache the fresh result. If the
 * fetch fails but a stale entry exists, the stale value is returned
 * (with a warning) instead of surfacing the error.
 */
const getCachedData = async (key, fetchFunction, ttl = 300000) => { // 5 minutes TTL
  const entry = cache.get(key)
  const isFresh = entry !== undefined && Date.now() - entry.timestamp < ttl

  if (isFresh) {
    return entry.data
  }

  try {
    const fresh = await fetchFunction()
    cache.set(key, { data: fresh, timestamp: Date.now() })
    return fresh
  } catch (error) {
    // Degrade gracefully: a stale value beats no value at all.
    if (entry) {
      console.warn('Using stale data due to error:', error)
      return entry.data
    }
    throw error
  }
}

// Usage with caching
// Cached wrapper: identical filter objects serialize to the same cache key,
// so repeated list requests within the TTL are served from memory.
const getCachedPosts = async (filters) =>
  getCachedData(`posts:${JSON.stringify(filters)}`, () => getOptimizedPosts(filters))

Supabase's combination of PostgreSQL's power with modern development tools creates an exceptional platform for building sophisticated applications. Its open-source nature, real-time capabilities, built-in authentication, and comprehensive API generation make it an excellent choice for developers who need the flexibility of SQL with the convenience of a modern Backend-as-a-Service platform. The platform's commitment to developer experience, combined with its robust feature set and active community, positions it as a leading alternative to traditional BaaS solutions.