
# Steampipe Cheat Sheet


## Overview

Steampipe is an open-source tool that treats cloud APIs as databases, letting you query cloud infrastructure with SQL. It provides instant access to cloud resources across AWS, Azure, GCP, Kubernetes, and 140+ other services through a unified SQL interface. Steampipe supports security auditing, compliance checking, cost analysis, and infrastructure discovery with familiar SQL syntax.

**Key Features:** SQL-based cloud querying, 140+ plugins, real-time data access, compliance frameworks, custom dashboards, CLI and web interfaces, and extensive community mods for security and compliance.
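
A single command is enough to see the model in action; a minimal sketch, assuming the AWS plugin is installed and credentials are configured (installation is covered below):

```bash
# Query cloud infrastructure like a database
steampipe query "select name, region from aws_s3_bucket where region = 'us-east-1'"
```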

## Installation and Setup

### Binary Installation
```bash
# Install Steampipe (Linux/macOS)
sudo /bin/sh -c "$(curl -fsSL https://raw.githubusercontent.com/turbot/steampipe/main/install.sh)"

# Manual installation for Linux
curl -L "https://github.com/turbot/steampipe/releases/latest/download/steampipe_linux_amd64.tar.gz" -o steampipe.tar.gz
tar -xzf steampipe.tar.gz
sudo mv steampipe /usr/local/bin/
rm steampipe.tar.gz

# Manual installation for macOS
curl -L "https://github.com/turbot/steampipe/releases/latest/download/steampipe_darwin_amd64.tar.gz" -o steampipe.tar.gz
tar -xzf steampipe.tar.gz
sudo mv steampipe /usr/local/bin/
rm steampipe.tar.gz

# Verify installation
steampipe --version
```

### Package Manager Installation
```bash
# Homebrew (macOS/Linux)
brew install turbot/tap/steampipe

# Chocolatey (Windows)
choco install steampipe

# Scoop (Windows)
scoop bucket add turbot https://github.com/turbot/scoop-bucket.git
scoop install steampipe

# Arch Linux (AUR)
yay -S steampipe

# Verify installation
steampipe --version
```

### Docker Installation
```bash
# Pull Docker image
docker pull turbot/steampipe

# Run Steampipe in Docker
docker run -it --rm turbot/steampipe steampipe query "select 1 as test"

# Create alias for easier usage
echo 'alias steampipe="docker run -it --rm -v ~/.steampipe:/home/steampipe/.steampipe turbot/steampipe steampipe"' >> ~/.bashrc
source ~/.bashrc

# Run with volume mount for persistent configuration
docker run -it --rm \
  -v ~/.steampipe:/home/steampipe/.steampipe \
  -v ~/.aws:/home/steampipe/.aws \
  -v ~/.azure:/home/steampipe/.azure \
  -v ~/.config/gcloud:/home/steampipe/.config/gcloud \
  turbot/steampipe steampipe query "select name from aws_s3_bucket"

# Create Docker Compose file
cat > docker-compose.yml << 'EOF'
version: '3.8'
services:
  steampipe:
    image: turbot/steampipe
    volumes:
      - ~/.steampipe:/home/steampipe/.steampipe
      - ~/.aws:/home/steampipe/.aws
      - ~/.azure:/home/steampipe/.azure
      - ~/.config/gcloud:/home/steampipe/.config/gcloud
    ports:
      - "9193:9193"
    command: steampipe service start --foreground
EOF

# Start with Docker Compose
docker-compose up -d
```
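
The service exposes a standard PostgreSQL endpoint on port 9193, so any Postgres client can talk to the containerized service. A sketch assuming the default `steampipe` database and user (the generated password is printed in the connection details when the service starts):

```bash
# Connect interactively with psql
psql -h localhost -p 9193 -d steampipe -U steampipe

# Or run a one-off query over the Postgres endpoint
psql -h localhost -p 9193 -d steampipe -U steampipe \
  -c "select name from aws_s3_bucket limit 5"
```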

### Initial Setup and Configuration
```bash
# Initialize Steampipe
steampipe service start

# Check service status
steampipe service status

# Install essential plugins
steampipe plugin install aws
steampipe plugin install azure
steampipe plugin install gcp
steampipe plugin install kubernetes
steampipe plugin install github

# List installed plugins
steampipe plugin list

# Update all plugins
steampipe plugin update --all

# Create configuration directory
mkdir -p ~/.steampipe/config

# Configure AWS plugin
cat > ~/.steampipe/config/aws.spc << 'EOF'
connection "aws" {
  plugin = "aws"

  # Authentication options (choose one):
  # Option 1: Use default AWS credentials
  # (no additional config needed)

  # Option 2: Specify profile
  # profile = "my-profile"

  # Option 3: Specify credentials directly
  # access_key = "AKIA..."
  # secret_key = "..."

  # Option 4: Use IAM role
  # role_arn = "arn:aws:iam::123456789012:role/SteampipeRole"

  # Regions to query (optional)
  regions = ["us-east-1", "us-west-2", "eu-west-1"]

  # Ignore error regions (optional)
  ignore_error_codes = ["UnauthorizedOperation", "AccessDenied"]
}

# Multiple AWS accounts
connection "aws_dev" {
  plugin = "aws"
  profile = "dev-profile"
  regions = ["us-east-1"]
}

connection "aws_prod" {
  plugin = "aws"
  profile = "prod-profile"
  regions = ["us-east-1", "us-west-2"]
}
EOF

# Configure Azure plugin
cat > ~/.steampipe/config/azure.spc << 'EOF'
connection "azure" {
  plugin = "azure"

  # Authentication options (choose one):
  # Option 1: Use Azure CLI credentials (default)
  # (no additional config needed)

  # Option 2: Use service principal
  # tenant_id     = "00000000-0000-0000-0000-000000000000"
  # client_id     = "00000000-0000-0000-0000-000000000000"
  # client_secret = "..."

  # Option 3: Use certificate
  # tenant_id              = "00000000-0000-0000-0000-000000000000"
  # client_id              = "00000000-0000-0000-0000-000000000000"
  # certificate_path       = "/path/to/certificate.pfx"
  # certificate_password   = "..."

  # Subscription IDs to query (optional)
  # subscription_ids = ["00000000-0000-0000-0000-000000000000"]

  # Ignore error codes (optional)
  ignore_error_codes = ["UnauthorizedOperation", "Forbidden"]
}
EOF

# Configure GCP plugin
cat > ~/.steampipe/config/gcp.spc << 'EOF'
connection "gcp" {
  plugin = "gcp"

  # Authentication options (choose one):
  # Option 1: Use Application Default Credentials (default)
  # (no additional config needed)

  # Option 2: Use service account key file
  # credentials = "/path/to/service-account-key.json"

  # Option 3: Use service account key content
  # credentials = "{\"type\": \"service_account\", ...}"

  # Project IDs to query (optional)
  # project = "my-project-id"
  # projects = ["project-1", "project-2"]

  # Ignore error codes (optional)
  ignore_error_codes = ["accessNotConfigured", "forbidden"]
}
EOF

# Test connections
steampipe query "select name, region from aws_region"
steampipe query "select name, location from azure_resource_group"
steampipe query "select name, zone from gcp_compute_zone"
```

## Basic Usage and Queries

### Simple Queries
```sql
-- Start interactive query session
steampipe query

-- List all AWS S3 buckets
select name, region, creation_date from aws_s3_bucket;

-- List all Azure resource groups
select name, location, subscription_id from azure_resource_group;

-- List all GCP compute instances
select name, zone, status from gcp_compute_instance;

-- List all Kubernetes pods
select name, namespace, phase from kubernetes_pod;

-- Count resources by type
select 
  'S3 Buckets' as resource_type,
  count(*) as count
from aws_s3_bucket
union all
select 
  'EC2 Instances' as resource_type,
  count(*) as count
from aws_ec2_instance;
```

### Advanced Queries
```sql
-- Find unencrypted S3 buckets
select 
  name,
  region,
  server_side_encryption_configuration
from aws_s3_bucket
where server_side_encryption_configuration is null;

-- Find EC2 instances without tags
select 
  instance_id,
  instance_type,
  state,
  region
from aws_ec2_instance
where tags is null or tags = '{}'::jsonb;

-- Find security groups with overly permissive rules
select 
  group_id,
  group_name,
  vpc_id,
  ip_permissions
from aws_vpc_security_group
where ip_permissions @> '[{"IpRanges": [{"CidrIp": "0.0.0.0/0"}]}]';

-- Find Azure VMs without managed disks
select 
  name,
  resource_group,
  location,
  storage_profile
from azure_compute_virtual_machine
where storage_profile -> 'osDisk' ->> 'managedDisk' is null;

-- Find GCP instances with external IPs
select 
  name,
  zone,
  status,
  network_interfaces
from gcp_compute_instance
where network_interfaces @> '[{"accessConfigs": [{"type": "ONE_TO_ONE_NAT"}]}]';

-- Cross-cloud resource inventory
select 
  'AWS' as cloud,
  'S3 Bucket' as resource_type,
  name as resource_name,
  region as location
from aws_s3_bucket
union all
select 
  'Azure' as cloud,
  'Storage Account' as resource_type,
  name as resource_name,
  location
from azure_storage_account
union all
select 
  'GCP' as cloud,
  'Storage Bucket' as resource_type,
  name as resource_name,
  location
from gcp_storage_bucket;
```
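
Results from queries like these can be exported directly for spreadsheets or BI tools; a small sketch using the `--output csv` flag shown in the next section:

```bash
# Export a cross-cloud storage inventory to CSV
steampipe query "
select 'AWS' as cloud, name, region as location from aws_s3_bucket
union all
select 'GCP' as cloud, name, location from gcp_storage_bucket
" --output csv > storage_inventory.csv
```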

### File-Based Queries
```bash
# Create query file
cat > security_audit.sql << 'EOF'
-- Security audit query
select 
  'Unencrypted S3 Buckets' as finding,
  count(*) as count
from aws_s3_bucket
where server_side_encryption_configuration is null
union all
select 
  'Public S3 Buckets' as finding,
  count(*) as count
from aws_s3_bucket
where bucket_policy_is_public = true
union all
select 
  'EC2 Instances without IMDSv2' as finding,
  count(*) as count
from aws_ec2_instance
where metadata_options ->> 'HttpTokens' != 'required';
EOF

# Run query from file
steampipe query security_audit.sql

# Run query with output format
steampipe query security_audit.sql --output json
steampipe query security_audit.sql --output csv
steampipe query security_audit.sql --output table

# Run query with parameters
cat > parameterized_query.sql << 'EOF'
select 
  instance_id,
  instance_type,
  state,
  region
from aws_ec2_instance
where region = $1
  and instance_type = $2;
EOF

steampipe query parameterized_query.sql us-east-1 t3.micro
```

## Advanced Plugin Management

### Plugin Installation and Management
```bash
# List available plugins
steampipe plugin list --available

# Install specific plugins
steampipe plugin install aws
steampipe plugin install azure
steampipe plugin install gcp
steampipe plugin install kubernetes
steampipe plugin install github
steampipe plugin install docker
steampipe plugin install terraform
steampipe plugin install vault
steampipe plugin install consul

# Install plugin with specific version
steampipe plugin install aws@0.100.0

# Update plugins
steampipe plugin update aws
steampipe plugin update --all

# Uninstall plugin
steampipe plugin uninstall aws

# Show plugin details
steampipe plugin show aws

# List plugin tables
steampipe query ".tables" | grep aws_

# Inspect table schema
steampipe query ".inspect aws_s3_bucket"

# Show plugin configuration
steampipe plugin show aws --config
```

### Multi-Account and Multi-Cloud Configuration
```bash
# Create comprehensive multi-cloud configuration
cat > ~/.steampipe/config/multi_cloud.spc << 'EOF'
# AWS Connections
connection "aws_dev" {
  plugin = "aws"
  profile = "dev"
  regions = ["us-east-1", "us-west-2"]
}

connection "aws_staging" {
  plugin = "aws"
  profile = "staging"
  regions = ["us-east-1", "us-west-2"]
}

connection "aws_prod" {
  plugin = "aws"
  profile = "prod"
  regions = ["us-east-1", "us-west-2", "eu-west-1"]
}

# Azure Connections
connection "azure_dev" {
  plugin = "azure"
  subscription_ids = ["dev-subscription-id"]
}

connection "azure_prod" {
  plugin = "azure"
  subscription_ids = ["prod-subscription-id"]
}

# GCP Connections
connection "gcp_dev" {
  plugin = "gcp"
  project = "my-dev-project"
}

connection "gcp_prod" {
  plugin = "gcp"
  project = "my-prod-project"
}

# Kubernetes Connections
connection "k8s_dev" {
  plugin = "kubernetes"
  config_path = "~/.kube/config"
  config_context = "dev-cluster"
}

connection "k8s_prod" {
  plugin = "kubernetes"
  config_path = "~/.kube/config"
  config_context = "prod-cluster"
}

# Aggregator connections
connection "aws_all" {
  type = "aggregator"
  plugin = "aws"
  connections = ["aws_dev", "aws_staging", "aws_prod"]
}

connection "azure_all" {
  type = "aggregator"
  plugin = "azure"
  connections = ["azure_dev", "azure_prod"]
}

connection "gcp_all" {
  type = "aggregator"
  plugin = "gcp"
  connections = ["gcp_dev", "gcp_prod"]
}
EOF

# Query across all AWS accounts
steampipe query "select account_id, name, region from aws_all.aws_s3_bucket"

# Query specific connection
steampipe query "select name, region from aws_prod.aws_s3_bucket"

# Cross-account comparison
steampipe query "
select 
  connection_name,
  count(*) as bucket_count
from aws_all.aws_s3_bucket
group by connection_name
order by bucket_count desc"
```

### Custom Plugin Development
```bash
# Create custom plugin directory
mkdir -p ~/.steampipe/plugins/custom

# Create plugin configuration
cat > ~/.steampipe/plugins/custom/plugin.go << 'EOF'
package main

import (
    "context"
    "github.com/turbot/steampipe-plugin-sdk/v5/plugin"
    "github.com/turbot/steampipe-plugin-sdk/v5/plugin/transform"
)

func Plugin(ctx context.Context) *plugin.Plugin {
    p := &plugin.Plugin{
        Name: "custom",
        ConnectionConfigSchema: &plugin.ConnectionConfigSchema{
            NewInstance: ConfigInstance,
            Schema:      ConfigSchema,
        },
        DefaultTransform: transform.FromGo().NullIfZero(),
        TableMap: map[string]*plugin.Table{
            "custom_resource": tableCustomResource(ctx),
        },
    }
    return p
}

func main() {
    plugin.Serve(&plugin.ServeOpts{PluginFunc: Plugin})
}
EOF

# Build custom plugin
cd ~/.steampipe/plugins/custom
go mod init steampipe-plugin-custom
go build -o steampipe-plugin-custom

# Install custom plugin
steampipe plugin install local/custom
```
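
Note that the sketch above omits the `ConfigInstance`, `ConfigSchema`, and `tableCustomResource` helpers, which a complete plugin must define. Once built and installed, the custom table is queried like any other; `custom_resource` is the hypothetical table name from the sketch:

```bash
# Query the custom plugin's table
steampipe query "select * from custom_resource limit 10"
```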

## Security and Compliance Queries

### AWS Security Queries
```sql
-- CIS AWS Foundations Benchmark queries

-- 1.1 Ensure root access key does not exist
select 
  account_id,
  user_name,
  access_key_id,
  status
from aws_iam_access_key
where user_name = 'root';

-- 1.2 Ensure MFA is enabled for root account
select 
  account_id,
  mfa_enabled
from aws_iam_account_summary
where mfa_enabled = false;

-- 1.3 Ensure credentials unused for 90 days are disabled
select 
  user_name,
  access_key_id,
  last_used_date,
  age(now(), last_used_date) as days_unused
from aws_iam_access_key
where last_used_date < now() - interval '90 days'
  and status = 'Active';

-- 2.1 Ensure CloudTrail is enabled in all regions
select 
  region,
  name,
  is_multi_region_trail,
  include_global_service_events
from aws_cloudtrail_trail
where is_multi_region_trail = false;

-- 2.2 Ensure CloudTrail log file validation is enabled
select 
  name,
  region,
  log_file_validation_enabled
from aws_cloudtrail_trail
where log_file_validation_enabled = false;

-- 2.3 Ensure S3 bucket used for CloudTrail is not publicly accessible
select 
  t.name as trail_name,
  t.s3_bucket_name,
  b.bucket_policy_is_public
from aws_cloudtrail_trail t
join aws_s3_bucket b on t.s3_bucket_name = b.name
where b.bucket_policy_is_public = true;

-- 2.4 Ensure CloudTrail trails are integrated with CloudWatch Logs
select 
  name,
  region,
  cloud_watch_logs_log_group_arn
from aws_cloudtrail_trail
where cloud_watch_logs_log_group_arn is null;

-- 3.1 Ensure VPC flow logging is enabled in all VPCs
select 
  vpc_id,
  region,
  state
from aws_vpc
where vpc_id not in (
  select resource_id 
  from aws_vpc_flow_log 
  where resource_type = 'VPC'
);

-- 3.2 Ensure default security groups restrict all traffic
select 
  group_id,
  group_name,
  vpc_id,
  region
from aws_vpc_security_group
where group_name = 'default'
  and (
    jsonb_array_length(ip_permissions) > 0 
    or jsonb_array_length(ip_permissions_egress) > 1
  );

-- 4.1 Ensure no security groups allow ingress from 0.0.0.0/0 to port 22
select 
  group_id,
  group_name,
  vpc_id,
  ip_permissions
from aws_vpc_security_group
where ip_permissions @> '[{"IpRanges": [{"CidrIp": "0.0.0.0/0"}], "FromPort": 22, "ToPort": 22}]';

-- 4.2 Ensure no security groups allow ingress from 0.0.0.0/0 to port 3389
select 
  group_id,
  group_name,
  vpc_id,
  ip_permissions
from aws_vpc_security_group
where ip_permissions @> '[{"IpRanges": [{"CidrIp": "0.0.0.0/0"}], "FromPort": 3389, "ToPort": 3389}]';

-- S3 Security Assessment
select 
  name,
  region,
  bucket_policy_is_public,
  block_public_acls,
  block_public_policy,
  ignore_public_acls,
  restrict_public_buckets,
  server_side_encryption_configuration
from aws_s3_bucket
where bucket_policy_is_public = true
   or block_public_acls = false
   or block_public_policy = false
   or ignore_public_acls = false
   or restrict_public_buckets = false
   or server_side_encryption_configuration is null;

-- RDS Security Assessment
select 
  db_instance_identifier,
  engine,
  publicly_accessible,
  storage_encrypted,
  backup_retention_period,
  deletion_protection
from aws_rds_db_instance
where publicly_accessible = true
   or storage_encrypted = false
   or backup_retention_period < 7
   or deletion_protection = false;
```
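
For recurring audits, queries like these are easy to batch from the shell; a minimal sketch that exports two of the findings above to timestamped CSV files (file names are illustrative):

```bash
# Export key AWS security findings to CSV
ts=$(date +%Y%m%d)

steampipe query "select name, region from aws_s3_bucket
  where server_side_encryption_configuration is null" \
  --output csv > "unencrypted_s3_${ts}.csv"

steampipe query "select db_instance_identifier, engine from aws_rds_db_instance
  where publicly_accessible = true" \
  --output csv > "public_rds_${ts}.csv"
```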

### Azure Security Queries
```sql
-- Azure Security Center recommendations

-- Find VMs without endpoint protection
select 
  name,
  resource_group,
  location,
  vm_size
from azure_compute_virtual_machine
where name not in (
  select vm_name 
  from azure_security_center_assessment 
  where assessment_key = 'endpoint-protection'
);

-- Find storage accounts without secure transfer
select 
  name,
  resource_group,
  location,
  enable_https_traffic_only
from azure_storage_account
where enable_https_traffic_only = false;

-- Find SQL servers without threat detection
select 
  name,
  resource_group,
  location,
  security_alert_policy
from azure_sql_server
where security_alert_policy ->> 'state' != 'Enabled';

-- Find network security groups with permissive rules
select 
  name,
  resource_group,
  location,
  security_rules
from azure_network_security_group
where security_rules @> '[{"access": "Allow", "direction": "Inbound", "sourceAddressPrefix": "*", "destinationPortRange": "*"}]';

-- Find key vaults without soft delete
select 
  name,
  resource_group,
  location,
  enable_soft_delete
from azure_key_vault
where enable_soft_delete = false;

-- Find unencrypted disks
select 
  name,
  resource_group,
  location,
  encryption_settings
from azure_compute_disk
where encryption_settings is null;
```

### GCP Security Queries
```sql
-- GCP Security Command Center findings

-- Find compute instances with external IPs
select 
  name,
  zone,
  status,
  network_interfaces
from gcp_compute_instance
where network_interfaces @> '[{"accessConfigs": [{"type": "ONE_TO_ONE_NAT"}]}]';

-- Find storage buckets with public access
select 
  name,
  location,
  iam_policy
from gcp_storage_bucket
where iam_policy -> 'bindings' @> '[{"members": ["allUsers"]}]'
   or iam_policy -> 'bindings' @> '[{"members": ["allAuthenticatedUsers"]}]';

-- Find SQL instances without backup
select 
  name,
  region,
  backend_type,
  settings
from gcp_sql_database_instance
where settings -> 'backupConfiguration' ->> 'enabled' != 'true';

-- Find compute instances without OS Login
select 
  name,
  zone,
  metadata
from gcp_compute_instance
where metadata -> 'items' @> '[{"key": "enable-oslogin", "value": "FALSE"}]'
   or not (metadata -> 'items' @> '[{"key": "enable-oslogin"}]');

-- Find firewall rules allowing ingress from anywhere
select 
  name,
  direction,
  allowed,
  source_ranges
from gcp_compute_firewall
where direction = 'INGRESS'
  and source_ranges @> '["0.0.0.0/0"]';

-- Find KMS keys without rotation
select 
  name,
  location,
  purpose,
  next_rotation_time
from gcp_kms_crypto_key
where purpose = 'ENCRYPT_DECRYPT'
  and next_rotation_time is null;
```

### Kubernetes Security Queries
```sql
-- Kubernetes security assessment

-- Find pods running as root
select 
  name,
  namespace,
  security_context
from kubernetes_pod
where security_context ->> 'runAsUser' = '0'
   or security_context ->> 'runAsUser' is null;

-- Find pods with privileged containers
select 
  name,
  namespace,
  containers
from kubernetes_pod
where containers @> '[{"securityContext": {"privileged": true}}]';

-- Find services with type LoadBalancer
select 
  name,
  namespace,
  type,
  spec
from kubernetes_service
where type = 'LoadBalancer';

-- Find pods without resource limits
select 
  name,
  namespace,
  containers
from kubernetes_pod
where not (containers @> '[{"resources": {"limits": {}}}]');

-- Find network policies
select 
  name,
  namespace,
  spec
from kubernetes_network_policy;

-- Find RBAC cluster admin bindings
select 
  name,
  role_ref,
  subjects
from kubernetes_cluster_role_binding
where role_ref ->> 'name' = 'cluster-admin';

-- Find pods with host network
select 
  name,
  namespace,
  host_network
from kubernetes_pod
where host_network = true;

-- Find persistent volumes with access mode ReadWriteMany
select 
  name,
  access_modes,
  capacity
from kubernetes_persistent_volume
where access_modes @> '["ReadWriteMany"]';
```

## Compliance and Reporting

### Compliance Framework Checks
```bash
# Install compliance mods
steampipe mod install github.com/turbot/steampipe-mod-aws-compliance
steampipe mod install github.com/turbot/steampipe-mod-azure-compliance
steampipe mod install github.com/turbot/steampipe-mod-gcp-compliance

# Run CIS AWS Foundations Benchmark
steampipe check benchmark.cis_v140

# Run specific control
steampipe check control.cis_v140_1_1

# Run with specific output format
steampipe check benchmark.cis_v140 --output json
steampipe check benchmark.cis_v140 --output csv
steampipe check benchmark.cis_v140 --output html

# Run NIST 800-53 controls
steampipe check benchmark.nist_800_53_rev_5

# Run SOC 2 controls
steampipe check benchmark.soc_2

# Run HIPAA controls
steampipe check benchmark.hipaa_final_omnibus_security_rule_2013

# Generate compliance report
steampipe check benchmark.cis_v140 --export compliance_report.json

# Run compliance check with specific connections
steampipe check benchmark.cis_v140 --search-path aws_prod,aws_staging
```
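
A common pattern is to run a benchmark nightly and keep the exported results for trend analysis; a sketch using cron (paths and schedule are illustrative):

```bash
# Nightly CIS benchmark with exported results
cat > ~/run_nightly_check.sh << 'EOF'
#!/bin/bash
mkdir -p ~/steampipe-reports
steampipe check benchmark.cis_v140 \
  --export ~/steampipe-reports/cis_$(date +%Y%m%d).json
EOF
chmod +x ~/run_nightly_check.sh

# Schedule the check for 03:00 every day
(crontab -l 2>/dev/null; echo "0 3 * * * ~/run_nightly_check.sh") | crontab -
```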

### Custom Compliance Queries
```sql
-- Create custom compliance dashboard
create or replace view security_dashboard as
select 
  'AWS' as cloud_provider,
  'S3 Buckets' as resource_type,
  count(*) as total_resources,
  count(*) filter (where server_side_encryption_configuration is not null) as compliant_resources,
  round(
    (count(*) filter (where server_side_encryption_configuration is not null) * 100.0) / count(*), 
    2
  ) as compliance_percentage
from aws_s3_bucket
union all
select 
  'AWS' as cloud_provider,
  'RDS Instances' as resource_type,
  count(*) as total_resources,
  count(*) filter (where storage_encrypted = true) as compliant_resources,
  round(
    (count(*) filter (where storage_encrypted = true) * 100.0) / count(*), 
    2
  ) as compliance_percentage
from aws_rds_db_instance
union all
select 
  'Azure' as cloud_provider,
  'Storage Accounts' as resource_type,
  count(*) as total_resources,
  count(*) filter (where enable_https_traffic_only = true) as compliant_resources,
  round(
    (count(*) filter (where enable_https_traffic_only = true) * 100.0) / count(*), 
    2
  ) as compliance_percentage
from azure_storage_account;

-- Query the dashboard
select * from security_dashboard order by compliance_percentage;

-- Create cost optimization view
create or replace view cost_optimization as
select 
  'AWS EC2' as service,
  'Stopped Instances' as finding,
  count(*) as count,
  'Stop unused instances' as recommendation
from aws_ec2_instance
where instance_state_name = 'stopped'
union all
select 
  'AWS RDS' as service,
  'Unencrypted Instances' as finding,
  count(*) as count,
  'Enable encryption' as recommendation
from aws_rds_db_instance
where storage_encrypted = false
union all
select 
  'AWS S3' as service,
  'Unencrypted Buckets' as finding,
  count(*) as count,
  'Enable server-side encryption' as recommendation
from aws_s3_bucket
where server_side_encryption_configuration is null;

-- Query cost optimization opportunities
select * from cost_optimization where count > 0;
```
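
The views defined above live in the Steampipe PostgreSQL database and can be queried from the CLI like any table, which makes exporting them straightforward:

```bash
# Export the dashboard and cost views for reporting
steampipe query "select * from security_dashboard order by compliance_percentage" \
  --output csv > security_dashboard.csv
steampipe query "select * from cost_optimization where count > 0" \
  --output json > cost_optimization.json
```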

### Automated Reporting Scripts
```python
#!/usr/bin/env python3
# Automated Steampipe reporting

import subprocess
import json
import csv
import datetime
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email import encoders
import argparse

class SteampipeReporter:
    """Automated reporting for Steampipe"""

    def __init__(self):
        self.timestamp = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')

    def run_query(self, query, output_format='json'):
        """Run a Steampipe query and return results"""

        try:
            cmd = ['steampipe', 'query', query, '--output', output_format]
            result = subprocess.run(cmd, capture_output=True, text=True, check=True)

            if output_format == 'json':
                return json.loads(result.stdout)
            else:
                return result.stdout

        except subprocess.CalledProcessError as e:
            print(f"Error running query: {e}")
            return None

    def run_compliance_check(self, benchmark, output_format='json'):
        """Run a compliance benchmark and return results"""

        try:
            cmd = ['steampipe', 'check', benchmark, '--output', output_format]
            result = subprocess.run(cmd, capture_output=True, text=True, check=True)

            if output_format == 'json':
                return json.loads(result.stdout)
            else:
                return result.stdout

        except subprocess.CalledProcessError as e:
            print(f"Error running compliance check: {e}")
            return None

    def generate_security_report(self):
        """Generate comprehensive security report"""

        print("Generating security report...")

        # Security queries
        queries = {
            'unencrypted_s3_buckets': """
                select name, region from aws_s3_bucket 
                where server_side_encryption_configuration is null
            """,
            'public_s3_buckets': """
                select name, region from aws_s3_bucket 
                where bucket_policy_is_public = true
            """,
            'open_security_groups': """
                select group_id, group_name, vpc_id from aws_vpc_security_group
                where ip_permissions @> '[{"IpRanges": [{"CidrIp": "0.0.0.0/0"}]}]'
            """,
            'unencrypted_rds': """
                select db_instance_identifier, engine from aws_rds_db_instance
                where storage_encrypted = false
            """,
            'root_access_keys': """
                select user_name, access_key_id from aws_iam_access_key
                where user_name = 'root'
            """
        }

        report = {
            'timestamp': self.timestamp,
            'findings': {}
        }

        for query_name, query in queries.items():
            print(f"Running query: {query_name}")
            results = self.run_query(query)
            if results:
                report['findings'][query_name] = results

        # Save report
        report_file = f"security_report_{self.timestamp}.json"
        with open(report_file, 'w') as f:
            json.dump(report, f, indent=2)

        print(f"Security report saved: {report_file}")
        return report_file

    def generate_compliance_report(self, benchmarks):
        """Generate compliance report for specified benchmarks"""

        print("Generating compliance report...")

        report = {
            'timestamp': self.timestamp,
            'benchmarks': {}
        }

        for benchmark in benchmarks:
            print(f"Running benchmark: {benchmark}")
            results = self.run_compliance_check(benchmark)
            if results:
                report['benchmarks'][benchmark] = results

        # Save report
        report_file = f"compliance_report_{self.timestamp}.json"
        with open(report_file, 'w') as f:
            json.dump(report, f, indent=2)

        print(f"Compliance report saved: {report_file}")
        return report_file

    def generate_inventory_report(self):
        """Generate infrastructure inventory report"""

        print("Generating inventory report...")

        # Inventory queries
        queries = {
            'aws_ec2_instances': """
                select instance_id, instance_type, state, region, tags 
                from aws_ec2_instance
            """,
            'aws_s3_buckets': """
                select name, region, creation_date 
                from aws_s3_bucket
            """,
            'aws_rds_instances': """
                select db_instance_identifier, engine, instance_class, region 
                from aws_rds_db_instance
            """,
            'azure_vms': """
                select name, vm_size, location, resource_group 
                from azure_compute_virtual_machine
            """,
            'gcp_instances': """
                select name, machine_type, zone, status 
                from gcp_compute_instance
            """,
            'kubernetes_pods': """
                select name, namespace, phase 
                from kubernetes_pod
            """
        }

        report = {
            'timestamp': self.timestamp,
            'inventory': {}
        }

        for query_name, query in queries.items():
            print(f"Running inventory query: {query_name}")
            results = self.run_query(query)
            if results:
                report['inventory'][query_name] = results

        # Save report
        report_file = f"inventory_report_{self.timestamp}.json"
        with open(report_file, 'w') as f:
            json.dump(report, f, indent=2)

        print(f"Inventory report saved: {report_file}")
        return report_file

    def generate_html_report(self, json_report_file):
        """Generate HTML report from JSON data"""

        with open(json_report_file, 'r') as f:
            data = json.load(f)

        html_content = f"""
<!DOCTYPE html>
<html>
<head>
    <title>Steampipe Report - {data['timestamp']}</title>
    <style>
        body {{ font-family: Arial, sans-serif; margin: 20px; }}
        .header {{ background: #f4f4f4; padding: 20px; border-radius: 5px; }}
        .section {{ margin: 20px 0; }}
        .finding {{ border: 1px solid #ddd; margin: 10px 0; padding: 15px; border-radius: 5px; }}
        .critical {{ border-left: 5px solid #d32f2f; }}
        .warning {{ border-left: 5px solid #f57c00; }}
        .info {{ border-left: 5px solid #1976d2; }}
        table {{ border-collapse: collapse; width: 100%; }}
        th, td {{ border: 1px solid #ddd; padding: 8px; text-align: left; }}
        th {{ background-color: #f2f2f2; }}
    </style>
</head>
<body>
    <div class="header">
        <h1>Steampipe Security Report</h1>
        <p>Generated on: {data['timestamp']}</p>
    </div>
"""

        # Add findings sections
        if 'findings' in data:
            html_content += "<div class='section'><h2>Security Findings</h2>"

            for finding_name, finding_data in data['findings'].items():
                count = len(finding_data) if finding_data else 0
                severity_class = 'critical' if count > 0 else 'info'

                html_content += f"""
                <div class="finding {severity_class}">
                    <h3>{finding_name.replace('_', ' ').title()}</h3>
                    <p>Count: {count}</p>
                </div>
                """

            html_content += "</div>"

        # Add compliance sections
        if 'benchmarks' in data:
            html_content += "<div class='section'><h2>Compliance Results</h2>"

            for benchmark_name, benchmark_data in data['benchmarks'].items():
                html_content += f"""
                <div class="finding info">
                    <h3>{benchmark_name}</h3>
                    <p>Benchmark executed successfully</p>
                </div>
                """

            html_content += "</div>"

        html_content += """
</body>
</html>
"""

        html_file = json_report_file.replace('.json', '.html')
        with open(html_file, 'w') as f:
            f.write(html_content)

        print(f"HTML report saved: {html_file}")
        return html_file

    def send_email_report(self, report_files, email_config):
        """Send report via email"""

        print("Sending email report...")

        msg = MIMEMultipart()
        msg['From'] = email_config['from']
        msg['To'] = ', '.join(email_config['to'])
        msg['Subject'] = f"Steampipe Security Report - {self.timestamp}"

        body = f"""
Steampipe Security Report

Generated on: {self.timestamp}

Please find the attached reports for review.

Best regards,
Steampipe Automation
"""

        msg.attach(MIMEText(body, 'plain'))

        # Attach report files
        for report_file in report_files:
            with open(report_file, 'rb') as attachment:
                part = MIMEBase('application', 'octet-stream')
                part.set_payload(attachment.read())

            encoders.encode_base64(part)
            part.add_header(
                'Content-Disposition',
                f'attachment; filename= {report_file}'
            )
            msg.attach(part)

        # Send email
        try:
            server = smtplib.SMTP(email_config['smtp_server'], email_config['smtp_port'])
            server.starttls()
            server.login(email_config['username'], email_config['password'])
            text = msg.as_string()
            server.sendmail(email_config['from'], email_config['to'], text)
            server.quit()
            print("Email sent successfully")
        except Exception as e:
            print(f"Error sending email: {e}")

def main():
    parser = argparse.ArgumentParser(description='Steampipe Automated Reporter')
    parser.add_argument('--report-type', choices=['security', 'compliance', 'inventory', 'all'], 
                       default='all', help='Type of report to generate')
    parser.add_argument('--benchmarks', nargs='+', 
                       default=['benchmark.cis_v140'], 
                       help='Compliance benchmarks to run')
    parser.add_argument('--email-config', help='Email configuration file (JSON)')
    parser.add_argument('--html', action='store_true', help='Generate HTML reports')

    args = parser.parse_args()

    reporter = SteampipeReporter()
    report_files = []

    if args.report_type in ['security', 'all']:
        security_report = reporter.generate_security_report()
        report_files.append(security_report)

        if args.html:
            html_report = reporter.generate_html_report(security_report)
            report_files.append(html_report)

    if args.report_type in ['compliance', 'all']:
        compliance_report = reporter.generate_compliance_report(args.benchmarks)
        report_files.append(compliance_report)

    if args.report_type in ['inventory', 'all']:
        inventory_report = reporter.generate_inventory_report()
        report_files.append(inventory_report)

    # Send email if configuration provided
    if args.email_config:
        with open(args.email_config, 'r') as f:
            email_config = json.load(f)
        reporter.send_email_report(report_files, email_config)

    print(f"Report generation completed. Files: {report_files}")

if __name__ == "__main__":
    main()
```
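
The script takes its SMTP settings from a JSON file whose keys match the `email_config` lookups above; a sketch, assuming the script is saved as `steampipe_reporter.py` (hypothetical filename):

```bash
# Create an email configuration matching the keys used by the script
cat > email_config.json << 'EOF'
{
  "smtp_server": "smtp.example.com",
  "smtp_port": 587,
  "username": "reports@example.com",
  "password": "changeme",
  "from": "reports@example.com",
  "to": ["security-team@example.com"]
}
EOF

# Generate all reports with HTML output and email them
python3 steampipe_reporter.py --report-type all --html --email-config email_config.json
```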

## Advanced Configuration and Automation

### Service Configuration and Management
```bash
#!/bin/bash
# Advanced Steampipe service management

setup_steampipe_service() {
    echo "Setting up Steampipe service..."

    # Create service configuration
    cat > ~/.steampipe/config/default.spc << 'EOF'
# Steampipe service configuration
options "general" {
  update_check = false
}

options "connection" {
  cache     = true
  cache_ttl = 300
}

options "database" {
  port        = 9193
  listen      = ["local", "network"]
  search_path = ["aws", "azure", "gcp", "kubernetes"]
}

options "terminal" {
  multi      = false
  output     = "table"
  header     = true
  separator  = "|"
  timing     = false
}
EOF

    # Create systemd service file
    sudo tee /etc/systemd/system/steampipe.service << 'EOF'
[Unit]
Description=Steampipe Service
After=network.target

[Service]
Type=simple
User=steampipe
Group=steampipe
WorkingDirectory=/home/steampipe
ExecStart=/usr/local/bin/steampipe service start --foreground
ExecStop=/usr/local/bin/steampipe service stop
Restart=always
RestartSec=10

[Install]
WantedBy=multi-user.target
EOF

    # Create steampipe user
    sudo useradd -r -s /bin/false steampipe
    sudo mkdir -p /home/steampipe/.steampipe
    sudo chown -R steampipe:steampipe /home/steampipe

    # Enable and start service
    sudo systemctl daemon-reload
    sudo systemctl enable steampipe
    sudo systemctl start steampipe

    echo "Steampipe service setup complete"
}

# Advanced query optimization
optimize_steampipe_performance() {
    echo "Optimizing Steampipe performance..."

    # Create performance configuration
    cat > ~/.steampipe/config/performance.spc << 'EOF'
options "database" {
  port        = 9193
  listen      = ["local"]
  search_path = ["aws", "azure", "gcp"]

  # Performance optimizations
  cache                = true
  cache_max_ttl        = 3600
  cache_max_size_mb    = 1024

  # Connection pooling
  max_parallel         = 10

  # Query timeout
  query_timeout        = 300
}

options "plugin" {
  # Plugin-specific optimizations
  memory_max_mb = 512
}
EOF

    # Create query optimization script
    cat > ~/.steampipe/optimize_queries.sh << 'EOF'
#!/bin/bash
# Query optimization for Steampipe

# Enable query caching
export STEAMPIPE_CACHE=true
export STEAMPIPE_CACHE_TTL=300

# Optimize connection settings
export STEAMPIPE_MAX_PARALLEL=10
export STEAMPIPE_QUERY_TIMEOUT=300

# Memory optimization
export STEAMPIPE_MEMORY_MAX_MB=1024

echo "Steampipe optimization settings applied"
EOF

    chmod +x ~/.steampipe/optimize_queries.sh

    echo "Performance optimizations applied"
}

# Automated backup and restore
setup_steampipe_backup() {
    echo "Setting up Steampipe backup..."

    # Create backup script
    cat > ~/.steampipe/backup.sh << 'EOF'
#!/bin/bash
# Steampipe backup script

BACKUP_DIR="$HOME/.steampipe/backups"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_FILE="steampipe_backup_$TIMESTAMP.tar.gz"

mkdir -p "$BACKUP_DIR"

echo "Creating backup: $BACKUP_FILE"

# Backup configuration and data
tar -czf "$BACKUP_DIR/$BACKUP_FILE" \
    ~/.steampipe/config/ \
    ~/.steampipe/mods/ \
    ~/.steampipe/plugins/ \
    --exclude="*.log" \
    --exclude="cache/*"

echo "Backup completed: $BACKUP_DIR/$BACKUP_FILE"

# Keep only last 7 backups
find "$BACKUP_DIR" -name "steampipe_backup_*.tar.gz" -mtime +7 -delete

echo "Old backups cleaned up"
EOF

    chmod +x ~/.steampipe/backup.sh

    # Create restore script
    cat > ~/.steampipe/restore.sh << 'EOF'
#!/bin/bash
# Steampipe restore script

if [ $# -eq 0 ]; then
    echo "Usage: $0 <backup_file>"
    echo "Available backups:"
    ls -la ~/.steampipe/backups/steampipe_backup_*.tar.gz 2>/dev/null || echo "No backups found"
    exit 1
fi

BACKUP_FILE="$1"

if [ ! -f "$BACKUP_FILE" ]; then
    echo "Backup file not found: $BACKUP_FILE"
    exit 1
fi

echo "Restoring from backup: $BACKUP_FILE"

# Stop Steampipe service
steampipe service stop

# Backup current configuration
mv ~/.steampipe/config ~/.steampipe/config.backup.$(date +%s) 2>/dev/null

# Restore from backup
tar -xzf "$BACKUP_FILE" -C /

# Restart service
steampipe service start

echo "Restore completed"
EOF

    chmod +x ~/.steampipe/restore.sh

    # Add to crontab for daily backups
    (crontab -l 2>/dev/null; echo "0 2 * * * ~/.steampipe/backup.sh") | crontab -

    echo "Backup system setup complete"
}

# Monitoring and alerting
setup_steampipe_monitoring() {
    echo "Setting up Steampipe monitoring..."

    # Create monitoring script
    cat > ~/.steampipe/monitor.sh << 'EOF'
#!/bin/bash
# Steampipe monitoring script

LOG_FILE="$HOME/.steampipe/monitor.log"
ALERT_EMAIL="admin@example.com"

log_message() {
    echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" >> "$LOG_FILE"
}

check_service() {
    if steampipe service status > /dev/null 2>&1; then
        log_message "Service is running"
        return 0
    else
        log_message "Service is not running"
        return 1
    fi
}

check_plugins() {
    local failed_plugins=()

    for plugin in aws azure gcp kubernetes; do
        if ! steampipe query "select 1" --search-path "$plugin" > /dev/null 2>&1; then
            failed_plugins+=("$plugin")
        fi
    done

    if [ ${#failed_plugins[@]} -gt 0 ]; then
        log_message "Failed plugins: ${failed_plugins[*]}"
        return 1
    else
        log_message "All plugins are working"
        return 0
    fi
}

send_alert() {
    local message="$1"
    echo "$message" | mail -s "Steampipe Alert" "$ALERT_EMAIL"
    log_message "Alert sent: $message"
}

# Main monitoring logic
if ! check_service; then
    send_alert "Steampipe service is down"

    # Try to restart service
    steampipe service start
    sleep 10

    if check_service; then
        send_alert "Steampipe service restarted successfully"
    else
        send_alert "Failed to restart Steampipe service"
    fi
fi

if ! check_plugins; then
    send_alert "Some Steampipe plugins are not working"
fi

# Check disk space
DISK_USAGE=$(df ~/.steampipe | tail -1 | awk '{print $5}' | sed 's/%//')
if [ "$DISK_USAGE" -gt 80 ]; then
    send_alert "Steampipe disk usage is high: ${DISK_USAGE}%"
fi

log_message "Monitoring check completed"
EOF

    chmod +x ~/.steampipe/monitor.sh

    # Add to crontab for monitoring every 5 minutes
    (crontab -l 2>/dev/null; echo "*/5 * * * * ~/.steampipe/monitor.sh") | crontab -

    echo "Monitoring setup complete"
}

# Run setup functions
setup_steampipe_service
optimize_steampipe_performance
setup_steampipe_backup
setup_steampipe_monitoring
```
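
With the systemd unit from the script in place, standard systemd tooling verifies and follows the service:

```bash
# Check the systemd-managed service
sudo systemctl status steampipe

# Follow the service logs
sudo journalctl -u steampipe -f
```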

## Performance Optimization and Troubleshooting

### Performance Tuning
```bash
#!/bin/bash
# Steampipe performance optimization

optimize_steampipe_queries() {
    echo "Optimizing Steampipe query performance..."

    # 1. Connection optimization
    cat > ~/.steampipe/config/optimized.spc << 'EOF'
options "database" {
  # Connection settings
  port              = 9193
  listen            = ["local"]
  search_path       = ["aws", "azure", "gcp"]

  # Performance settings
  cache             = true
  cache_max_ttl     = 3600
  cache_max_size_mb = 2048

  # Parallel execution
  max_parallel      = 20

  # Query optimization
  query_timeout     = 600
}

options "plugin" {
  # Plugin memory limits
  memory_max_mb = 1024
}

# Connection-specific optimizations
connection "aws" {
  plugin = "aws"

  # Limit regions for faster queries
  regions = ["us-east-1", "us-west-2", "eu-west-1"]

  # Ignore error regions
  ignore_error_codes = ["UnauthorizedOperation", "AccessDenied"]

  # Connection pooling
  max_error_retry_attempts = 3
  max_retry_delay          = 30
}
EOF

    # 2. Query optimization techniques
    cat > ~/.steampipe/query_optimization_guide.md << 'EOF'
# Steampipe Query Optimization Guide

## 1. Use Specific Connections
Instead of:
    select * from aws_s3_bucket;

Use:

    select * from aws_prod.aws_s3_bucket;

## 2. Limit Columns

Instead of:

    select * from aws_ec2_instance;

Use:

    select instance_id, instance_type, state from aws_ec2_instance;

## 3. Use WHERE Clauses

Instead of:

    select * from aws_ec2_instance;

Use:

    select * from aws_ec2_instance where region = 'us-east-1';

## 4. Use LIMIT for Large Results

    select * from aws_cloudtrail_event limit 100;

## 5. Use Indexes When Available

    select * from aws_s3_bucket where name = 'specific-bucket-name';

## 6. Avoid Cross-Connection JOINs

Instead of:

    select * from aws_prod.aws_s3_bucket b
    join aws_dev.aws_s3_bucket d on b.name = d.name;

Use separate queries or aggregator connections.
EOF

    # 3. Create performance monitoring
    cat > ~/.steampipe/performance_monitor.py << 'EOF'
#!/usr/bin/env python3
# Steampipe performance monitoring

import argparse
import json
import subprocess
import time

import psutil

class SteampipePerformanceMonitor:
    def __init__(self):
        self.metrics = []

    def get_steampipe_process(self):
        """Find the running Steampipe process"""
        for proc in psutil.process_iter(['pid', 'name', 'cmdline']):
            if 'steampipe' in proc.info['name']:
                return proc
        return None

    def monitor_query(self, query, duration=60):
        """Monitor query performance"""
        print(f"Monitoring query performance for {duration} seconds...")

        start_time = time.time()

        # Start query in background
        proc = subprocess.Popen(
            ['steampipe', 'query', query],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)

        # Monitor system resources
        steampipe_proc = self.get_steampipe_process()

        while time.time() - start_time < duration:
            if steampipe_proc:
                try:
                    cpu_percent = steampipe_proc.cpu_percent()
                    memory_mb = steampipe_proc.memory_info().rss / 1024 / 1024

                    metric = {
                        'timestamp': time.time(),
                        'cpu_percent': cpu_percent,
                        'memory_mb': memory_mb
                    }

                    self.metrics.append(metric)
                    print(f"CPU: {cpu_percent:.1f}%, Memory: {memory_mb:.1f}MB")

                except psutil.NoSuchProcess:
                    break

            time.sleep(1)

        # Wait for query to complete
        proc.wait()

        return self.metrics

    def benchmark_queries(self, queries):
        """Benchmark multiple queries"""
        results = {}

        for name, query in queries.items():
            print(f"\nBenchmarking: {name}")

            start_time = time.time()

            try:
                result = subprocess.run(
                    ['steampipe', 'query', query, '--output', 'json'],
                    capture_output=True, text=True, timeout=300)

                duration = time.time() - start_time

                if result.returncode == 0:
                    try:
                        data = json.loads(result.stdout)
                        row_count = len(data)
                    except (json.JSONDecodeError, TypeError):
                        row_count = 0

                    results[name] = {
                        'duration': duration,
                        'row_count': row_count,
                        'rows_per_second': row_count / duration if duration > 0 else 0,
                        'status': 'success'
                    }
                else:
                    results[name] = {
                        'duration': duration,
                        'status': 'error',
                        'error': result.stderr
                    }

                print(f"Duration: {duration:.2f}s, Rows: {results[name].get('row_count', 0)}")

            except subprocess.TimeoutExpired:
                results[name] = {
                    'duration': 300,
                    'status': 'timeout'
                }
                print("Query timed out")

        return results

def main():
    parser = argparse.ArgumentParser(description='Steampipe Performance Monitor')
    parser.add_argument('--query', help='Single query to monitor')
    parser.add_argument('--benchmark', action='store_true', help='Run benchmark suite')
    parser.add_argument('--duration', type=int, default=60, help='Monitoring duration')

    args = parser.parse_args()

    monitor = SteampipePerformanceMonitor()

    if args.query:
        metrics = monitor.monitor_query(args.query, args.duration)

        # Save metrics
        with open(f'performance_metrics_{int(time.time())}.json', 'w') as f:
            json.dump(metrics, f, indent=2)

    elif args.benchmark:
        benchmark_queries = {
            'simple_count': 'select count(*) from aws_s3_bucket',
            'complex_join': '''
                select b.name, r.name as region_name
                from aws_s3_bucket b
                join aws_region r on b.region = r.name
            ''',
            'aggregation': '''
                select region, count(*) as bucket_count
                from aws_s3_bucket
                group by region
                order by bucket_count desc
            ''',
            'json_query': '''
                select name, tags
                from aws_ec2_instance
                where tags is not null
            '''
        }

        results = monitor.benchmark_queries(benchmark_queries)

        # Save benchmark results
        with open(f'benchmark_results_{int(time.time())}.json', 'w') as f:
            json.dump(results, f, indent=2)

        # Print summary
        print("\nBenchmark Summary:")
        print("-" * 50)
        for name, result in results.items():
            if result['status'] == 'success':
                print(f"{name}: {result['duration']:.2f}s ({result['rows_per_second']:.1f} rows/s)")
            else:
                print(f"{name}: {result['status']}")

if __name__ == "__main__":
    main()
EOF

    chmod +x ~/.steampipe/performance_monitor.py

    echo "Performance optimization setup complete"
}

# Memory optimization
optimize_steampipe_memory() {
    echo "Optimizing Steampipe memory usage..."

    # Create memory optimization script
    cat > ~/.steampipe/memory_optimization.sh << 'EOF'
#!/bin/bash
# Memory optimization for Steampipe

# Set memory limits
export STEAMPIPE_MEMORY_MAX_MB=2048
export STEAMPIPE_PLUGIN_MEMORY_MAX_MB=512

# Optimize garbage collection
export GOGC=100

# Limit concurrent connections
export STEAMPIPE_MAX_PARALLEL=10

# Enable memory profiling
export STEAMPIPE_MEMORY_PROFILE=true

echo "Memory optimization settings applied"

# Monitor memory usage
watch -n 5 'ps aux | grep steampipe | grep -v grep'
EOF

    chmod +x ~/.steampipe/memory_optimization.sh

    echo "Memory optimization complete"
}

# Run optimizations
optimize_steampipe_queries
optimize_steampipe_memory
```
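
A quick way to exercise the monitor once the setup script has run (`psutil` is the script's only non-standard dependency):

```bash
# Install the Python dependency used by the monitor
pip3 install psutil

# Monitor a single query for 30 seconds
python3 ~/.steampipe/performance_monitor.py \
  --query "select count(*) from aws_s3_bucket" --duration 30

# Run the built-in benchmark suite
python3 ~/.steampipe/performance_monitor.py --benchmark
```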

### Troubleshooting Common Issues

```bash

#!/bin/bash
# Steampipe troubleshooting guide

troubleshoot_steampipe() {
    echo "Steampipe Troubleshooting Guide"
    echo "==============================="

# Check if Steampipe is installed
if ! command -v steampipe &> /dev/null; then
    echo "❌ Steampipe not found"
    echo "Solution: Install Steampipe"
    echo "  sudo /bin/sh -c \"\$(curl -fsSL https://raw.githubusercontent.com/turbot/steampipe/main/install.sh)\""
    return 1
fi

echo "✅ Steampipe found: $(steampipe --version)"

# Check service status
if steampipe service status > /dev/null 2>&1; then
    echo "✅ Steampipe service is running"
else
    echo "⚠️  Steampipe service is not running"
    echo "Solution: Start the service"
    echo "  steampipe service start"
fi

# Check plugins
plugin_count=$(steampipe plugin list | grep -c "installed")
if [ "$plugin_count" -eq 0 ]; then
    echo "⚠️  No plugins installed"
    echo "Solution: Install required plugins"
    echo "  steampipe plugin install aws"
    echo "  steampipe plugin install azure"
    echo "  steampipe plugin install gcp"
else
    echo "✅ Found $plugin_count installed plugins"
fi

# Check configuration
if [ ! -d ~/.steampipe/config ]; then
    echo "⚠️  Configuration directory not found"
    echo "Solution: Create configuration directory"
    echo "  mkdir -p ~/.steampipe/config"
else
    echo "✅ Configuration directory exists"
fi

# Check database connectivity
if steampipe query "select 1 as test" > /dev/null 2>&1; then
    echo "✅ Database connectivity working"
else
    echo "❌ Database connectivity failed"
    echo "Solution: Check service and restart if needed"
    echo "  steampipe service restart"
fi

# Check system resources
available_memory=$(free -m | awk 'NR==2{printf "%.1f", $7/1024}')
if (( $(echo "$available_memory < 2.0" | bc -l) )); then
    echo "⚠️  Low available memory: ${available_memory}GB"
    echo "Solution: Free up memory or increase system memory"
else
    echo "✅ Available memory: ${available_memory}GB"
fi

# Check disk space

    disk_usage=$(df ~/.steampipe | tail -1 | awk '{print $5}' | sed 's/%//')
    if [ "$disk_usage" -gt 80 ]; then
        echo "⚠️  High disk usage: ${disk_usage}%"
        echo "Solution: Clean up cache and logs"
        echo "  steampipe service stop"
        echo "  rm -rf ~/.steampipe/logs/"
        echo "  rm -rf ~/.steampipe/db/12.1.0/data/pg_log/"
        echo "  steampipe service start"
    else
        echo "✅ Disk usage: ${disk_usage}%"
    fi

echo "Troubleshooting completed"

}

# Common error solutions
fix_common_steampipe_errors() {
    echo "Common Steampipe Errors and Solutions"
    echo "===================================="

cat << 'EOF'
  1. "steampipe: command not found" Solution:

    • Install Steampipe using the official installer
    • Add installation directory to PATH
    • Verify installation with: steampipe --version
  2. "connection refused" or "database not available" Solution:

    • Start Steampipe service: steampipe service start
    • Check service status: steampipe service status
    • Restart if needed: steampipe service restart
  3. "plugin not found" or "table does not exist" Solution:

    • Install required plugin: steampipe plugin install
    • Update plugins: steampipe plugin update --all
    • Check installed plugins: steampipe plugin list
  4. "authentication failed" for cloud providers Solution:

    • Configure cloud credentials (AWS CLI, Azure CLI, gcloud)
    • Check connection configuration in ~/.steampipe/config/
    • Verify permissions for the service account/user
  5. "query timeout" or slow performance Solution:

    • Increase query timeout in configuration
    • Use more specific WHERE clauses
    • Limit regions in plugin configuration
    • Enable caching for repeated queries
  6. "out of memory" errors Solution:

    • Increase system memory
    • Reduce max_parallel connections
    • Use LIMIT clauses for large result sets
    • Optimize queries to reduce memory usage
  7. "permission denied" errors Solution:

    • Check file permissions for ~/.steampipe/
    • Ensure user has access to configuration files
    • Run with appropriate user privileges
  8. "plugin update failed" Solution:

    • Check internet connectivity
    • Clear plugin cache: rm -rf ~/.steampipe/plugins/
    • Reinstall plugins manually
  9. "database corruption" or startup failures Solution:

    • Stop service: steampipe service stop
    • Remove database: rm -rf ~/.steampipe/db/
    • Restart service: steampipe service start
  10. "high CPU usage" or performance issues Solution:

    • Optimize queries with proper WHERE clauses
    • Reduce concurrent connections
    • Enable query caching
    • Monitor with performance tools EOF }

# Performance diagnostics
diagnose_steampipe_performance() {
    echo "Diagnosing Steampipe Performance Issues"
    echo "======================================"

    # Check system load
    load_avg=$(uptime | awk -F'load average:' '{print $2}' | awk '{print $1}' | sed 's/,//')
    echo "System load average: $load_avg"

# Check Steampipe process
if pgrep steampipe > /dev/null; then
    steampipe_pid=$(pgrep steampipe)
    cpu_usage=$(ps -p $steampipe_pid -o %cpu --no-headers)
    memory_usage=$(ps -p $steampipe_pid -o %mem --no-headers)
    echo "Steampipe CPU usage: ${cpu_usage}%"
    echo "Steampipe memory usage: ${memory_usage}%"
else
    echo "Steampipe process not found"
fi

# Check database size
if [ -d ~/.steampipe/db ]; then
    db_size=$(du -sh ~/.steampipe/db | cut -f1)
    echo "Database size: $db_size"
fi

# Check cache size
if [ -d ~/.steampipe/cache ]; then
    cache_size=$(du -sh ~/.steampipe/cache | cut -f1)
    echo "Cache size: $cache_size"
fi

# Test query performance
echo "Testing query performance..."
start_time=$(date +%s.%N)
steampipe query "select count(*) from aws_region" > /dev/null 2>&1
end_time=$(date +%s.%N)
duration=$(echo "$end_time - $start_time" | bc)
echo "Simple query time: ${duration}s"

# Recommendations
echo ""
echo "Performance Recommendations:"
echo "- Optimal memory: >= 4GB for large environments"
echo "- Use SSD storage for better I/O performance"
echo "- Enable caching for repeated queries"
echo "- Limit regions in plugin configurations"
echo "- Use specific connections instead of aggregators when possible"

}

# Main troubleshooting function
main() {
    troubleshoot_steampipe
    echo ""
    fix_common_steampipe_errors
    echo ""
    diagnose_steampipe_performance
}

# Run troubleshooting
main
```

## Resources and Documentation

### Official Resources

- Steampipe documentation: https://steampipe.io/docs
- Steampipe plugin hub: https://hub.steampipe.io
- Steampipe on GitHub: https://github.com/turbot/steampipe

### Community Resources

### Integration Examples